From c88a007d3941c047f2c9a80fd3503b0d2770dfce Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 00:46:06 -0600 Subject: [PATCH 01/17] test: Update tmpdir to tmp_path --- tests/conftest.py | 2 +- tests/contrib/test_contrib_utils.py | 57 ++++----- tests/test_examples.py | 2 +- tests/test_infer.py | 22 ++-- tests/test_notebooks.py | 4 +- tests/test_scripts.py | 172 ++++++++++++++-------------- 6 files changed, 133 insertions(+), 126 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2f969e3444..ad2d9d7cba 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -167,6 +167,6 @@ def datadir(tmp_path, request): dir_util.copy_tree(test_dir, str(tmp_path)) # shutil is nicer, but doesn't work: https://bugs.python.org/issue20849 # Once pyhf is Python 3.8+ only then the below can be used. - # shutil.copytree(test_dir, tmpdir) + # shutil.copytree(test_dir, tmp_path) return tmp_path diff --git a/tests/contrib/test_contrib_utils.py b/tests/contrib/test_contrib_utils.py index 5a0b69261b..40ea0ccb26 100644 --- a/tests/contrib/test_contrib_utils.py +++ b/tests/contrib/test_contrib_utils.py @@ -10,70 +10,75 @@ @pytest.fixture(scope="function") -def tarfile_path(tmpdir): +def tarfile_path(tmp_path): with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" + tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" ) as write_file: write_file.write("test file") with tarfile.open( - tmpdir.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_tar.tar.gz").strpath) + archive.add(tmp_path.join("test_file.txt").strpath) + return Path(tmp_path.join("test_tar.tar.gz").strpath) @pytest.fixture(scope="function") -def tarfile_uncompressed_path(tmpdir): +def tarfile_uncompressed_path(tmp_path): with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" + tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" ) as write_file: write_file.write("test file") with tarfile.open( - tmpdir.join("test_tar.tar").strpath, mode="w", encoding="utf-8" + tmp_path.join("test_tar.tar").strpath, mode="w", encoding="utf-8" ) as archive: - archive.add(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_tar.tar").strpath) + archive.add(tmp_path.join("test_file.txt").strpath) + return Path(tmp_path.join("test_tar.tar").strpath) @pytest.fixture(scope="function") -def zipfile_path(tmpdir): +def zipfile_path(tmp_path): with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" + tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" ) as write_file: write_file.write("test file") - with zipfile.ZipFile(tmpdir.join("test_zip.zip").strpath, "w") as archive: - archive.write(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_zip.zip").strpath) + with zipfile.ZipFile(tmp_path.join("test_zip.zip").strpath, "w") as archive: + archive.write(tmp_path.join("test_file.txt").strpath) + return Path(tmp_path.join("test_zip.zip").strpath) -def test_download_untrusted_archive_host(tmpdir, requests_mock): +def test_download_untrusted_archive_host(tmp_path, requests_mock): archive_url = "https://www.pyhfthisdoesnotexist.org" requests_mock.get(archive_url) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmpdir.join("likelihoods").strpath) + download(archive_url, tmp_path.join("likelihoods").strpath) -def 
test_download_invalid_archive(tmpdir, requests_mock): +def test_download_invalid_archive(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url, status_code=404) with pytest.raises(InvalidArchive): - download(archive_url, tmpdir.join("likelihoods").strpath) + download(archive_url, tmp_path.join("likelihoods").strpath) -def test_download_compress(tmpdir, requests_mock): +def test_download_compress(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url) - download(archive_url, tmpdir.join("likelihoods").strpath, compress=True) + download(archive_url, tmp_path.join("likelihoods").strpath, compress=True) def test_download_archive_type( - tmpdir, mocker, requests_mock, tarfile_path, tarfile_uncompressed_path, zipfile_path + tmp_path, + mocker, + requests_mock, + tarfile_path, + tarfile_uncompressed_path, + zipfile_path, ): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" - output_directory = tmpdir.join("likelihoods").strpath + output_directory = tmp_path.join("likelihoods").strpath # Give BytesIO a tarfile requests_mock.get(archive_url, content=open(tarfile_path, "rb").read()) download(archive_url, output_directory) @@ -97,13 +102,13 @@ def test_download_archive_type( download(archive_url, output_directory) -def test_download_archive_force(tmpdir, requests_mock, tarfile_path): +def test_download_archive_force(tmp_path, requests_mock, tarfile_path): archive_url = "https://www.cern.ch/record/resource/123456789" requests_mock.get( archive_url, content=open(tarfile_path, "rb").read(), status_code=200 ) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmpdir.join("likelihoods").strpath, force=False) + download(archive_url, tmp_path.join("likelihoods").strpath, force=False) - download(archive_url, tmpdir.join("likelihoods").strpath, force=True) + download(archive_url, tmp_path.join("likelihoods").strpath, force=True) diff --git a/tests/test_examples.py b/tests/test_examples.py index 9d4c2a1e1c..fa545726ec 100644 --- a/tests/test_examples.py +++ b/tests/test_examples.py @@ -1,7 +1,7 @@ import shlex -def test_2bin_1channel(tmpdir, script_runner): +def test_2bin_1channel(tmp_path, script_runner): command = f"pyhf inspect {'docs/examples/json/2-bin_1-channel.json':s}" ret = script_runner.run(shlex.split(command)) assert ret.success diff --git a/tests/test_infer.py b/tests/test_infer.py index f6a5bc6e92..0ccd072b94 100644 --- a/tests/test_infer.py +++ b/tests/test_infer.py @@ -23,7 +23,7 @@ def check_uniform_type(in_list): ) -def test_toms748_scan(tmpdir, hypotest_args): +def test_toms748_scan(tmp_path, hypotest_args): """ Test the upper limit toms748 scan returns the correct structure and values """ @@ -166,7 +166,7 @@ def test_upper_limit_with_kwargs(hypotest_args): ) -def test_mle_fit_default(tmpdir, hypotest_args): +def test_mle_fit_default(tmp_path, hypotest_args): """ Check that the default return structure of pyhf.infer.mle.fit is as expected """ @@ -180,7 +180,7 @@ def test_mle_fit_default(tmpdir, hypotest_args): assert pyhf.tensorlib.shape(result) == (model.config.npars,) -def test_mle_fit_return_fitted_val(tmpdir, hypotest_args): +def test_mle_fit_return_fitted_val(tmp_path, hypotest_args): """ Check that the return structure of pyhf.infer.mle.fit with the return_fitted_val keyword arg is as expected @@ -196,7 +196,7 @@ def test_mle_fit_return_fitted_val(tmpdir, hypotest_args): assert 
pyhf.tensorlib.shape(result[1]) == () -def test_hypotest_default(tmpdir, hypotest_args): +def test_hypotest_default(tmp_path, hypotest_args): """ Check that the default return structure of pyhf.infer.hypotest is as expected """ @@ -209,7 +209,7 @@ def test_hypotest_default(tmpdir, hypotest_args): assert isinstance(result, type(tb.astensor(result))) -def test_hypotest_poi_outofbounds(tmpdir, hypotest_args): +def test_hypotest_poi_outofbounds(tmp_path, hypotest_args): """ Check that the fit errors for POI outside of parameter bounds """ @@ -226,7 +226,7 @@ def test_hypotest_poi_outofbounds(tmpdir, hypotest_args): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_tail_probs(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_tail_probs(tmp_path, hypotest_args, test_stat): """ Check that the return structure of pyhf.infer.hypotest with the return_tail_probs keyword arg is as expected @@ -243,7 +243,7 @@ def test_hypotest_return_tail_probs(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_expected(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_expected(tmp_path, hypotest_args, test_stat): """ Check that the return structure of pyhf.infer.hypotest with the addition of the return_expected keyword arg is as expected @@ -265,7 +265,7 @@ def test_hypotest_return_expected(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('test_stat', ['q0', 'q', 'qtilde']) -def test_hypotest_return_expected_set(tmpdir, hypotest_args, test_stat): +def test_hypotest_return_expected_set(tmp_path, hypotest_args, test_stat): """ Check that the return structure of pyhf.infer.hypotest with the addition of the return_expected_set keyword arg is as expected @@ -300,7 +300,7 @@ def test_hypotest_return_expected_set(tmpdir, hypotest_args, test_stat): @pytest.mark.parametrize('return_expected', [True, False]) @pytest.mark.parametrize('return_expected_set', [True, False]) def test_hypotest_return_calculator( - tmpdir, + tmp_path, hypotest_args, calctype, kwargs, @@ -491,7 +491,7 @@ def test_significance_to_pvalue_roundtrip(backend): assert np.allclose(sigma, back_to_sigma, atol=0, rtol=rtol) -def test_emperical_distribution(tmpdir, hypotest_args): +def test_emperical_distribution(tmp_path, hypotest_args): """ Check that the empirical distribution of the test statistic gives expected results @@ -537,7 +537,7 @@ def test_emperical_distribution(tmpdir, hypotest_args): ) -def test_toy_calculator(tmpdir, hypotest_args): +def test_toy_calculator(tmp_path, hypotest_args): """ Check that the toy calculator is performing as expected """ diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py index 07b978c2ff..750dc35169 100644 --- a/tests/test_notebooks.py +++ b/tests/test_notebooks.py @@ -11,8 +11,8 @@ @pytest.fixture() -def common_kwargs(tmpdir): - outputnb = tmpdir.join('output.ipynb') +def common_kwargs(tmp_path): + outputnb = tmp_path.join('output.ipynb') return { 'output_path': str(outputnb), 'kernel_name': f'python{sys.version_info.major}', diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 0dd88e9b8a..02d6006c78 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -15,16 +15,16 @@ @pytest.fixture(scope="function") -def tarfile_path(tmpdir): +def tarfile_path(tmp_path): with open( - tmpdir.join("test_file.txt").strpath, "w", encoding="utf-8" + tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" ) as write_file: write_file.write("test file") with 
tarfile.open( - tmpdir.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmpdir.join("test_file.txt").strpath) - return Path(tmpdir.join("test_tar.tar.gz").strpath) + archive.add(tmp_path.join("test_file.txt").strpath) + return Path(tmp_path.join("test_tar.tar.gz").strpath) def test_version(script_runner): @@ -57,8 +57,8 @@ def test_citation(script_runner, flag): # see test_import.py for the same (detailed) test -def test_import_prepHistFactory(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_import_prepHistFactory(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -70,8 +70,8 @@ def test_import_prepHistFactory(tmpdir, script_runner): pyhf.schema.validate(spec, 'model.json') -def test_import_prepHistFactory_withProgress(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_import_prepHistFactory_withProgress(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -79,7 +79,7 @@ def test_import_prepHistFactory_withProgress(tmpdir, script_runner): assert ret.stderr != '' -def test_import_prepHistFactory_stdout(tmpdir, script_runner): +def test_import_prepHistFactory_stdout(tmp_path, script_runner): command = 'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -89,8 +89,8 @@ def test_import_prepHistFactory_stdout(tmpdir, script_runner): assert d -def test_import_prepHistFactory_and_fit(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_import_prepHistFactory_and_fit(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -118,7 +118,7 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): assert "mle_parameters" in ret_json assert "twice_nll" in ret_json - tmp_out = tmpdir.join(f"{measurement:s}_output.json") + tmp_out = tmp_path.join(f"{measurement:s}_output.json") # make sure output file works too command += f" --output-file {tmp_out.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -128,8 +128,8 @@ def test_import_prepHistFactory_and_fit(tmpdir, script_runner): assert "twice_nll" in ret_json -def test_import_prepHistFactory_and_cls(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_import_prepHistFactory_and_cls(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -157,7 +157,7 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): assert 'CLs_obs' in d assert 'CLs_exp' in d - tmp_out = tmpdir.join(f'{measurement:s}_output.json') + tmp_out = 
tmp_path.join(f'{measurement:s}_output.json') # make sure output file works too command += f' --output-file {tmp_out.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -167,10 +167,10 @@ def test_import_prepHistFactory_and_cls(tmpdir, script_runner): assert 'CLs_exp' in d -def test_import_usingMounts(datadir, tmpdir, script_runner): +def test_import_usingMounts(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmpdir.join("parsed_output.json") + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) @@ -183,10 +183,10 @@ def test_import_usingMounts(datadir, tmpdir, script_runner): pyhf.schema.validate(spec, 'model.json') -def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): +def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmpdir.join("parsed_output.json") + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) @@ -196,8 +196,8 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmpdir, script_runner): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) -def test_fit_backend_option(tmpdir, script_runner, backend): - temp = tmpdir.join("parsed_output.json") +def test_fit_backend_option(tmp_path, script_runner, backend): + temp = tmp_path.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -211,8 +211,8 @@ def test_fit_backend_option(tmpdir, script_runner, backend): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) -def test_cls_backend_option(tmpdir, script_runner, backend): - temp = tmpdir.join("parsed_output.json") +def test_cls_backend_option(tmp_path, script_runner, backend): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -226,18 +226,18 @@ def test_cls_backend_option(tmpdir, script_runner, backend): assert 'CLs_exp' in d -def test_import_and_export(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_import_and_export(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s}" + command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output').strpath:s}" ret = script_runner.run(shlex.split(command)) assert ret.success -def test_patch(tmpdir, script_runner): - patch = tmpdir.join('patch.json') +def test_patch(tmp_path, script_runner): + patch = tmp_path.join('patch.json') patch.write( ''' @@ -245,7 +245,7 @@ def test_patch(tmpdir, script_runner): ''' ) - temp = 
tmpdir.join("parsed_output.json") + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -253,7 +253,7 @@ def test_patch(tmpdir, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" + command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -262,17 +262,17 @@ def test_patch(tmpdir, script_runner): ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output_2').strpath:s} --patch -" + command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output_2').strpath:s} --patch -" ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success -def test_patch_fail(tmpdir, script_runner): - patch = tmpdir.join('patch.json') +def test_patch_fail(tmp_path, script_runner): + patch = tmp_path.join('patch.json') patch.write('''not,json''') - temp = tmpdir.join("parsed_output.json") + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -280,13 +280,13 @@ def test_patch_fail(tmpdir, script_runner): ret = script_runner.run(shlex.split(command)) assert not ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmpdir.mkdir('output').strpath:s} --patch {patch.strpath:s}" + command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output').strpath:s} --patch {patch.strpath:s}" ret = script_runner.run(shlex.split(command)) assert not ret.success -def test_bad_measurement_name(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_bad_measurement_name(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -296,8 +296,8 @@ def test_bad_measurement_name(tmpdir, script_runner): # assert 'no measurement by name' in ret.stderr # numpy swallows the log.error() here, dunno why -def test_testpoi(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_testpoi(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -331,8 +331,8 @@ def test_testpoi(tmpdir, script_runner): @pytest.mark.parametrize( "opts,success", [(["maxiter=1000"], True), (["maxiter=1"], False)] ) -def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): - temp = tmpdir.join("parsed_output.json") +def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): + temp = tmp_path.join("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ 
-347,8 +347,8 @@ def test_fit_optimizer(tmpdir, script_runner, optimizer, opts, success): @pytest.mark.parametrize( 'opts,success', [(['maxiter=1000'], True), (['maxiter=1'], False)] ) -def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): - temp = tmpdir.join("parsed_output.json") +def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -359,8 +359,8 @@ def test_cls_optimizer(tmpdir, script_runner, optimizer, opts, success): assert ret.success == success -def test_inspect(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_inspect(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -369,12 +369,12 @@ def test_inspect(tmpdir, script_runner): assert ret.success -def test_inspect_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_inspect_outfile(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("inspect_output.json") + tempout = tmp_path.join("inspect_output.json") command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -396,8 +396,8 @@ def test_inspect_outfile(tmpdir, script_runner): assert len(summary['systematics']) == 6 -def test_prune(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_prune(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -408,12 +408,12 @@ def test_prune(tmpdir, script_runner): assert ret.success -def test_prune_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_prune_outfile(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("prune_output.json") + tempout = tmp_path.join("prune_output.json") command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -428,8 +428,8 @@ def test_prune_outfile(tmpdir, script_runner): assert 'staterror_channel1' not in pruned_ws.model().config.parameters -def test_rename(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_rename(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = 
script_runner.run(shlex.split(command)) @@ -438,12 +438,12 @@ def test_rename(tmpdir, script_runner): assert ret.success -def test_rename_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_rename_outfile(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("rename_output.json") + tempout = tmp_path.join("rename_output.json") command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -462,9 +462,9 @@ def test_rename_outfile(tmpdir, script_runner): assert 'staterror_channelone' in renamed_ws.model().config.parameters -def test_combine(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") +def test_combine(tmp_path, script_runner): + temp_1 = tmp_path.join("parsed_output.json") + temp_2 = tmp_path.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -490,9 +490,9 @@ def test_combine(tmpdir, script_runner): assert ret.success -def test_combine_outfile(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") +def test_combine_outfile(tmp_path, script_runner): + temp_1 = tmp_path.join("parsed_output.json") + temp_2 = tmp_path.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -513,7 +513,7 @@ def test_combine_outfile(tmpdir, script_runner): command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("combined_output.json") + tempout = tmp_path.join("combined_output.json") command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -524,9 +524,9 @@ def test_combine_outfile(tmpdir, script_runner): assert len(combined_ws.measurement_names) == 8 -def test_combine_merge_channels(tmpdir, script_runner): - temp_1 = tmpdir.join("parsed_output.json") - temp_2 = tmpdir.join("renamed_output.json") +def test_combine_merge_channels(tmp_path, script_runner): + temp_1 = tmp_path.join("parsed_output.json") + temp_2 = tmp_path.join("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -547,13 +547,13 @@ def test_combine_merge_channels(tmpdir, script_runner): @pytest.mark.parametrize( 'algorithms', [['md5'], ['sha256'], ['sha256', 'md5'], ['sha256', 'md5']] ) -def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): +def test_workspace_digest(tmp_path, script_runner, algorithms, do_json): results = { 'md5': '7de8930ff37e5a4f6a31da11bda7813f', 'sha256': 
'6d416ee67a40460499ea2ef596fb1e682a563d7df06e690018a211d35238aecc', } - temp = tmpdir.join("parsed_output.json") + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -588,21 +588,23 @@ def test_workspace_digest(tmpdir, script_runner, algorithms, do_json): "https://doi.org/10.17182/hepdata.89408.v1/r2", ], ) -def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, archive): +def test_patchset_download( + tmp_path, script_runner, requests_mock, tarfile_path, archive +): requests_mock.get(archive, content=open(tarfile_path, "rb").read()) - command = f'pyhf contrib download {archive} {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download {archive} {tmp_path.join("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert ret.success # Run with all optional flags - command = f'pyhf contrib download --verbose --force {archive} {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force {archive} {tmp_path.join("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert ret.success requests_mock.get( "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200 ) - command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.join("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert ( @@ -614,7 +616,7 @@ def test_patchset_download(tmpdir, script_runner, requests_mock, tarfile_path, a requests_mock.get( "https://httpstat.us/404/record/resource/1234567", status_code=404 ) - command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmpdir.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.join("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert "gives a response code of 404" in ret.stderr @@ -686,8 +688,8 @@ def test_patchset_inspect(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) @pytest.mark.parametrize('with_metadata', [False, True]) -def test_patchset_extract(datadir, tmpdir, script_runner, output_file, with_metadata): - temp = tmpdir.join("extracted_output.json") +def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_metadata): + temp = tmp_path.join("extracted_output.json") command = f'pyhf patchset extract {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: command += f" --output-file {temp.strpath}" @@ -721,8 +723,8 @@ def test_patchset_verify(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) -def test_patchset_apply(datadir, tmpdir, script_runner, output_file): - temp = tmpdir.join("patched_output.json") +def test_patchset_apply(datadir, tmp_path, script_runner, output_file): + temp = tmp_path.join("patched_output.json") command = f'pyhf patchset apply {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: command += f" --output-file 
{temp.strpath}" @@ -740,8 +742,8 @@ def test_patchset_apply(datadir, tmpdir, script_runner, output_file): } -def test_sort(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_sort(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -751,12 +753,12 @@ def test_sort(tmpdir, script_runner): assert ret.success -def test_sort_outfile(tmpdir, script_runner): - temp = tmpdir.join("parsed_output.json") +def test_sort_outfile(tmp_path, script_runner): + temp = tmp_path.join("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmpdir.join("sort_output.json") + tempout = tmp_path.join("sort_output.json") command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}' ret = script_runner.run(shlex.split(command)) From 68a5187fb11e2fa5f87cd71af44073632c409a1e Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 00:59:10 -0600 Subject: [PATCH 02/17] pathlib uses joinpath not join --- tests/test_scripts.py | 100 +++++++++++++++++++++--------------------- 1 file changed, 51 insertions(+), 49 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 02d6006c78..4066141990 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -17,14 +17,14 @@ @pytest.fixture(scope="function") def tarfile_path(tmp_path): with open( - tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" + tmp_path.joinpath("test_file.txt").strpath, "w", encoding="utf-8" ) as write_file: write_file.write("test file") with tarfile.open( - tmp_path.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.joinpath("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmp_path.join("test_file.txt").strpath) - return Path(tmp_path.join("test_tar.tar.gz").strpath) + archive.add(tmp_path.joinpath("test_file.txt").strpath) + return Path(tmp_path.joinpath("test_tar.tar.gz").strpath) def test_version(script_runner): @@ -58,7 +58,7 @@ def test_citation(script_runner, flag): # see test_import.py for the same (detailed) test def test_import_prepHistFactory(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -71,7 +71,7 @@ def test_import_prepHistFactory(tmp_path, script_runner): def test_import_prepHistFactory_withProgress(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -90,7 +90,7 @@ def test_import_prepHistFactory_stdout(tmp_path, script_runner): def test_import_prepHistFactory_and_fit(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f"pyhf xml2json 
validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -118,7 +118,7 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): assert "mle_parameters" in ret_json assert "twice_nll" in ret_json - tmp_out = tmp_path.join(f"{measurement:s}_output.json") + tmp_out = tmp_path.joinpath(f"{measurement:s}_output.json") # make sure output file works too command += f" --output-file {tmp_out.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -129,7 +129,7 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): def test_import_prepHistFactory_and_cls(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -157,7 +157,7 @@ def test_import_prepHistFactory_and_cls(tmp_path, script_runner): assert 'CLs_obs' in d assert 'CLs_exp' in d - tmp_out = tmp_path.join(f'{measurement:s}_output.json') + tmp_out = tmp_path.joinpath(f'{measurement:s}_output.json') # make sure output file works too command += f' --output-file {tmp_out.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -170,7 +170,7 @@ def test_import_prepHistFactory_and_cls(tmp_path, script_runner): def test_import_usingMounts(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) @@ -186,7 +186,7 @@ def test_import_usingMounts(datadir, tmp_path, script_runner): def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) @@ -197,7 +197,7 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) def test_fit_backend_option(tmp_path, script_runner, backend): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -212,7 +212,7 @@ def test_fit_backend_option(tmp_path, script_runner, backend): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) def test_cls_backend_option(tmp_path, script_runner, backend): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -227,7 +227,7 @@ def test_cls_backend_option(tmp_path, script_runner, backend): def 
test_import_and_export(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -237,7 +237,7 @@ def test_import_and_export(tmp_path, script_runner): def test_patch(tmp_path, script_runner): - patch = tmp_path.join('patch.json') + patch = tmp_path.joinpath('patch.json') patch.write( ''' @@ -245,7 +245,7 @@ def test_patch(tmp_path, script_runner): ''' ) - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -268,11 +268,11 @@ def test_patch(tmp_path, script_runner): def test_patch_fail(tmp_path, script_runner): - patch = tmp_path.join('patch.json') + patch = tmp_path.joinpath('patch.json') patch.write('''not,json''') - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -286,7 +286,7 @@ def test_patch_fail(tmp_path, script_runner): def test_bad_measurement_name(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -297,7 +297,7 @@ def test_bad_measurement_name(tmp_path, script_runner): def test_testpoi(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -332,7 +332,7 @@ def test_testpoi(tmp_path, script_runner): "opts,success", [(["maxiter=1000"], True), (["maxiter=1"], False)] ) def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" ret = script_runner.run(shlex.split(command)) @@ -348,7 +348,7 @@ def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): 'opts,success', [(['maxiter=1000'], True), (['maxiter=1'], False)] ) def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' ret = script_runner.run(shlex.split(command)) @@ -360,7 +360,7 @@ def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): def test_inspect(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ 
--output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -370,11 +370,11 @@ def test_inspect(tmp_path, script_runner): def test_inspect_outfile(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmp_path.join("inspect_output.json") + tempout = tmp_path.joinpath("inspect_output.json") command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -397,7 +397,7 @@ def test_inspect_outfile(tmp_path, script_runner): def test_prune(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -409,11 +409,11 @@ def test_prune(tmp_path, script_runner): def test_prune_outfile(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmp_path.join("prune_output.json") + tempout = tmp_path.joinpath("prune_output.json") command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -429,7 +429,7 @@ def test_prune_outfile(tmp_path, script_runner): def test_rename(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -439,11 +439,11 @@ def test_rename(tmp_path, script_runner): def test_rename_outfile(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmp_path.join("rename_output.json") + tempout = tmp_path.joinpath("rename_output.json") command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -463,8 +463,8 @@ def test_rename_outfile(tmp_path, script_runner): def test_combine(tmp_path, script_runner): - temp_1 = tmp_path.join("parsed_output.json") - temp_2 = tmp_path.join("renamed_output.json") + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -491,8 +491,8 @@ def 
test_combine(tmp_path, script_runner): def test_combine_outfile(tmp_path, script_runner): - temp_1 = tmp_path.join("parsed_output.json") - temp_2 = tmp_path.join("renamed_output.json") + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -513,7 +513,7 @@ def test_combine_outfile(tmp_path, script_runner): command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" ret = script_runner.run(shlex.split(command)) - tempout = tmp_path.join("combined_output.json") + tempout = tmp_path.joinpath("combined_output.json") command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -525,8 +525,8 @@ def test_combine_outfile(tmp_path, script_runner): def test_combine_merge_channels(tmp_path, script_runner): - temp_1 = tmp_path.join("parsed_output.json") - temp_2 = tmp_path.join("renamed_output.json") + temp_1 = tmp_path.joinpath("parsed_output.json") + temp_2 = tmp_path.joinpath("renamed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -553,7 +553,7 @@ def test_workspace_digest(tmp_path, script_runner, algorithms, do_json): 'sha256': '6d416ee67a40460499ea2ef596fb1e682a563d7df06e690018a211d35238aecc', } - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -592,19 +592,21 @@ def test_patchset_download( tmp_path, script_runner, requests_mock, tarfile_path, archive ): requests_mock.get(archive, content=open(tarfile_path, "rb").read()) - command = f'pyhf contrib download {archive} {tmp_path.join("likelihoods").strpath}' + command = ( + f'pyhf contrib download {archive} {tmp_path.joinpath("likelihoods").strpath}' + ) ret = script_runner.run(shlex.split(command)) assert ret.success # Run with all optional flags - command = f'pyhf contrib download --verbose --force {archive} {tmp_path.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force {archive} {tmp_path.joinpath("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert ret.success requests_mock.get( "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200 ) - command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.join("likelihoods").strpath}' + command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert ( @@ -616,7 +618,7 @@ def test_patchset_download( requests_mock.get( "https://httpstat.us/404/record/resource/1234567", status_code=404 ) - command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.join("likelihoods").strpath}' + command = f'pyhf contrib download 
--verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath("likelihoods").strpath}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert "gives a response code of 404" in ret.stderr @@ -689,7 +691,7 @@ def test_patchset_inspect(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) @pytest.mark.parametrize('with_metadata', [False, True]) def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_metadata): - temp = tmp_path.join("extracted_output.json") + temp = tmp_path.joinpath("extracted_output.json") command = f'pyhf patchset extract {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: command += f" --output-file {temp.strpath}" @@ -724,7 +726,7 @@ def test_patchset_verify(datadir, script_runner): @pytest.mark.parametrize('output_file', [False, True]) def test_patchset_apply(datadir, tmp_path, script_runner, output_file): - temp = tmp_path.join("patched_output.json") + temp = tmp_path.joinpath("patched_output.json") command = f'pyhf patchset apply {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: command += f" --output-file {temp.strpath}" @@ -743,7 +745,7 @@ def test_patchset_apply(datadir, tmp_path, script_runner, output_file): def test_sort(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) @@ -754,11 +756,11 @@ def test_sort(tmp_path, script_runner): def test_sort_outfile(tmp_path, script_runner): - temp = tmp_path.join("parsed_output.json") + temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' ret = script_runner.run(shlex.split(command)) - tempout = tmp_path.join("sort_output.json") + tempout = tmp_path.joinpath("sort_output.json") command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}' ret = script_runner.run(shlex.split(command)) From df33624bb3728da0ad8cc9d038ce6ecbca9b8024 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 01:20:28 -0600 Subject: [PATCH 03/17] temp.strpath to temp --- tests/test_scripts.py | 101 ++++++++++++++++++++---------------------- 1 file changed, 48 insertions(+), 53 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 4066141990..1605136f3c 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -5,7 +5,6 @@ import tarfile import time from importlib import import_module, reload -from pathlib import Path from unittest import mock import pytest @@ -16,15 +15,13 @@ @pytest.fixture(scope="function") def tarfile_path(tmp_path): - with open( - tmp_path.joinpath("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmp_path.joinpath("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.joinpath("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmp_path.joinpath("test_file.txt").strpath) - return Path(tmp_path.joinpath("test_tar.tar.gz").strpath) + 
archive.add(tmp_path.joinpath("test_file.txt")) + return tmp_path.joinpath("test_tar.tar.gz") def test_version(script_runner): @@ -59,7 +56,7 @@ def test_citation(script_runner, flag): # see test_import.py for the same (detailed) test def test_import_prepHistFactory(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' @@ -72,7 +69,7 @@ def test_import_prepHistFactory(tmp_path, script_runner): def test_import_prepHistFactory_withProgress(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success assert ret.stdout == '' @@ -91,10 +88,10 @@ def test_import_prepHistFactory_stdout(tmp_path, script_runner): def test_import_prepHistFactory_and_fit(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f"pyhf fit {temp.strpath:s}" + command = f"pyhf fit {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -109,7 +106,7 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): "LogNormExample", "ConstExample", ]: - command = f"pyhf fit {temp.strpath:s} --value --measurement {measurement:s}" + command = f"pyhf fit {temp} --value --measurement {measurement:s}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -130,10 +127,10 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): def test_import_prepHistFactory_and_cls(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s}' + command = f'pyhf cls {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -148,7 +145,7 @@ def test_import_prepHistFactory_and_cls(tmp_path, script_runner): 'LogNormExample', 'ConstExample', ]: - command = f'pyhf cls {temp.strpath:s} --measurement {measurement:s}' + command = f'pyhf cls {temp} --measurement {measurement:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -171,7 +168,7 @@ def test_import_usingMounts(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v 
{data}:/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' + command = f'pyhf xml2json --hide-progress -v {data}:/absolute/path/to -v {data}:/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -187,7 +184,7 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): data = datadir.joinpath("xmlimport_absolutePaths") temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp.strpath:s} {data.joinpath("config/example.xml")}' + command = f'pyhf xml2json --hide-progress -v {data}::/absolute/path/to -v {data}/another/absolute/path/to --output-file {temp} {data.joinpath("config/example.xml")}' ret = script_runner.run(shlex.split(command)) assert not ret.success @@ -198,10 +195,10 @@ def test_import_usingMounts_badDelimitedPaths(datadir, tmp_path, script_runner): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) def test_fit_backend_option(tmp_path, script_runner, backend): temp = tmp_path.joinpath("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) - command = f"pyhf fit --backend {backend:s} {temp.strpath:s}" + command = f"pyhf fit --backend {backend:s} {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -213,10 +210,10 @@ def test_fit_backend_option(tmp_path, script_runner, backend): @pytest.mark.parametrize("backend", ["numpy", "tensorflow", "pytorch", "jax"]) def test_cls_backend_option(tmp_path, script_runner, backend): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls --backend {backend:s} {temp.strpath:s}' + command = f'pyhf cls --backend {backend:s} {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -228,10 +225,10 @@ def test_cls_backend_option(tmp_path, script_runner, backend): def test_import_and_export(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output').strpath:s}" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output').strpath:s}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -246,23 +243,23 @@ def test_patch(tmp_path, script_runner): ) temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = 
f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" ret = script_runner.run(shlex.split(command)) assert ret.success - command = f'pyhf cls {temp.strpath:s} --patch -' + command = f'pyhf cls {temp} --patch -' ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output_2').strpath:s} --patch -" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_2').strpath:s} --patch -" ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success @@ -273,24 +270,24 @@ def test_patch_fail(tmp_path, script_runner): patch.write('''not,json''') temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch.strpath:s}' ret = script_runner.run(shlex.split(command)) assert not ret.success - command = f"pyhf json2xml {temp.strpath:s} --output-dir {tmp_path.mkdir('output').strpath:s} --patch {patch.strpath:s}" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output').strpath:s} --patch {patch.strpath:s}" ret = script_runner.run(shlex.split(command)) assert not ret.success def test_bad_measurement_name(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp.strpath:s} --measurement "a-fake-measurement-name"' + command = f'pyhf cls {temp} --measurement "a-fake-measurement-name"' ret = script_runner.run(shlex.split(command)) assert not ret.success # assert 'no measurement by name' in ret.stderr # numpy swallows the log.error() here, dunno why @@ -298,14 +295,14 @@ def test_bad_measurement_name(tmp_path, script_runner): def test_testpoi(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) pois = [1.0, 0.5, 0.001] results_exp = [] results_obs = [] for test_poi in pois: - command = f'pyhf cls {temp.strpath:s} --test-poi {test_poi:f}' + command = f'pyhf cls {temp} --test-poi {test_poi:f}' ret = 
script_runner.run(shlex.split(command)) assert ret.success @@ -333,7 +330,7 @@ def test_testpoi(tmp_path, script_runner): ) def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): temp = tmp_path.joinpath("parsed_output.json") - command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}" + command = f"pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}" ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) @@ -349,7 +346,7 @@ def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): ) def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s}' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) @@ -361,21 +358,21 @@ def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): def test_inspect(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f'pyhf inspect {temp.strpath:s}' + command = f'pyhf inspect {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success def test_inspect_outfile(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("inspect_output.json") - command = f'pyhf inspect {temp.strpath:s} --output-file {tempout.strpath:s}' + command = f'pyhf inspect {temp} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -398,23 +395,21 @@ def test_inspect_outfile(tmp_path, script_runner): def test_prune(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = ( - f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s}" - ) + command = f"pyhf prune -m staterror_channel1 --measurement GammaExample {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success def test_prune_outfile(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json 
validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("prune_output.json") - command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp.strpath:s} --output-file {tempout.strpath:s}' + command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -430,21 +425,21 @@ def test_prune_outfile(tmp_path, script_runner): def test_rename(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s}' + command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success def test_rename_outfile(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("rename_output.json") - command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp.strpath:s} --output-file {tempout.strpath:s}' + command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout.strpath:s}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -746,7 +741,7 @@ def test_patchset_apply(datadir, tmp_path, script_runner, output_file): def test_sort(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) command = f'pyhf sort {temp.strpath}' @@ -757,7 +752,7 @@ def test_sort(tmp_path, script_runner): def test_sort_outfile(tmp_path, script_runner): temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("sort_output.json") From 238da4021a389c1ee00bc6925469a2bd176326d8 Mon Sep 17 
00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 01:22:21 -0600 Subject: [PATCH 04/17] tempout.strpath to tempout --- tests/test_scripts.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 1605136f3c..4a730a7847 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -372,7 +372,7 @@ def test_inspect_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("inspect_output.json") - command = f'pyhf inspect {temp} --output-file {tempout.strpath:s}' + command = f'pyhf inspect {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -409,7 +409,7 @@ def test_prune_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("prune_output.json") - command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout.strpath:s}' + command = f'pyhf prune -m staterror_channel1 --measurement GammaExample {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -439,7 +439,7 @@ def test_rename_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("rename_output.json") - command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout.strpath:s}' + command = f'pyhf rename -m staterror_channel1 staterror_channelone --measurement GammaExample GamEx {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -509,7 +509,9 @@ def test_combine_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("combined_output.json") - command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout.strpath:s}' + command = ( + f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout}' + ) ret = script_runner.run(shlex.split(command)) assert ret.success From 78e881b342f2655b8debefa1aebaa18b299f1b5b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 01:24:09 -0600 Subject: [PATCH 05/17] remove use of .strpath:s in f-strings --- tests/test_scripts.py | 34 ++++++++++++++++++---------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 4a730a7847..2d22312d94 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -117,7 +117,7 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): tmp_out = tmp_path.joinpath(f"{measurement:s}_output.json") # make sure output file works too - command += f" --output-file {tmp_out.strpath:s}" + command += f" --output-file {tmp_out}" ret = script_runner.run(shlex.split(command)) assert ret.success ret_json = json.load(tmp_out) @@ -156,7 +156,7 @@ def test_import_prepHistFactory_and_cls(tmp_path, script_runner): tmp_out = tmp_path.joinpath(f'{measurement:s}_output.json') # make sure output file works too - command += f' --output-file {tmp_out.strpath:s}' + command += f' --output-file {tmp_out}' ret = script_runner.run(shlex.split(command)) assert ret.success d = json.load(tmp_out) @@ -228,7 +228,7 @@ def test_import_and_export(tmp_path, script_runner): command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = 
script_runner.run(shlex.split(command)) - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output').strpath:s}" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output')}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -246,11 +246,11 @@ def test_patch(tmp_path, script_runner): command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch}' ret = script_runner.run(shlex.split(command)) assert ret.success - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_1').strpath:s} --patch {patch.strpath:s}" + command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_1')} --patch {patch}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -259,7 +259,9 @@ def test_patch(tmp_path, script_runner): ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_2').strpath:s} --patch -" + command = ( + f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_2')} --patch -" + ) ret = script_runner.run(shlex.split(command), stdin=patch) assert ret.success @@ -273,11 +275,13 @@ def test_patch_fail(tmp_path, script_runner): command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f'pyhf cls {temp} --patch {patch.strpath:s}' + command = f'pyhf cls {temp} --patch {patch}' ret = script_runner.run(shlex.split(command)) assert not ret.success - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output').strpath:s} --patch {patch.strpath:s}" + command = ( + f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output')} --patch {patch}" + ) ret = script_runner.run(shlex.split(command)) assert not ret.success @@ -460,7 +464,7 @@ def test_rename_outfile(tmp_path, script_runner): def test_combine(tmp_path, script_runner): temp_1 = tmp_path.joinpath("parsed_output.json") temp_2 = tmp_path.joinpath("renamed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} @@ -477,10 +481,10 @@ def test_combine(tmp_path, script_runner): _opts_measurements = ''.join( ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) - command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" + command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}" ret = script_runner.run(shlex.split(command)) - command = f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s}' + command = f'pyhf combine {temp_1} {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -488,7 +492,7 @@ def test_combine(tmp_path, script_runner): def test_combine_outfile(tmp_path, script_runner): temp_1 = tmp_path.joinpath("parsed_output.json") temp_2 = tmp_path.joinpath("renamed_output.json") - command = f'pyhf xml2json 
validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath:s} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) rename_channels = {'channel1': 'channel2'} @@ -505,13 +509,11 @@ def test_combine_outfile(tmp_path, script_runner): _opts_measurements = ''.join( ' --measurement ' + ' '.join(item) for item in rename_measurements.items() ) - command = f"pyhf rename {temp_1.strpath:s} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2.strpath:s}" + command = f"pyhf rename {temp_1} {_opts_channels:s} {_opts_measurements:s} --output-file {temp_2}" ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("combined_output.json") - command = ( - f'pyhf combine {temp_1.strpath:s} {temp_2.strpath:s} --output-file {tempout}' - ) + command = f'pyhf combine {temp_1} {temp_2} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success From e6374dcb9be2f45ec87cefa182c20b8e8c42610b Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 01:25:41 -0600 Subject: [PATCH 06/17] Remove all use of .strpath --- tests/test_scripts.py | 36 +++++++++++++++++------------------- 1 file changed, 17 insertions(+), 19 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 2d22312d94..78845adc7e 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -338,7 +338,7 @@ def test_fit_optimizer(tmp_path, script_runner, optimizer, opts, success): ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) - command = f"pyhf fit --optimizer {optimizer} {optconf} {temp.strpath}" + command = f"pyhf fit --optimizer {optimizer} {optconf} {temp}" ret = script_runner.run(shlex.split(command)) assert ret.success == success @@ -354,7 +354,7 @@ def test_cls_optimizer(tmp_path, script_runner, optimizer, opts, success): ret = script_runner.run(shlex.split(command)) optconf = " ".join(f"--optconf {opt}" for opt in opts) - command = f'pyhf cls {temp.strpath} --optimizer {optimizer} {optconf}' + command = f'pyhf cls {temp} --optimizer {optimizer} {optconf}' ret = script_runner.run(shlex.split(command)) assert ret.success == success @@ -526,18 +526,16 @@ def test_combine_outfile(tmp_path, script_runner): def test_combine_merge_channels(tmp_path, script_runner): temp_1 = tmp_path.joinpath("parsed_output.json") temp_2 = tmp_path.joinpath("renamed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1.strpath} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp_1} --hide-progress' ret = script_runner.run(shlex.split(command)) assert ret.success - command = ( - f'pyhf prune {temp_1.strpath} --sample signal --output-file {temp_2.strpath}' - ) + command = f'pyhf prune {temp_1} --sample signal --output-file {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success - command = f'pyhf combine --merge-channels --join "left outer" {temp_1.strpath} {temp_2.strpath}' + command = f'pyhf combine --merge-channels --join "left outer" {temp_1} {temp_2}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -553,10 +551,12 @@ def test_workspace_digest(tmp_path, script_runner, algorithms, 
do_json): } temp = tmp_path.joinpath("parsed_output.json") - command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp.strpath} --hide-progress' + command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = script_runner.run(shlex.split(command)) - command = f"pyhf digest {temp.strpath} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}" + command = ( + f"pyhf digest {temp} -a {' -a '.join(algorithms)}{' -j' if do_json else ''}" + ) ret = script_runner.run(shlex.split(command)) assert ret.success assert all(algorithm in ret.stdout for algorithm in algorithms) @@ -591,21 +591,19 @@ def test_patchset_download( tmp_path, script_runner, requests_mock, tarfile_path, archive ): requests_mock.get(archive, content=open(tarfile_path, "rb").read()) - command = ( - f'pyhf contrib download {archive} {tmp_path.joinpath("likelihoods").strpath}' - ) + command = f'pyhf contrib download {archive} {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert ret.success # Run with all optional flags - command = f'pyhf contrib download --verbose --force {archive} {tmp_path.joinpath("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force {archive} {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert ret.success requests_mock.get( "https://www.pyhfthisdoesnotexist.org/record/resource/1234567", status_code=200 ) - command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath("likelihoods").strpath}' + command = f'pyhf contrib download --verbose https://www.pyhfthisdoesnotexist.org/record/resource/1234567 {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert ( @@ -617,7 +615,7 @@ def test_patchset_download( requests_mock.get( "https://httpstat.us/404/record/resource/1234567", status_code=404 ) - command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath("likelihoods").strpath}' + command = f'pyhf contrib download --verbose --force https://httpstat.us/404/record/resource/1234567 {tmp_path.joinpath("likelihoods")}' ret = script_runner.run(shlex.split(command)) assert not ret.success assert "gives a response code of 404" in ret.stderr @@ -693,7 +691,7 @@ def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_me temp = tmp_path.joinpath("extracted_output.json") command = f'pyhf patchset extract {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: - command += f" --output-file {temp.strpath}" + command += f" --output-file {temp}" if with_metadata: command += " --with-metadata" @@ -728,7 +726,7 @@ def test_patchset_apply(datadir, tmp_path, script_runner, output_file): temp = tmp_path.joinpath("patched_output.json") command = f'pyhf patchset apply {datadir.joinpath("example_bkgonly.json")} {datadir.joinpath("example_patchset.json")} --name patch_channel1_signal_syst1' if output_file: - command += f" --output-file {temp.strpath}" + command += f" --output-file {temp}" ret = script_runner.run(shlex.split(command)) @@ -748,7 +746,7 @@ def test_sort(tmp_path, script_runner): command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp} --hide-progress' ret = 
script_runner.run(shlex.split(command)) - command = f'pyhf sort {temp.strpath}' + command = f'pyhf sort {temp}' ret = script_runner.run(shlex.split(command)) assert ret.success @@ -760,7 +758,7 @@ def test_sort_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) tempout = tmp_path.joinpath("sort_output.json") - command = f'pyhf sort {temp.strpath} --output-file {tempout.strpath}' + command = f'pyhf sort {temp} --output-file {tempout}' ret = script_runner.run(shlex.split(command)) assert ret.success From 60d17521fd533a208fc017fb47c67b998a14828c Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 01:37:34 -0600 Subject: [PATCH 07/17] Use write_text and read_text --- tests/test_scripts.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 78845adc7e..d6bb7b727f 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -62,7 +62,7 @@ def test_import_prepHistFactory(tmp_path, script_runner): assert ret.stdout == '' assert ret.stderr == '' - parsed_xml = json.loads(temp.read()) + parsed_xml = json.loads(temp.read_text()) spec = {'channels': parsed_xml['channels']} pyhf.schema.validate(spec, 'model.json') @@ -175,7 +175,7 @@ def test_import_usingMounts(datadir, tmp_path, script_runner): assert ret.stdout == '' assert ret.stderr == '' - parsed_xml = json.loads(temp.read()) + parsed_xml = json.loads(temp.read_text()) spec = {'channels': parsed_xml['channels']} pyhf.schema.validate(spec, 'model.json') @@ -236,7 +236,7 @@ def test_import_and_export(tmp_path, script_runner): def test_patch(tmp_path, script_runner): patch = tmp_path.joinpath('patch.json') - patch.write( + patch.write_text( ''' [{"op": "replace", "path": "/channels/0/samples/0/data", "value": [5,6]}] ''' @@ -269,7 +269,7 @@ def test_patch(tmp_path, script_runner): def test_patch_fail(tmp_path, script_runner): patch = tmp_path.joinpath('patch.json') - patch.write('''not,json''') + patch.write_text('''not,json''') temp = tmp_path.joinpath("parsed_output.json") command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' @@ -380,7 +380,7 @@ def test_inspect_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - summary = json.loads(tempout.read()) + summary = json.loads(tempout.read_text()) assert [ 'channels', 'measurements', @@ -417,11 +417,11 @@ def test_prune_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - spec = json.loads(temp.read()) + spec = json.loads(temp.read_text()) ws = pyhf.Workspace(spec) assert 'GammaExample' in ws.measurement_names assert 'staterror_channel1' in ws.model().config.parameters - pruned_spec = json.loads(tempout.read()) + pruned_spec = json.loads(tempout.read_text()) pruned_ws = pyhf.Workspace(pruned_spec) assert 'GammaExample' not in pruned_ws.measurement_names assert 'staterror_channel1' not in pruned_ws.model().config.parameters @@ -447,13 +447,13 @@ def test_rename_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - spec = json.loads(temp.read()) + spec = json.loads(temp.read_text()) ws = pyhf.Workspace(spec) assert 'GammaExample' in ws.measurement_names assert 'GamEx' not in ws.measurement_names assert 'staterror_channel1' in ws.model().config.parameters assert 'staterror_channelone' not in ws.model().config.parameters - renamed_spec = 
json.loads(tempout.read()) + renamed_spec = json.loads(tempout.read_text()) renamed_ws = pyhf.Workspace(renamed_spec) assert 'GammaExample' not in renamed_ws.measurement_names assert 'GamEx' in renamed_ws.measurement_names @@ -517,7 +517,7 @@ def test_combine_outfile(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - combined_spec = json.loads(tempout.read()) + combined_spec = json.loads(tempout.read_text()) combined_ws = pyhf.Workspace(combined_spec) assert combined_ws.channels == ['channel1', 'channel2'] assert len(combined_ws.measurement_names) == 8 @@ -699,7 +699,7 @@ def test_patchset_extract(datadir, tmp_path, script_runner, output_file, with_me assert ret.success if output_file: - extracted_output = json.loads(temp.read()) + extracted_output = json.loads(temp.read_text()) else: extracted_output = json.loads(ret.stdout) if with_metadata: @@ -732,7 +732,7 @@ def test_patchset_apply(datadir, tmp_path, script_runner, output_file): assert ret.success if output_file: - extracted_output = json.loads(temp.read()) + extracted_output = json.loads(temp.read_text()) else: extracted_output = json.loads(ret.stdout) assert extracted_output['channels'][0]['samples'][0]['modifiers'][0]['data'] == { From 836bb665a1d268feaea4bb7ede765031c82ff684 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Sun, 19 Nov 2023 02:16:12 -0600 Subject: [PATCH 08/17] fix this: The '-' isn't working apparently --- tests/test_scripts.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index d6bb7b727f..06b9a66592 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -228,7 +228,10 @@ def test_import_and_export(tmp_path, script_runner): command = f'pyhf xml2json validation/xmlimport_input/config/example.xml --basedir validation/xmlimport_input/ --output-file {temp}' ret = script_runner.run(shlex.split(command)) - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output')}" + output_dir_path = tmp_path / "output" + output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path}" ret = script_runner.run(shlex.split(command)) assert ret.success @@ -250,19 +253,23 @@ def test_patch(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert ret.success - command = f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_1')} --patch {patch}" + output_dir_path = tmp_path / "output_1" + output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch {patch}" ret = script_runner.run(shlex.split(command)) assert ret.success command = f'pyhf cls {temp} --patch -' - ret = script_runner.run(shlex.split(command), stdin=patch) + ret = script_runner.run(shlex.split(command), stdin=patch.read_text()) assert ret.success - command = ( - f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output_2')} --patch -" - ) - ret = script_runner.run(shlex.split(command), stdin=patch) + output_dir_path = tmp_path / "output_2" + output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch -" + ret = script_runner.run(shlex.split(command), stdin=patch.read_text()) assert ret.success @@ -279,9 +286,10 @@ def test_patch_fail(tmp_path, script_runner): ret = script_runner.run(shlex.split(command)) assert not ret.success - command = ( - f"pyhf json2xml {temp} --output-dir {tmp_path.mkdir('output')} --patch {patch}" - ) + output_dir_path = tmp_path / "output" + 
output_dir_path.mkdir() + + command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch {patch}" ret = script_runner.run(shlex.split(command)) assert not ret.success From 4d34dbdafc15def8ee15829eb0b02eeef08f5343 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 11:48:12 -0600 Subject: [PATCH 09/17] use exist_ok for mkdir --- tests/test_scripts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 06b9a66592..0f65f41d08 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -254,7 +254,7 @@ def test_patch(tmp_path, script_runner): assert ret.success output_dir_path = tmp_path / "output_1" - output_dir_path.mkdir() + output_dir_path.mkdir(exist_ok=True) command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch {patch}" ret = script_runner.run(shlex.split(command)) @@ -266,7 +266,7 @@ def test_patch(tmp_path, script_runner): assert ret.success output_dir_path = tmp_path / "output_2" - output_dir_path.mkdir() + output_dir_path.mkdir(exist_ok=True) command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch -" ret = script_runner.run(shlex.split(command), stdin=patch.read_text()) From 6347dbd62f738acdd4da062df83d30b89f1a1c98 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 11:48:37 -0600 Subject: [PATCH 10/17] Use patch.open() instead of .read_text() --- tests/test_scripts.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_scripts.py b/tests/test_scripts.py index 0f65f41d08..9d00814c8e 100644 --- a/tests/test_scripts.py +++ b/tests/test_scripts.py @@ -120,7 +120,7 @@ def test_import_prepHistFactory_and_fit(tmp_path, script_runner): command += f" --output-file {tmp_out}" ret = script_runner.run(shlex.split(command)) assert ret.success - ret_json = json.load(tmp_out) + ret_json = json.load(tmp_out.open()) assert "mle_parameters" in ret_json assert "twice_nll" in ret_json @@ -159,7 +159,7 @@ def test_import_prepHistFactory_and_cls(tmp_path, script_runner): command += f' --output-file {tmp_out}' ret = script_runner.run(shlex.split(command)) assert ret.success - d = json.load(tmp_out) + d = json.load(tmp_out.open()) assert 'CLs_obs' in d assert 'CLs_exp' in d @@ -262,14 +262,14 @@ def test_patch(tmp_path, script_runner): command = f'pyhf cls {temp} --patch -' - ret = script_runner.run(shlex.split(command), stdin=patch.read_text()) + ret = script_runner.run(shlex.split(command), stdin=patch.open()) assert ret.success output_dir_path = tmp_path / "output_2" output_dir_path.mkdir(exist_ok=True) command = f"pyhf json2xml {temp} --output-dir {output_dir_path} --patch -" - ret = script_runner.run(shlex.split(command), stdin=patch.read_text()) + ret = script_runner.run(shlex.split(command), stdin=patch.open()) assert ret.success From f853a6abc9c1b84f28a916b94a51f3c41d184959 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 17:26:43 -0600 Subject: [PATCH 11/17] Use nolegacypath plugin --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index e2a17df696..11dfe98cc8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -175,6 +175,7 @@ addopts = [ "--strict-config", "--doctest-modules", "--doctest-glob='*.rst'", + "-p no:legacypath", ] log_cli_level = "info" testpaths = "tests" From 7f8802ddc8fca4eef82715e87c684d0bafeb3b87 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 19:04:48 -0600 Subject: [PATCH 12/17] Remove .strpath 
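
The .strpath attribute comes from the py.path.local objects returned by
the legacy tmpdir fixture; the pathlib.Path objects returned by tmp_path
have no equivalent, and none is needed, since open(), tarfile.open(),
zipfile.ZipFile(), and str() all accept path-like objects directly.
Roughly, the before/after pattern looks like the sketch below (the file
name is illustrative only, not taken from the tests):

    # legacy tmpdir fixture: py.path.local
    target = tmpdir.join("out.txt").strpath       # explicit str path
    # tmp_path fixture: pathlib.Path
    target = tmp_path / "out.txt"                 # os.PathLike object
    with open(target, "w", encoding="utf-8") as write_file:
        write_file.write("test file")
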
--- tests/contrib/test_contrib_utils.py | 42 +++++++++++++---------------- 1 file changed, 18 insertions(+), 24 deletions(-) diff --git a/tests/contrib/test_contrib_utils.py b/tests/contrib/test_contrib_utils.py index 40ea0ccb26..732c80d7ab 100644 --- a/tests/contrib/test_contrib_utils.py +++ b/tests/contrib/test_contrib_utils.py @@ -11,39 +11,33 @@ @pytest.fixture(scope="function") def tarfile_path(tmp_path): - with open( - tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: + with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmp_path.join("test_tar.tar.gz").strpath, mode="w:gz", encoding="utf-8" + tmp_path.join("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmp_path.join("test_file.txt").strpath) - return Path(tmp_path.join("test_tar.tar.gz").strpath) + archive.add(tmp_path.join("test_file.txt")) + return Path(tmp_path.join("test_tar.tar.gz")) @pytest.fixture(scope="function") def tarfile_uncompressed_path(tmp_path): - with open( - tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: + with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmp_path.join("test_tar.tar").strpath, mode="w", encoding="utf-8" + tmp_path.join("test_tar.tar"), mode="w", encoding="utf-8" ) as archive: - archive.add(tmp_path.join("test_file.txt").strpath) - return Path(tmp_path.join("test_tar.tar").strpath) + archive.add(tmp_path.join("test_file.txt")) + return Path(tmp_path.join("test_tar.tar")) @pytest.fixture(scope="function") def zipfile_path(tmp_path): - with open( - tmp_path.join("test_file.txt").strpath, "w", encoding="utf-8" - ) as write_file: + with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") - with zipfile.ZipFile(tmp_path.join("test_zip.zip").strpath, "w") as archive: - archive.write(tmp_path.join("test_file.txt").strpath) - return Path(tmp_path.join("test_zip.zip").strpath) + with zipfile.ZipFile(tmp_path.join("test_zip.zip"), "w") as archive: + archive.write(tmp_path.join("test_file.txt")) + return Path(tmp_path.join("test_zip.zip")) def test_download_untrusted_archive_host(tmp_path, requests_mock): @@ -51,7 +45,7 @@ def test_download_untrusted_archive_host(tmp_path, requests_mock): requests_mock.get(archive_url) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmp_path.join("likelihoods").strpath) + download(archive_url, tmp_path.join("likelihoods")) def test_download_invalid_archive(tmp_path, requests_mock): @@ -59,14 +53,14 @@ def test_download_invalid_archive(tmp_path, requests_mock): requests_mock.get(archive_url, status_code=404) with pytest.raises(InvalidArchive): - download(archive_url, tmp_path.join("likelihoods").strpath) + download(archive_url, tmp_path.join("likelihoods")) def test_download_compress(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url) - download(archive_url, tmp_path.join("likelihoods").strpath, compress=True) + download(archive_url, tmp_path.join("likelihoods"), compress=True) def test_download_archive_type( @@ -78,7 +72,7 @@ def test_download_archive_type( zipfile_path, ): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" - output_directory = tmp_path.join("likelihoods").strpath + output_directory = tmp_path.join("likelihoods") # Give BytesIO a tarfile 
requests_mock.get(archive_url, content=open(tarfile_path, "rb").read()) download(archive_url, output_directory) @@ -109,6 +103,6 @@ def test_download_archive_force(tmp_path, requests_mock, tarfile_path): ) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmp_path.join("likelihoods").strpath, force=False) + download(archive_url, tmp_path.join("likelihoods"), force=False) - download(archive_url, tmp_path.join("likelihoods").strpath, force=True) + download(archive_url, tmp_path.join("likelihoods"), force=True) From 4bd2dd382c4ad402d69bb7c37140e4279266f6a4 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 19:06:53 -0600 Subject: [PATCH 13/17] switch from join to joinpath --- noxfile.py | 4 ++-- tests/contrib/test_contrib_utils.py | 36 ++++++++++++++--------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/noxfile.py b/noxfile.py index 8104dbd28b..18060ca741 100644 --- a/noxfile.py +++ b/noxfile.py @@ -35,8 +35,8 @@ def tests(session): $ nox --session tests --python 3.11 -- tests/test_tensor.py # run specific tests $ nox --session tests --python 3.11 -- coverage # run with coverage but slower """ - session.install("--upgrade", "--editable", ".[all,test]") - session.install("--upgrade", "pytest") + # session.install("--upgrade", "--editable", ".[all,test]") + # session.install("--upgrade", "pytest") # Allow tests to be run with coverage if "coverage" in session.posargs: diff --git a/tests/contrib/test_contrib_utils.py b/tests/contrib/test_contrib_utils.py index 732c80d7ab..8e64036812 100644 --- a/tests/contrib/test_contrib_utils.py +++ b/tests/contrib/test_contrib_utils.py @@ -11,33 +11,33 @@ @pytest.fixture(scope="function") def tarfile_path(tmp_path): - with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmp_path.join("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" + tmp_path.joinpath("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" ) as archive: - archive.add(tmp_path.join("test_file.txt")) - return Path(tmp_path.join("test_tar.tar.gz")) + archive.add(tmp_path.joinpath("test_file.txt")) + return Path(tmp_path.joinpath("test_tar.tar.gz")) @pytest.fixture(scope="function") def tarfile_uncompressed_path(tmp_path): - with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") with tarfile.open( - tmp_path.join("test_tar.tar"), mode="w", encoding="utf-8" + tmp_path.joinpath("test_tar.tar"), mode="w", encoding="utf-8" ) as archive: - archive.add(tmp_path.join("test_file.txt")) - return Path(tmp_path.join("test_tar.tar")) + archive.add(tmp_path.joinpath("test_file.txt")) + return Path(tmp_path.joinpath("test_tar.tar")) @pytest.fixture(scope="function") def zipfile_path(tmp_path): - with open(tmp_path.join("test_file.txt"), "w", encoding="utf-8") as write_file: + with open(tmp_path.joinpath("test_file.txt"), "w", encoding="utf-8") as write_file: write_file.write("test file") - with zipfile.ZipFile(tmp_path.join("test_zip.zip"), "w") as archive: - archive.write(tmp_path.join("test_file.txt")) - return Path(tmp_path.join("test_zip.zip")) + with zipfile.ZipFile(tmp_path.joinpath("test_zip.zip"), "w") as archive: + archive.write(tmp_path.joinpath("test_file.txt")) + return Path(tmp_path.joinpath("test_zip.zip")) def 
test_download_untrusted_archive_host(tmp_path, requests_mock): @@ -45,7 +45,7 @@ def test_download_untrusted_archive_host(tmp_path, requests_mock): requests_mock.get(archive_url) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmp_path.join("likelihoods")) + download(archive_url, tmp_path.joinpath("likelihoods")) def test_download_invalid_archive(tmp_path, requests_mock): @@ -53,14 +53,14 @@ def test_download_invalid_archive(tmp_path, requests_mock): requests_mock.get(archive_url, status_code=404) with pytest.raises(InvalidArchive): - download(archive_url, tmp_path.join("likelihoods")) + download(archive_url, tmp_path.joinpath("likelihoods")) def test_download_compress(tmp_path, requests_mock): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" requests_mock.get(archive_url) - download(archive_url, tmp_path.join("likelihoods"), compress=True) + download(archive_url, tmp_path.joinpath("likelihoods"), compress=True) def test_download_archive_type( @@ -72,7 +72,7 @@ def test_download_archive_type( zipfile_path, ): archive_url = "https://www.hepdata.net/record/resource/1408476?view=true" - output_directory = tmp_path.join("likelihoods") + output_directory = tmp_path.joinpath("likelihoods") # Give BytesIO a tarfile requests_mock.get(archive_url, content=open(tarfile_path, "rb").read()) download(archive_url, output_directory) @@ -103,6 +103,6 @@ def test_download_archive_force(tmp_path, requests_mock, tarfile_path): ) with pytest.raises(InvalidArchiveHost): - download(archive_url, tmp_path.join("likelihoods"), force=False) + download(archive_url, tmp_path.joinpath("likelihoods"), force=False) - download(archive_url, tmp_path.join("likelihoods"), force=True) + download(archive_url, tmp_path.joinpath("likelihoods"), force=True) From cbd3efb0f4de10e5c3e865e960d83c2464e9a2b0 Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 19:08:46 -0600 Subject: [PATCH 14/17] Remove use of Path directly --- tests/contrib/test_contrib_utils.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/contrib/test_contrib_utils.py b/tests/contrib/test_contrib_utils.py index 8e64036812..4af73adf2d 100644 --- a/tests/contrib/test_contrib_utils.py +++ b/tests/contrib/test_contrib_utils.py @@ -1,6 +1,5 @@ import tarfile import zipfile -from pathlib import Path from shutil import rmtree import pytest @@ -17,7 +16,7 @@ def tarfile_path(tmp_path): tmp_path.joinpath("test_tar.tar.gz"), mode="w:gz", encoding="utf-8" ) as archive: archive.add(tmp_path.joinpath("test_file.txt")) - return Path(tmp_path.joinpath("test_tar.tar.gz")) + return tmp_path.joinpath("test_tar.tar.gz") @pytest.fixture(scope="function") @@ -28,7 +27,7 @@ def tarfile_uncompressed_path(tmp_path): tmp_path.joinpath("test_tar.tar"), mode="w", encoding="utf-8" ) as archive: archive.add(tmp_path.joinpath("test_file.txt")) - return Path(tmp_path.joinpath("test_tar.tar")) + return tmp_path.joinpath("test_tar.tar") @pytest.fixture(scope="function") @@ -37,7 +36,7 @@ def zipfile_path(tmp_path): write_file.write("test file") with zipfile.ZipFile(tmp_path.joinpath("test_zip.zip"), "w") as archive: archive.write(tmp_path.joinpath("test_file.txt")) - return Path(tmp_path.joinpath("test_zip.zip")) + return tmp_path.joinpath("test_zip.zip") def test_download_untrusted_archive_host(tmp_path, requests_mock): @@ -85,7 +84,7 @@ def test_download_archive_type( requests_mock.get(archive_url, content=open(zipfile_path, "rb").read()) # Run without and with existing output_directory to cover both # 
cases of the shutil.rmtree logic - rmtree(Path(output_directory)) + rmtree(output_directory) download(archive_url, output_directory) # without download(archive_url, output_directory) # with From a6ffd6583e594140dfef3e83b6fcab03ee993c7d Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 21:46:52 -0600 Subject: [PATCH 15/17] Revert "Use nolegacypath plugin" This reverts commit f853a6abc9c1b84f28a916b94a51f3c41d184959. --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 11dfe98cc8..e2a17df696 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -175,7 +175,6 @@ addopts = [ "--strict-config", "--doctest-modules", "--doctest-glob='*.rst'", - "-p no:legacypath", ] log_cli_level = "info" testpaths = "tests" From 7d08e032bee1b1cdcc92b291d357726a139c50eb Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Mon, 20 Nov 2023 21:57:13 -0600 Subject: [PATCH 16/17] Restore noxfile --- noxfile.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/noxfile.py b/noxfile.py index 18060ca741..8104dbd28b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -35,8 +35,8 @@ def tests(session): $ nox --session tests --python 3.11 -- tests/test_tensor.py # run specific tests $ nox --session tests --python 3.11 -- coverage # run with coverage but slower """ - # session.install("--upgrade", "--editable", ".[all,test]") - # session.install("--upgrade", "pytest") + session.install("--upgrade", "--editable", ".[all,test]") + session.install("--upgrade", "pytest") # Allow tests to be run with coverage if "coverage" in session.posargs: From 6e59ac798237be0aa40f792e4e9540334937fe1e Mon Sep 17 00:00:00 2001 From: Matthew Feickert Date: Tue, 21 Nov 2023 00:38:04 -0600 Subject: [PATCH 17/17] use joinpath for notebooks --- tests/test_notebooks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_notebooks.py b/tests/test_notebooks.py index 750dc35169..bc005f2201 100644 --- a/tests/test_notebooks.py +++ b/tests/test_notebooks.py @@ -12,7 +12,7 @@ @pytest.fixture() def common_kwargs(tmp_path): - outputnb = tmp_path.join('output.ipynb') + outputnb = tmp_path.joinpath('output.ipynb') return { 'output_path': str(outputnb), 'kernel_name': f'python{sys.version_info.major}',