Skip to content

Add HRP countries to population theme #207

Add HRP countries to population theme

Add HRP countries to population theme #207

GitHub Actions / Test Results failed Jan 26, 2024 in 0s

1 fail, 3 pass in 11s

4 tests   3 ✅  11s ⏱️
1 suite   0 💤
1 file     1 ❌

Results for commit 347bee3.

Annotations

Check warning on line 0 in tests.test_main.TestHAPIPipelines

See this annotation in the file changed.

@github-actions github-actions / Test Results

test_pipelines (tests.test_main.TestHAPIPipelines) failed

test-results.xml [took 9s]
Raw output
FileNotFoundError: [Errno 2] No such file or directory: 'tests/fixtures/input/cod-ps-cmr.json'
self = <test_main.TestHAPIPipelines object at 0x7fddb3563a50>
configuration = {'hdx_prod_site': {'url': 'https://data.humdata.org'}, 'hdx_demo_site': {'url': 'https://demo.data-humdata-org.ahconu....ulation+age_60_64+total', '#population+f+age_65_plus', '#population+m+age_65_plus', '#population+age_65_plus+total']}}}
folder = 'tests/fixtures'

    def test_pipelines(self, configuration, folder):
        configuration["HAPI_countries"] = [
            "AFG",
            "BFA",
            "MLI",
            "NGA",
            "TCD",
            "YEM",
        ]
        with ErrorsOnExit() as errors_on_exit:
            with temp_dir(
                "TestHAPIPipelines",
                delete_on_success=True,
                delete_on_failure=False,
            ) as temp_folder:
                dbpath = join(temp_folder, "test_hapi.db")
                try:
                    remove(dbpath)
                except OSError:
                    pass
                logger.info(f"Creating database {dbpath}")
                with Database(database=dbpath, dialect="sqlite") as session:
                    today = parse_date("2023-10-11")
                    Read.create_readers(
                        temp_folder,
                        join(folder, "input"),
                        temp_folder,
                        False,
                        True,
                        today=today,
                    )
                    logger.info("Initialising pipelines")
                    pipelines = Pipelines(
                        configuration,
                        session,
                        today,
                        errors_on_exit=errors_on_exit,
                        use_live=False,
                    )
                    logger.info("Running pipelines")
>                   pipelines.run()

tests/test_main.py:106: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/hapi/pipelines/app/pipelines.py:162: in run
    self.runner.run()
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/runner.py:655: in run
    self.run_scraper(name, force_run)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/runner.py:629: in run_scraper
    return self.run_one(name, force_run)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/runner.py:584: in run_one
    scraper.run()
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/configurable/scraper.py:469: in run
    file_headers, iterator = self.get_iterator()
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/configurable/scraper.py:152: in get_iterator
    return self.get_reader().read(self.datasetinfo, **self.variables)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/utilities/reader.py:593: in read
    headers, iterator = self.read_hdx(datasetinfo, **kwargs)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/utilities/reader.py:558: in read_hdx
    resource = self.read_hdx_metadata(datasetinfo)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/utilities/reader.py:461: in read_hdx_metadata
    dataset = self.read_dataset(dataset_nameinfo)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/scraper/utilities/reader.py:253: in read_dataset
    dataset = Dataset.load_from_json(saved_path)
../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/data/dataset.py:218: in load_from_json
    jsonobj = load_json(path, loaderror_if_empty=False)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

path = 'tests/fixtures/input/cod-ps-cmr.json', encoding = 'utf-8'
loaderror_if_empty = False

    def load_json(
        path: str, encoding: str = "utf-8", loaderror_if_empty: bool = True
    ) -> Any:
        """Load JSON file into an ordered dictionary (dict for Python 3.7+)
    
        Args:
            path (str): Path to JSON file
            encoding (str): Encoding of file. Defaults to utf-8.
            loaderror_if_empty (bool): Whether to raise LoadError if file is empty. Default to True.
    
        Returns:
            Any: The data from the JSON file
        """
>       with open(path, encoding=encoding) as f:
E       FileNotFoundError: [Errno 2] No such file or directory: 'tests/fixtures/input/cod-ps-cmr.json'

../../../.local/share/hatch/env/virtual/hapi-pipelines/zEFzVURN/test.py3.11/lib/python3.11/site-packages/hdx/utilities/loader.py:85: FileNotFoundError