diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 578043b..e682915 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -13,7 +13,7 @@ jobs: - run: echo "๐Ÿ’ก The ${{ github.repository }} repository has been cloned to the runner." - run: echo "๐Ÿ–ฅ๏ธ The workflow is now ready to test your code on the runner." - name: Installing Devbox โš™๏ธ - uses: jetpack-io/devbox-install-action@v0.8.0 + uses: jetify-com/devbox-install-action@v0.11.0 - name: Install all dependencies ๐Ÿ“ฆ run: devbox run install - name: ๐Ÿงน Linting & Formatting diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index dc95afc..96bbae6 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -18,7 +18,7 @@ jobs: - run: echo "๐Ÿ’ก The ${{ github.repository }} repository has been cloned to the runner." - run: echo "๐Ÿ–ฅ๏ธ The workflow is now ready to test your code on the runner." - name: Installing Devbox โš™๏ธ - uses: jetpack-io/devbox-install-action@v0.8.0 + uses: jetify-com/devbox-install-action@v0.11.0 - name: Install all dependencies ๐Ÿ“ฆ run: devbox run install - name: Release Version ๐Ÿท diff --git a/devbox.json b/devbox.json index 972fda5..5bcd3b5 100644 --- a/devbox.json +++ b/devbox.json @@ -1,6 +1,6 @@ { "$schema": "https://raw.githubusercontent.com/jetpack-io/devbox/0.10.3/.schema/devbox.schema.json", - "packages": ["python@3.12.2"], + "packages": ["python@3.12"], "env": { "VENV_DIR": "$HOME/MyFiles/programming/OpenSource/yaml-to-markdown/.devbox/virtenv/python/.venv", "TWINE_USERNAME": "__token__" @@ -10,30 +10,15 @@ ". 
$VENV_DIR/bin/activate" ], "scripts": { - "install": [ - "pip install -r requirements.txt" - ], - "test": [ - "pytest src/" - ], - "test-cov": [ - "pytest --cov=src/ --cov-report=xml" - ], - "lint": [ - "flake8 src/" - ], - "format-check": [ - "black --check src/" - ], - "format": [ - "black src/" - ], - "build": [ - "rm -rf dist/* && python setup.py sdist bdist_wheel" - ], - "publish": [ - "twine upload dist/*" - ] + "audit": "safety check --short-report -r requirements.txt", + "build": "rm -rf dist/* && python setup.py sdist bdist_wheel", + "format": "ruff check --fix src/ && ruff format src/", + "install": "pip install -r requirements.txt", + "lint": "ruff check src/", + "publish": "twine upload dist/*", + "test": "pytest src/", + "test-cov": "pytest --cov=src/ --cov-report=xml", + "type-check": "mypy --config-file=src/pyproject.toml src/" } } } diff --git a/devbox.lock b/devbox.lock index a97650a..a07e81f 100644 --- a/devbox.lock +++ b/devbox.lock @@ -1,60 +1,60 @@ { "lockfile_version": "1", "packages": { - "python@3.12.2": { - "last_modified": "2024-03-22T11:26:23Z", + "python@3.12": { + "last_modified": "2024-07-07T07:43:47Z", "plugin_version": "0.0.3", - "resolved": "github:NixOS/nixpkgs/a3ed7406349a9335cb4c2a71369b697cecd9d351#python312", + "resolved": "github:NixOS/nixpkgs/b60793b86201040d9dee019a05089a9150d08b5b#python3", "source": "devbox-search", - "version": "3.12.2", + "version": "3.12.4", "systems": { "aarch64-darwin": { "outputs": [ { "name": "out", - "path": "/nix/store/jc5jlynlx561ibqxd6sy12hcqc8p39c9-python3-3.12.2", + "path": "/nix/store/3swy1vadi125g0c1vxqp8ykdr749803j-python3-3.12.4", "default": true } ], - "store_path": "/nix/store/jc5jlynlx561ibqxd6sy12hcqc8p39c9-python3-3.12.2" + "store_path": "/nix/store/3swy1vadi125g0c1vxqp8ykdr749803j-python3-3.12.4" }, "aarch64-linux": { "outputs": [ { "name": "out", - "path": "/nix/store/n5yvl08kxz5llrdiwwxfxyy6wiq2g6lc-python3-3.12.2", + "path": 
"/nix/store/sz2facg15yq3ziqkidb1dkkglwzkkg8a-python3-3.12.4", "default": true }, { "name": "debug", - "path": "/nix/store/bihg62nz0vqqski18cpyppwgqz62blrq-python3-3.12.2-debug" + "path": "/nix/store/19vjjqg7jbfblqapf63nm9ich1xdq9dx-python3-3.12.4-debug" } ], - "store_path": "/nix/store/n5yvl08kxz5llrdiwwxfxyy6wiq2g6lc-python3-3.12.2" + "store_path": "/nix/store/sz2facg15yq3ziqkidb1dkkglwzkkg8a-python3-3.12.4" }, "x86_64-darwin": { "outputs": [ { "name": "out", - "path": "/nix/store/41yqb3sxsx22drhza74icn4x1gfh3h8m-python3-3.12.2", + "path": "/nix/store/3y5wy1i9nq5293knm23mxsj5l6w41h2l-python3-3.12.4", "default": true } ], - "store_path": "/nix/store/41yqb3sxsx22drhza74icn4x1gfh3h8m-python3-3.12.2" + "store_path": "/nix/store/3y5wy1i9nq5293knm23mxsj5l6w41h2l-python3-3.12.4" }, "x86_64-linux": { "outputs": [ { "name": "out", - "path": "/nix/store/7yh2ax34jd7fgf17mjfd3c6niw1h2hsj-python3-3.12.2", + "path": "/nix/store/z7xxy35k7620hs6fn6la5fg2lgklv72l-python3-3.12.4", "default": true }, { "name": "debug", - "path": "/nix/store/mq8jh0sl1lcpk592whzw96n52grhq8wl-python3-3.12.2-debug" + "path": "/nix/store/3x6jqv5yw212v8rlwql88cn94dginq32-python3-3.12.4-debug" } ], - "store_path": "/nix/store/7yh2ax34jd7fgf17mjfd3c6niw1h2hsj-python3-3.12.2" + "store_path": "/nix/store/z7xxy35k7620hs6fn6la5fg2lgklv72l-python3-3.12.4" } } } diff --git a/requirements.txt b/requirements.txt index dddc0c7..c6d8d96 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,32 +1,24 @@ click==8.1.7 -jsonschema[format]==4.21.1 +jsonschema[format]==4.23.0 pyyaml==6.0.1 # Dev dependencies -coverage==7.4.4 +coverage==7.6.0 mock==5.1.0 -pytest==8.1.1 +pytest==8.3.2 pytest-cov==5.0.0 -bandit==1.7.8 -black==24.4.0 -flake8==7.0.0 -flake8-bandit==4.1.1 -flake8-black==0.3.6 -flake8-bugbear==24.4.21 -flake8-functions==0.0.8 -isort==5.13.2 -mypy==1.9.0 -pep8-naming==0.13.3 -safety +mypy==1.11.1 +ruff==0.5.5 +safety==3.2.4 # Packaging -setuptools==69.5.1 -twine==5.0.0 +setuptools==72.1.0 +twine==5.1.1 
wheel==0.43.0 # Typing -types-mock==5.1.0.20240311 +types-mock==5.1.0.20240425 types-orjson==3.6.2 -types-PyYAML==6.0.12.20240311 -types-jsonschema==4.21.0.20240331 +types-PyYAML==6.0.12.20240724 +types-jsonschema==4.23.0.20240712 diff --git a/scripts/verify.sh b/scripts/verify.sh new file mode 100755 index 0000000..976e448 --- /dev/null +++ b/scripts/verify.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +# +# This script is used to verify python code by linting, type checking and checking dependencies. +set -euo pipefail + +echo "Linting python code ..." +devbox run lint + +echo "Type checking python code ..." +devbox run type-check + +echo "Checking dependencies of python code ..." +devbox run audit diff --git a/sonar-project.properties b/sonar-project.properties index 2af7c1a..f673e1f 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -8,3 +8,4 @@ sonar.projectName=yaml-to-markdown sonar.sources=src/ sonar.coverage.exclusions=./.pytest_cache/**,**_test.py,**.xml,**.yaml,**.yml sonar.python.coverage.reportPaths=coverage.xml +sonar.python.version=3.12 diff --git a/src/pyproject.toml b/src/pyproject.toml new file mode 100644 index 0000000..f57ee26 --- /dev/null +++ b/src/pyproject.toml @@ -0,0 +1,76 @@ +[project] +name = "yaml-to-markdown" +requires-python = ">=3.12" + +[tool.coverage.run] +branch = true +source = ["."] +omit = [ + # omit anything in a .local directory anywhere + "*/.local/*", + # Omit any test files + "*/*_test.py", + "setup.py", +] + +# [tool.coverage.xml] +# output = "./coverage.xml" + +[tool.mypy] +python_version="3.12" +platform="linux" + +show_column_numbers=true +show_error_context=false +error_summary=true + +# treat Optional per PEP 484 +strict_optional=true +strict_equality=true + +# https://mypy.readthedocs.io/en/stable/config_file.html#configuring-warnings +warn_redundant_casts=true +warn_unused_ignores=true +warn_unreachable=true +warn_no_return=true +warn_return_any=true + +# Be real strict on using types 
+check_untyped_defs=true +disallow_incomplete_defs=true +disallow_untyped_calls=true +disallow_untyped_decorators=false +disallow_untyped_defs=true + +# NOTE(review): the original [[tool.mypy.overrides]] header here had no `module` key, which mypy rejects as invalid config; flag applied globally under [tool.mypy] instead. +# Support for types is not yet available +ignore_missing_imports = true + +[tool.pytest.ini_options] +markers = [ + "integration: marks tests as integration (these are not run by default)." +] + +[tool.ruff] +line-length = 90 +preview = true + +[tool.ruff.lint] +select = [ + "F", "E", "W", "C90", "I", "N", "D2", "UP", "YTT", "ANN", "ASYNC", "S", + "B", "A", "C4", "DTZ", "T10", "FA", "ISC", "ICN", + "LOG", "G", "INP", "PIE", "PT", "Q", "RSE", "RET", "SLF", "SLOT", "SIM", + "TID", "TCH", "ARG", "PTH", "TD", "FIX", "ERA", "PL", "TRY", "FLY", + "PERF", "FURB", "RUF" +] + +ignore = ["E203", "E501", "S101", "S311", "D203", "D211", "D213", "ISC001", "ANN401", "TRY003", "TCH003"] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.pylint] +max-args = 6 + +[tool.ruff.lint.per-file-ignores] +"*_test.py" = ["S105", "FBT", "PLR2004", "PLR6301", "PLC2801"] diff --git a/src/yaml_to_markdown/convert.py b/src/yaml_to_markdown/convert.py index 5fbffd7..02e43a3 100644 --- a/src/yaml_to_markdown/convert.py +++ b/src/yaml_to_markdown/convert.py @@ -1,6 +1,7 @@ -import io import json -from typing import Dict, Any, Optional +import sys +from pathlib import Path +from typing import Any import click import yaml @@ -8,13 +9,13 @@ from yaml_to_markdown.md_converter import MDConverter -def _get_json_data(json_file: str) -> Dict[str, Any]: - with io.open(json_file, "r", encoding="utf-8") as j_file: +def _get_json_data(json_file: str) -> dict[str, Any]: + with Path(json_file).open("r", encoding="utf-8") as j_file: return json.load(j_file) -def _get_yaml_data(yaml_file: str) -> Dict[str, Any]: - with io.open(yaml_file, "r", encoding="utf-8") as y_file: +def _get_yaml_data(yaml_file: str) -> dict[str, Any]: + with Path(yaml_file).open("r", encoding="utf-8") as y_file: return yaml.safe_load(y_file) @@ -46,8 
+47,8 @@ def _help() -> None: @click.option("-h", "--help", "show_help", default=False, is_flag=True) def main( output_file: str, - yaml_file: Optional[str], - json_file: Optional[str], + yaml_file: str | None, + json_file: str | None, show_help: bool, ) -> None: if show_help: @@ -59,20 +60,30 @@ def main( def _verify_inputs( - output_file: str, yaml_file: Optional[str], json_file: Optional[str] + output_file: str, yaml_file: str | None, json_file: str | None ) -> None: if (yaml_file is None and json_file is None) or output_file is None: _help() - exit(1) + sys.exit(1) + + +def _get_data(yaml_file: str | None, json_file: str | None) -> dict[str, Any]: + if json_file: + return _get_json_data(json_file) + if yaml_file: + return _get_yaml_data(yaml_file) + + _help() + sys.exit(1) def convert( - output_file: str, yaml_file: Optional[str] = None, json_file: Optional[str] = None + output_file: str, yaml_file: str | None = None, json_file: str | None = None ) -> None: _verify_inputs(output_file=output_file, yaml_file=yaml_file, json_file=json_file) - data = _get_json_data(json_file) if json_file else _get_yaml_data(yaml_file) - with io.open(output_file, "w", encoding="utf-8") as md_file: + data = _get_data(yaml_file=yaml_file, json_file=json_file) + with Path(output_file).open("w", encoding="utf-8") as md_file: MDConverter().convert(data=data, output_writer=md_file) diff --git a/src/yaml_to_markdown/convert_test.py b/src/yaml_to_markdown/convert_test.py index e49e9a5..2b7eab3 100644 --- a/src/yaml_to_markdown/convert_test.py +++ b/src/yaml_to_markdown/convert_test.py @@ -1,5 +1,6 @@ from io import StringIO -from unittest.mock import mock_open, patch, Mock +from pathlib import Path +from unittest.mock import Mock, mock_open, patch import pytest @@ -15,7 +16,7 @@ def test_convert_with_no_file() -> None: convert(output_file="some.md") -@patch("io.open", new_callable=mock_open(read_data=_JSON_DATA)) +@patch.object(Path, "open", new_callable=mock_open(read_data=_JSON_DATA)) 
def test_convert_with_json_data(mock_open_file: Mock) -> None: # Prepare mock_open_file.return_value.__enter__.return_value = StringIO(_JSON_DATA) @@ -24,11 +25,11 @@ def test_convert_with_json_data(mock_open_file: Mock) -> None: convert(output_file=_OUTPUT_FILE_NAME, json_file="test.json") # Assert - mock_open_file.assert_any_call("test.json", "r", encoding="utf-8") - mock_open_file.assert_any_call(_OUTPUT_FILE_NAME, "w", encoding="utf-8") + mock_open_file.assert_any_call("r", encoding="utf-8") + mock_open_file.assert_any_call("w", encoding="utf-8") -@patch("io.open", new_callable=mock_open()) +@patch.object(Path, "open", new_callable=mock_open()) def test_convert_with_yaml_data(mock_open_file: Mock) -> None: # Prepare data = "key: value" @@ -38,5 +39,5 @@ def test_convert_with_yaml_data(mock_open_file: Mock) -> None: convert(output_file=_OUTPUT_FILE_NAME, yaml_file="test.yaml") # Assert - mock_open_file.assert_any_call("test.yaml", "r", encoding="utf-8") - mock_open_file.assert_any_call(_OUTPUT_FILE_NAME, "w", encoding="utf-8") + mock_open_file.assert_any_call("r", encoding="utf-8") + mock_open_file.assert_any_call("w", encoding="utf-8") diff --git a/src/yaml_to_markdown/md_converter.py b/src/yaml_to_markdown/md_converter.py index 74a99fe..aef25d8 100644 --- a/src/yaml_to_markdown/md_converter.py +++ b/src/yaml_to_markdown/md_converter.py @@ -1,23 +1,22 @@ from __future__ import annotations -from typing import Any, Dict, IO, List, Optional, Union, Callable +from collections.abc import Callable +from typing import IO, Any from yaml_to_markdown.utils import convert_to_title_case class MDConverter: def __init__(self) -> None: - """ - Converter to convert a JSON object into Markdown. 
- """ - self._sections: Optional[List[str]] = None - self._custom_processors: Optional[ - Dict[str, Callable[[MDConverter, Optional[str], Any, int], str]] - ] = None + """Converter to convert a JSON object into Markdown.""" + self._sections: list[str] | None = None + self._custom_processors: ( + dict[str, Callable[[MDConverter, str | None, Any, int], str]] | None + ) = None + + def set_selected_sections(self, sections: list[str]) -> None: + """Set the sections (JSON keys) to include in the Markdown. - def set_selected_sections(self, sections: List[str]) -> None: - """ - Set the sections (JSON keys) to include in the Markdown. By default, all sections will be included. Args: @@ -27,13 +26,14 @@ def set_selected_sections(self, sections: List[str]) -> None: def set_custom_section_processors( self, - custom_processors: Dict[ - str, Callable[[MDConverter, Optional[str], Any, int], str] + custom_processors: dict[ + str, Callable[[MDConverter, str | None, Any, int], str] ], ) -> None: - """ - Set custom section processors, the key must match a section name/key - and the processor must take 4 arguments and return a Markdown string: + """Set custom section processors. + + The key must match a section name/key and the processor must take + 4 arguments and return a Markdown string: converter (MDConverter): The current converter object. section ([str]): The section key data (Union[List[Any], Dict[str, Any], str]): The data for the section @@ -46,69 +46,70 @@ def set_custom_section_processors( def convert( self, - data: Union[Dict[str, Union[List[Any], Dict[str, Any], str]], List[Any]], + data: ( + dict[str, str | list[Any] | list[dict[str, str]] | dict[str, Any]] + | list[Any] + ), output_writer: IO[str], ) -> None: - """ - Convert the given JSON object into Markdown. + """Convert the given JSON object into Markdown. 
Args: - data (Union[Dict[str, Union[List[Any], Dict[str, Any], str]], List[Any]]): + data (dict[str, str] | dict[str, list[Any]] | dict[str, list[dict[str, str]]] + | dict[str, dict[str, Any]] | list[Any]): The JSON object to convert, either a dictionary or a list. output_writer (IO[str]): The output stream object to write the Markdown to. """ if isinstance(data, dict): - self._process_dict(data, output_writer) + self._process_dict(data, output_writer) # type: ignore elif isinstance(data, list): self._process_dict({None: data}, output_writer) def _process_dict( self, - data: Dict[Optional[str], Any], + data: dict[str | None, str | list[Any] | list[dict[str, str]] | dict[str, Any]], output_writer: IO[str], ) -> None: - for section in self._sections if self._sections is not None else data.keys(): + for section in self._sections if self._sections is not None else data: if section in data: output_writer.write(self.process_section(section, data.get(section))) def process_section( self, - section: Optional[str], - data: Union[List[Any], Dict[str, Any], str], + section: str | None, + data: Any | list[Any] | dict[str, Any] | str, level: int = 2, ) -> str: section_title = ( f" {convert_to_title_case(section)}" if section is not None else "" ) + head_str = "#" * level if self._custom_processors and section in self._custom_processors: section_str = self._custom_processors[section](self, section, data, level) elif isinstance(data, list): - section_str = ( - f"{'#' * level}{section_title}\n{self._process_list(data=data)}" - ) + section_str = f"{head_str}{section_title}\n{self._process_list(data=data)}" elif isinstance(data, dict): - section_str = f"{'#' * level}{section_title}\n" - for section in data.keys(): - section_str += self.process_section( - section, data.get(section), level=level + 1 - ) + section_str = f"{head_str}{section_title}\n" + for sec in data: + section_str += self.process_section(sec, data.get(sec), level=level + 1) else: - section_str = 
self._get_str(section, data, level) + section_str = self._get_str( + section if section is not None else "", data, level + ) return f"{section_str}\n" - def _process_list(self, data: List[Any]) -> str: + def _process_list(self, data: list[Any]) -> str: if isinstance(data[0], dict): return self._process_table(data) - elif isinstance(data[0], list): + if isinstance(data[0], list): list_str = "" for item in data: list_str += f"{self._process_list(item)}\n" return list_str - else: - return "\n".join([f"* {item}" for item in data]) + return "\n".join([f"* {item}" for item in data]) - def _process_table(self, data: List[Dict[str, str]]) -> str: + def _process_table(self, data: list[dict[str, str]]) -> str: columns = self._get_columns(data) table_str = self._process_columns(columns) for row in data: @@ -118,16 +119,16 @@ def _process_table(self, data: List[Dict[str, str]]) -> str: return table_str @staticmethod - def _process_columns(columns: List[str]) -> str: + def _process_columns(columns: list[str]) -> str: column_titles = " | ".join([convert_to_title_case(col) for col in columns]) col_sep = " | ".join(["---" for _ in columns]) return f"| {column_titles} |\n| {col_sep} |" @staticmethod - def _get_columns(data: List[Dict[str, Any]]) -> List[str]: - columns: List[str] = [] + def _get_columns(data: list[dict[str, Any]]) -> list[str]: + columns: list[str] = [] for row in data: - for col in row.keys(): + for col in row: if col not in columns: columns.append(col) return columns @@ -138,29 +139,37 @@ def _get_str(self, text: str, data: Any, level: int) -> str: if isinstance(data, list): lst_str = "".join([f"
<li>{item}</li>" for item in data]) return f"<ul>{lst_str}</ul>" - elif self._is_image(str_data): + if self._is_image(str_data): return f"{prefix}![{convert_to_title_case(text)}]({str_data})" - elif self._is_link(str_data): + if self._is_link(str_data): return f"{prefix}[{convert_to_title_case(text)}]({str_data})" - else: - value = str_data.replace("\n", "<br/>") - if level > 0: - value = f"{'#' * level} {convert_to_title_case(text)}\n{value}" - return value + value = str_data.replace("\n", "<br/>") + if level > 0: + head_str = "#" * level + value = f"{head_str} {convert_to_title_case(text)}\n{value}" + return value @staticmethod def _is_image(data: str) -> bool: file_ext = data.split(".")[-1] - return file_ext and file_ext.lower() in ("png", "jpg", "jpeg", "gif", "svg") + return file_ext is not None and file_ext.lower() in { + "png", + "jpg", + "jpeg", + "gif", + "svg", + } @staticmethod def _is_link(data: str) -> bool: file_ext = data.split(".")[-1] + min_file_ext_len = 3 + max_file_ext_len = 4 return ( "\n" not in data and "." in data and file_ext is not None - and (len(file_ext) == 4 or len(file_ext) == 3) + and (len(file_ext) == max_file_ext_len or len(file_ext) == min_file_ext_len) ) or ( data.lower().startswith("http") or data.lower().startswith("./") diff --git a/src/yaml_to_markdown/md_converter_test.py b/src/yaml_to_markdown/md_converter_test.py index d16efc2..966c6e3 100644 --- a/src/yaml_to_markdown/md_converter_test.py +++ b/src/yaml_to_markdown/md_converter_test.py @@ -1,9 +1,9 @@ from copy import deepcopy from io import StringIO -from typing import Dict, Any +from typing import Any +from unittest import mock import pytest -from mock import mock from yaml_to_markdown.md_converter import MDConverter @@ -41,7 +41,10 @@ def test_process_list(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = [["list1 data1", "list1 data2"], ["list2 data1", "list2 data2"]] + data = [ + ["list1 data1", "list1 data2"], + ["list2 data1", "list2 data2"], + ] md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -60,7 +63,7 @@ def test_process_list_of_list(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": "data1"} + data: dict[str, Any] = {"section1": "data1"} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -75,7 +78,7 @@ def 
test_process_section_with_str(self) -> None: def test_process_section_with_list_str(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": _LIST_ITEMS} + data: dict[str, Any] = {"section1": _LIST_ITEMS} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -90,7 +93,7 @@ def test_process_section_with_list_str(self) -> None: def test_process_section_with_list_dict(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": _TABLE_ITEMS} + data: dict[str, Any] = {"section1": _TABLE_ITEMS} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -107,7 +110,7 @@ def test_process_section_with_list_dict(self) -> None: def test_process_section_with_list_list(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = { + data: dict[str, Any] = { "section1": [ ["R1C1", "R1C2"], ["R2C1", "R2C2"], @@ -131,7 +134,7 @@ def test_process_section_skip_section(self) -> None: output_writer = StringIO() md_converter = MDConverter() md_converter.set_selected_sections(["sec-two"]) - data = {"sec-one": "First section", "sec-two": "Second Section"} + data: dict[str, Any] = {"sec-one": "First section", "sec-two": "Second Section"} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -143,7 +146,7 @@ def test_process_section_skip_section(self) -> None: ) @pytest.mark.parametrize( - "extra_item,expected_output", + ("extra_item", "expected_output"), [ ( { @@ -181,13 +184,13 @@ def test_process_section_skip_section(self) -> None: ], ) def test_process_section( - self, extra_item: Dict[str, Any], expected_output: str + self, extra_item: dict[str, Any], expected_output: str ) -> None: output_writer = StringIO() md_converter = MDConverter() - _table_items = deepcopy(_TABLE_ITEMS) + _table_items = [deepcopy(itm) for itm in _TABLE_ITEMS] _table_items.append(extra_item) - data = { + data: dict[str, Any] = { "section-one": _table_items, 
"section-two": _LIST_ITEMS, } @@ -211,7 +214,7 @@ def test_process_section( def test_process_section_with_image(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": "something.png"} + data: dict[str, Any] = {"section1": "something.png"} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -225,7 +228,7 @@ def test_process_section_with_image(self) -> None: def test_process_section_with_http_link(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": "https://something.html"} + data: dict[str, Any] = {"section1": "https://something.html"} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -239,7 +242,7 @@ def test_process_section_with_http_link(self) -> None: def test_process_section_with_relative_link(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = { + data: dict[str, Any] = { "section0": "My section", "section1": "./something.puml", "section2": "/dit/something.puml", @@ -265,7 +268,7 @@ def test_process_section_different_section_order(self) -> None: output_writer = StringIO() md_converter = MDConverter() md_converter.set_selected_sections(["s3", "s2", "s1", "s4"]) - data = { + data: dict[str, Any] = { "s1": "Sec 1", "s2": "Sec 2", "s3": "Sec 3", @@ -287,7 +290,7 @@ def test_process_section_different_section_order(self) -> None: def test_process_section_with_dict(self) -> None: output_writer = StringIO() md_converter = MDConverter() - data = {"section1": {"key1": "value1", "key2": "value2"}} + data: dict[str, Any] = {"section1": {"key1": "value1", "key2": "value2"}} md_converter.convert(data, output_writer) output = output_writer.getvalue() @@ -311,7 +314,7 @@ def test_process_section_custom_processor(self) -> None: md_converter.set_custom_section_processors( custom_processors={section_name: mock_function} ) - data = {section_name: section_value} + data: dict[str, Any] = {section_name: section_value} 
md_converter.convert(data, output_writer) output_writer.getvalue() @@ -321,7 +324,7 @@ def test_process_section_custom_processor(self) -> None: def test_dummy() -> None: - data = { + data: dict[str, Any] = { "name": "John Doe", "age": 30, "city": "Sydney",