[GraphBolt] Modify __repr__ #6953

Merged
merged 7 commits on Jan 16, 2024
Changes from 2 commits
40 changes: 17 additions & 23 deletions python/dgl/graphbolt/impl/ondisk_dataset.py
@@ -2,6 +2,7 @@
 
 import os
 import shutil
+import textwrap
 from copy import deepcopy
 from typing import Dict, List, Union
 
@@ -308,7 +309,22 @@ def test_set(self) -> Union[ItemSet, ItemSetDict]:
         return self._test_set
 
     def __repr__(self) -> str:
-        return _ondisk_task_str(self)
+        ret = "OnDiskTask({attributes})"
+
+        attributes_str = ""
+
+        attributes = get_attributes(self)
+        attributes.reverse()
+        for attribute in attributes:
+            if attribute[0] == "_":
+                continue
+            value = getattr(self, attribute)
+            attributes_str += f"{attribute}={value},\n"
+        attributes_str = textwrap.indent(
+            attributes_str, " " * len("OnDiskTask(")
+        ).strip()
+
+        return ret.format(attributes=attributes_str)
 
 
 class OnDiskDataset(Dataset):
@@ -721,25 +737,3 @@ def __init__(self, name: str, root: str = "datasets") -> OnDiskDataset:
         extract_archive(zip_file_path, root, overwrite=True)
         os.remove(zip_file_path)
         super().__init__(dataset_dir)
-
-
-def _ondisk_task_str(task: OnDiskTask) -> str:
-    final_str = "OnDiskTask("
-    indent_len = len(final_str)
-
-    def _add_indent(_str, indent):
-        lines = _str.split("\n")
-        lines = [lines[0]] + [" " * indent + line for line in lines[1:]]
-        return "\n".join(lines)
-
-    attributes = get_attributes(task)
-    attributes.reverse()
-    for name in attributes:
-        if name[0] == "_":
-            continue
-        val = getattr(task, name)
-        final_str += (
-            f"{name}={_add_indent(str(val), indent_len + len(name) + 1)},\n"
-            + " " * indent_len
-        )
-    return final_str[:-indent_len] + ")"
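
Note: the new __repr__ relies on the indent-then-strip idiom sketched below. This is a standalone illustration, not part of the PR, and the attribute values are made up; it shows why the formatted string now ends with ",)" on the last attribute line, which is also why the expected strings in the tests further down lose their trailing ")" line.

import textwrap

# Sketch of the idiom used by the new OnDiskTask.__repr__: every attribute
# line is prefixed with len("OnDiskTask(") spaces, then strip() removes the
# prefix from the first line and the trailing newline, so format() places
# the closing ")" right after the final ",".
attributes_str = "train_set=ItemSet(...),\nmetadata={'name': 'nc'},\n"
attributes_str = textwrap.indent(attributes_str, " " * len("OnDiskTask(")).strip()
print("OnDiskTask({attributes})".format(attributes=attributes_str))
# OnDiskTask(train_set=ItemSet(...),
#            metadata={'name': 'nc'},)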
41 changes: 7 additions & 34 deletions python/dgl/graphbolt/impl/torch_based_feature_store.py
@@ -178,28 +178,12 @@ def __repr__(self) -> str:
             ")"
         )
 
-        feature_str = str(self._tensor)
-        feature_str_lines = feature_str.splitlines()
-        if len(feature_str_lines) > 1:
-            feature_str = (
-                feature_str_lines[0]
-                + "\n"
-                + textwrap.indent(
-                    "\n".join(feature_str_lines[1:]), " " * len(" feature=")
-                )
-            )
-
-        metadata_str = str(self.metadata())
-        metadata_str_lines = metadata_str.splitlines()
-        if len(metadata_str_lines) > 1:
-            metadata_str = (
-                metadata_str_lines[0]
-                + "\n"
-                + textwrap.indent(
-                    "\n".join(metadata_str_lines[1:]),
-                    " " * len(" metadata="),
-                )
-            )
+        feature_str = textwrap.indent(
+            str(self._tensor), " " * len(" feature=")
+        ).strip()
+        metadata_str = textwrap.indent(
+            str(self.metadata()), " " * len(" metadata=")
+        ).strip()
 
         return ret.format(feature=feature_str, metadata=metadata_str)
 
@@ -269,16 +253,5 @@ def pin_memory_(self):
 
     def __repr__(self) -> str:
         ret = "TorchBasedFeatureStore(\n" + " {features}\n" + ")"
-
-        features_str = str(self._features)
-        features_str_lines = features_str.splitlines()
-        if len(features_str_lines) > 1:
-            features_str = (
-                features_str_lines[0]
-                + "\n"
-                + textwrap.indent(
-                    "\n".join(features_str_lines[1:]), " " * len(" ")
-                )
-            )
-
+        features_str = textwrap.indent(str(self._features), " ").strip()
         return ret.format(features=features_str)
10 changes: 3 additions & 7 deletions python/dgl/graphbolt/itemset.py
@@ -345,12 +345,8 @@ def __repr__(self) -> str:
             ")"
         )
 
-        itemsets_str = repr(self._itemsets)
-        lines = itemsets_str.splitlines()
-        itemsets_str = (
-            lines[0]
-            + "\n"
-            + textwrap.indent("\n".join(lines[1:]), " " * len(" itemsets="))
-        )
+        itemsets_str = textwrap.indent(
+            repr(self._itemsets), " " * len(" itemsets=")
+        ).strip()
 
         return ret.format(itemsets=itemsets_str, names=self._names)
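
Note: the same pattern keeps a nested multi-line repr aligned under the itemsets= label. A rough standalone sketch (the inner string and the exact spacing are illustrative, not DGL's actual output):

import textwrap

# Every line of the nested repr is prefixed with len("    itemsets=") spaces;
# strip() then drops the prefix from the first line so it sits directly after
# "itemsets=", while the continuation lines stay aligned underneath it.
inner = "{'user': ItemSet(\n    items=(...),\n    names=('seed_nodes',),\n)}"
itemsets_str = textwrap.indent(inner, " " * len("    itemsets=")).strip()
print("ItemSetDict(\n    itemsets={},\n)".format(itemsets_str))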
82 changes: 40 additions & 42 deletions tests/python/pytorch/graphbolt/impl/test_ondisk_dataset.py
@@ -2350,21 +2350,20 @@ def test_OnDiskTask_repr_homogeneous():
task = gb.OnDiskTask(metadata, item_set, item_set, item_set)
expected_str = (
"OnDiskTask(validation_set=ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" train_set=ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" test_set=ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" metadata={'name': 'node_classification'},\n"
")"
" items=(tensor([0, 1, 2, 3, 4]), tensor([5, 6, 7, 8, 9])),\n"
" names=('seed_nodes', 'labels'),\n"
" ),\n"
" metadata={'name': 'node_classification'},)"
)
assert str(task) == expected_str, print(task)
assert repr(task) == expected_str, task


def test_OnDiskTask_repr_heterogeneous():
@@ -2378,39 +2377,38 @@ def test_OnDiskTask_repr_heterogeneous():
task = gb.OnDiskTask(metadata, item_set, item_set, item_set)
expected_str = (
"OnDiskTask(validation_set=ItemSetDict(\n"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" train_set=ItemSetDict(\n"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" test_set=ItemSetDict(\n"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" metadata={'name': 'node_classification'},\n"
")"
" itemsets={'user': ItemSet(\n"
" items=(tensor([0, 1, 2, 3, 4]),),\n"
" names=('seed_nodes',),\n"
" ), 'item': ItemSet(\n"
" items=(tensor([5, 6, 7, 8, 9]),),\n"
" names=('seed_nodes',),\n"
" )},\n"
" names=('seed_nodes',),\n"
" ),\n"
" metadata={'name': 'node_classification'},)"
)
assert str(task) == expected_str, print(task)
assert repr(task) == expected_str, task


def test_OnDiskDataset_load_tasks_selectively():
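
Note: besides the expected strings, the assertions change from "assert str(task) == expected_str, print(task)" to "assert repr(task) == expected_str, task". A minimal standalone sketch of why the old failure message was unhelpful:

# In "assert cond, msg", msg is evaluated only when cond is False.
# print(task) returns None, so the old form printed the object but attached
# None as the AssertionError message; passing the object itself makes its
# repr the failure message.
x = 1
try:
    assert x == 2, print(x)  # prints "1", raises AssertionError(None)
except AssertionError as err:
    assert err.args == (None,)
try:
    assert x == 2, x  # raises AssertionError(1)
except AssertionError as err:
    assert err.args == (1,)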