Commit 59ca252

refactor: formatter and linter all files
1 parent 4a3ad01 commit 59ca252

6 files changed: +20 -11 lines changed

graph_datasets/datasets/linkx.py

Lines changed: 6 additions & 5 deletions
@@ -1,7 +1,7 @@
 """Datasets from the paper `LINKX <https://github.com/CUAI/Non-Homophily-Large-Scale>`_.
 """

-# pylint:disable=invalid-name,duplicate-code
+# pylint:disable=invalid-name,duplicate-code,too-many-branches
 import os
 from typing import Tuple

@@ -49,7 +49,7 @@ def even_quantile_labels(vals, n_classes, verbosity: int = 0):
     if verbosity and verbosity > 1:
         logger.info("Generated Class Label Intervals:")
         for class_idx, interval in enumerate(interval_lst):
-            logger.info(f"Class {class_idx}: [{interval[0]}, {interval[1]})]")
+            logger.info("%s", f"Class {class_idx}: [{interval[0]}, {interval[1]})]")
     return label


@@ -138,6 +138,8 @@ def load_linkx_data(
             directory=directory,
             verbosity=verbosity,
         )
+    else:
+        raise ValueError(f"{dataset_name} in LINKX not supported.")

     if verbosity and verbosity > 1:
         print_dataset_info(
@@ -284,9 +286,8 @@ def load_twitch_gamers_data(
     one_hot = {k: v for v, k in enumerate(nodes["language"].unique())}
     nodes["language"] = [one_hot[lang] for lang in nodes["language"]]

-    if task is not None:
-        label = torch.tensor(nodes[task].to_numpy())
-        features = torch.tensor(nodes.drop(task, axis=1).to_numpy(), dtype=torch.float)
+    label = torch.tensor(nodes[task].to_numpy())
+    features = torch.tensor(nodes.drop(task, axis=1).to_numpy(), dtype=torch.float)

     if normalize:
         features = features - features.mean(dim=0, keepdim=True)
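
A note on the logging change above: wrapping the f-string as `logger.info("%s", f"...")` keeps the first argument a literal format string, which is what pylint's logging-fstring-interpolation check looks for, while still formatting the message eagerly. A minimal sketch of the fully lazy alternative (the interval values below are hypothetical, not taken from the repository):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Fully lazy variant: %-style arguments are only interpolated if the
    # record is actually emitted, so no string is built for filtered levels.
    interval_lst = [(0.0, 1.5), (1.5, 3.0)]  # hypothetical intervals
    for class_idx, interval in enumerate(interval_lst):
        logger.info("Class %s: [%s, %s)]", class_idx, interval[0], interval[1])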

graph_datasets/datasets/ogb.py

Lines changed: 3 additions & 2 deletions
@@ -1,6 +1,7 @@
 """Datasets from `OGB <https://github.com/snap-stanford/ogb>`_.
 """

+# pylint: disable=invalid-name,
 import traceback
 from typing import Tuple

@@ -59,7 +60,7 @@ def load_ogb_data(

     try:
         splits = dataset.get_idx_split()
-    except Exception as _:
+    except RuntimeError as _:
         traceback.print_exc()
         splits = None

@@ -98,7 +99,7 @@ def load_ogb_data(
 def to_sparse_tensor(edge_index, edge_feat, num_nodes):
     """converts the edge_index into SparseTensor"""
     num_edges = edge_index.size(1)
-    (row, col), N, E = edge_index, num_nodes, num_edges
+    (row, col), N, _ = edge_index, num_nodes, num_edges
     perm = (col * N + row).argsort()
     row, col = row[perm], col[perm]
     value = edge_feat[perm]
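
For context on the `to_sparse_tensor` hunk: `(col * N + row).argsort()` sorts the edge list by (column, row). A small self-contained check of what that permutation does, using a toy `edge_index` that is not taken from any dataset:

    import torch

    # Toy example: 3 nodes, 4 edges, stored as a 2 x E index tensor.
    edge_index = torch.tensor([[2, 0, 1, 0],
                               [0, 1, 0, 2]])
    (row, col), N = edge_index, 3
    # Sort edges by (col, row); same permutation as in the diff above.
    perm = (col * N + row).argsort()
    print(edge_index[:, perm])
    # tensor([[1, 2, 0, 0],
    #         [0, 0, 1, 2]])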

graph_datasets/load_data.py

Lines changed: 6 additions & 2 deletions
@@ -1,7 +1,7 @@
 """Load Graph Datasets
 """

-# pylint:disable=protected-access
+# pylint:disable=protected-access,too-many-locals,too-many-branches,too-many-statements,
 import ssl
 from typing import Tuple, Union

@@ -188,8 +188,12 @@ def load_data(
         graph.directed = not to_simple

     if verbosity:
+        dm = (
+            f"{source.upper()} undirected {dataset_name}\n"
+            f"add_self_loop={add_self_loop} rm_self_loop={rm_self_loop}"
+        )
         print_dataset_info(
-            dataset_name=f"{source.upper()} undirected {dataset_name}\n add_self_loop={add_self_loop} rm_self_loop={rm_self_loop}",
+            dataset_name=dm,
             n_nodes=graph.num_nodes(),
             n_edges=graph.num_edges(),
             n_feats=graph.ndata["feat"].shape[1],
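
The `load_data.py` hunk replaces an over-long keyword argument with a `dm` variable built from two adjacent f-strings inside parentheses; Python concatenates adjacent string literals at compile time, so `dm` is a single string. A quick illustration with made-up values (the assignments below are hypothetical, not taken from the library):

    # Hypothetical values just to show the concatenation behaviour.
    source, dataset_name = "dgl", "cora"
    add_self_loop, rm_self_loop = False, True
    dm = (
        f"{source.upper()} undirected {dataset_name}\n"
        f"add_self_loop={add_self_loop} rm_self_loop={rm_self_loop}"
    )
    print(dm)
    # DGL undirected cora
    # add_self_loop=False rm_self_loop=True

Note that the rewritten message drops the space that used to follow the `\n` in the old one-line argument.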

graph_datasets/utils/common.py

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ def tab_printer(
     table.set_cols_dtype(cols_dtype)
     table.add_rows(params)

-    logger.info(f"\n{table.draw()}")
+    logger.info("%s", f"\n{table.draw()}")

     return table.draw()


graph_datasets/utils/logging.py

Lines changed: 1 addition & 0 deletions
@@ -1,6 +1,7 @@
 """Logger: Adapted from the optuna.logging.
 """

+# pylint: disable=global-statement,
 from __future__ import annotations

 import logging

tests/test.py

Lines changed: 3 additions & 1 deletion
@@ -1,6 +1,8 @@
 """Test
 """

+import sys
+
 # pylint:disable=duplicate-code
 from graph_datasets import load_data
 from graph_datasets.data_info import DATASETS
@@ -65,4 +67,4 @@ def main(_source, _dataset):
     for source, datasets in DATASETS.items():
         for dataset in datasets:
             main(source, dataset)
-    exit(1)
+    sys.exit(1)
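
On the final `tests/test.py` change: `exit()` is an interactive helper injected by the `site` module and can be missing (for example under `python -S`), so pylint's consider-using-sys-exit check asks for `sys.exit()` instead. A minimal sketch of the usual entry-point pattern; the `run_all` helper is hypothetical and only stands in for the test loop:

    import sys

    def run_all() -> int:
        # Hypothetical stand-in for the test loop; returns a process exit code.
        return 1

    if __name__ == "__main__":
        # sys.exit() is always importable, unlike the interactive exit() builtin.
        sys.exit(run_all())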
