
Commit a36d3e5

Migrate to correct logger interface (#9191)
Signed-off-by: Emmanuel Ferdman <[email protected]>
1 parent 01db65d commit a36d3e5
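
Context for the change: both hunks below replace the deprecated warn() alias with warning(). A minimal sketch of the interface difference, assuming only Python's standard logging module (the logger name is hypothetical and not taken from this commit):

import logging

logger = logging.getLogger("torch_xla.example")  # hypothetical logger name

# Deprecated spelling used by the old code; warn() is a legacy alias of warning()
# and may emit a DeprecationWarning on recent Python versions:
#   logger.warn("message")

# Preferred spelling that this commit migrates to:
logger.warning("message")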

2 files changed: +4 -3 lines changed


torch_xla/distributed/xla_backend.py

Lines changed: 2 additions & 2 deletions
@@ -46,7 +46,7 @@ def __init__(self, prefix_store, rank, size, timeout):
   def getBackendName(self):
     return 'xla'

-  # pytorch's process group is unable to retrive the group size from python level. It should
+  # pytorch's process group is unable to retrieve the group size from python level. It should
   # already been support in C++ level: https://github.com/pytorch/pytorch/blob/7b1988f9222f3dec5cc2012afce84218199748ae/torch/csrc/distributed/c10d/ProcessGroup.cpp#L148-L152
   # For now we manually set the group name property as a temporary solution.
   def _set_group_name(self, name: str) -> None:
@@ -426,7 +426,7 @@ def new_xla_process_group(ranks=None,
     else:
       pg._mesh = [ranks]
   else:
-    logging.warn(
+    logging.warning(
        f'Can\'t infer process group mesh from given ranks "{str(ranks)}". '
        'The process group will use the entire world as its collective comm group.'
    )

torchax/torchax/tensor.py

Lines changed: 2 additions & 1 deletion
@@ -145,7 +145,8 @@ def jax_device(self):

   @property
   def data(self):
-    logger.warn("In-place to .data modifications still results a copy on TPU")
+    logger.warning(
+        "In-place to .data modifications still results a copy on TPU")
     return self

   @data.setter
