fix: Simplified Chained Comparison
digger-yu committed Jul 14, 2023
1 parent 27ddd62 commit c71faeb
Showing 12 changed files with 21 additions and 23 deletions.
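Every hunk below applies the same refactor: a two-clause bound check such as "x >= lo and x < hi" becomes the chained comparison "lo <= x < hi". In Python the two forms are equivalent, except that the chained form evaluates the middle operand only once. A minimal sketch, not part of the commit, illustrating both points:

# Python expands lo <= x < hi to (lo <= x) and (x < hi), evaluating x once.
def middle():
    print("evaluated")
    return 5

assert 0 <= middle() < 10               # prints "evaluated" once
assert middle() >= 0 and middle() < 10  # prints "evaluated" twice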
2 changes: 1 addition & 1 deletion python/oneflow/framework/check_point_v2.py
@@ -728,7 +728,7 @@ def write_file():
         global_dst_rank, int
     ), f"global_dst_rank expected type int, but got {type(global_dst_rank)}."
     assert (
-        global_dst_rank >= 0 and global_dst_rank < flow.env.get_world_size()
+        0 <= global_dst_rank < flow.env.get_world_size()
     ), f"out of range (expected to be in range of [0, {flow.env.get_world_size()}), but got {global_dst_rank})."
     if flow.env.get_rank() == global_dst_rank:
         write_file()
4 changes: 2 additions & 2 deletions python/oneflow/nn/graph/graph_config.py
@@ -123,11 +123,11 @@ def build(self, x):
         if not mode:
             self.proto.optimizer_placement_optimization_mode = "none"
             return
-        assert stage >= 1 and stage <= 3, "ZeRO stage must range from 1 to 3."
+        assert 1 <= stage <= 3, "ZeRO stage must range from 1 to 3."
         assert (
             shard_min_size > 0
         ), "ZeRO min size of a sharded optimizer state must > 0."
-        assert stage >= 1 and stage <= 3, "ZeRO stage must range from 1 to 3."
+        assert 1 <= stage <= 3, "ZeRO stage must range from 1 to 3."
         if stage >= 1:
             self.proto.optimizer_placement_optimization_mode = "distributed_split"
             self.proto.optimizer_placement_optimization_threshold = shard_min_size
4 changes: 2 additions & 2 deletions python/oneflow/nn/optimizer/adam.py
@@ -128,10 +128,10 @@ def __init__(
         assert lr >= 0.0, f"Invalid learning rate: {lr}"
         assert eps >= 0.0, f"Invalid epsilon value: {eps}"
         assert (
-            betas[0] >= 0.0 and betas[0] < 1.0
+            0.0 <= betas[0] < 1.0
         ), f"Invalid beta parameter at index 0: {betas[0]}"
         assert (
-            betas[1] >= 0.0 and betas[1] < 1.0
+            0.0 <= betas[1] < 1.0
         ), f"Invalid beta parameter at index 1: {betas[1]}"
         assert weight_decay >= 0.0, f"Invalid weight_decay value: {weight_decay}"
         options = dict()
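The same half-open bound recurs in adamw.py and lamb.py below. Note that 0.0 <= betas[0] < 1.0 still rejects betas[0] == 1.0, exactly as the old two-clause form did, so behavior is unchanged. A quick equivalence check, illustration only:

betas = (0.9, 0.999)
assert (0.0 <= betas[0] < 1.0) == (betas[0] >= 0.0 and betas[0] < 1.0)
assert not (0.0 <= 1.0 < 1.0)  # the upper bound stays exclusive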
4 changes: 2 additions & 2 deletions python/oneflow/nn/optimizer/adamw.py
@@ -130,10 +130,10 @@ def __init__(
         assert lr >= 0.0, f"Invalid learning rate: {lr}"
         assert eps >= 0.0, f"Invalid epsilon value: {eps}"
         assert (
-            betas[0] >= 0.0 and betas[0] < 1.0
+            0.0 <= betas[0] < 1.0
         ), f"Invalid beta parameter at index 0: {betas[0]}"
         assert (
-            betas[1] >= 0.0 and betas[1] < 1.0
+            0.0 <= betas[1] < 1.0
         ), f"Invalid beta parameter at index 1: {betas[1]}"
         assert weight_decay >= 0.0, f"Invalid weight_decay value: {weight_decay}"
         options = dict()
4 changes: 2 additions & 2 deletions python/oneflow/nn/optimizer/lamb.py
@@ -124,10 +124,10 @@ def __init__(
         assert lr >= 0.0, f"Invalid learning rate: {lr}"
         assert eps >= 0.0, f"Invalid epsilon value: {eps}"
         assert (
-            betas[0] >= 0.0 and betas[0] < 1.0
+            0.0 <= betas[0] < 1.0
         ), f"Invalid beta parameter at index 0: {betas[0]}"
         assert (
-            betas[1] >= 0.0 and betas[1] < 1.0
+            0.0 <= betas[1] < 1.0
         ), f"Invalid beta parameter at index 1: {betas[1]}"
         assert weight_decay >= 0.0, f"Invalid weight_decay value: {weight_decay}"
 
2 changes: 1 addition & 1 deletion python/oneflow/support/func_inspect_util.py
@@ -16,7 +16,7 @@
 import inspect
 import sys
 
-if sys.version_info > (2, 7) and sys.version_info < (3, 0):
+if (2, 7) < sys.version_info < (3, 0):
 
     def GetArgNameAndDefaultTuple(func):
         """
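This rewrite also relies on chained comparison composing with tuple ordering: tuples compare lexicographically, so (2, 7) < sys.version_info < (3, 0) is true only on Python 2.7.x (sys.version_info is a 5-tuple such as (2, 7, 18, 'final', 0), which sorts above (2, 7)). A sketch of the equivalence, illustration only:

import sys

old = sys.version_info > (2, 7) and sys.version_info < (3, 0)
new = (2, 7) < sys.version_info < (3, 0)
assert old == new  # identical on any interpreter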
4 changes: 2 additions & 2 deletions python/oneflow/test/graph/test_graph_clip_grad_norm.py
@@ -120,7 +120,7 @@ def __init__(
             parallel_mode = [parallel_mode]
 
         assert all(p.upper() in ("DP", "MP", "PP") for p in parallel_mode)
-        assert len(parallel_mode) > 0 and len(parallel_mode) <= 2
+        assert 0 < len(parallel_mode) <= 2
 
         self.input_sbp = []
         self.target_sbp = []
@@ -254,7 +254,7 @@ def _compare_with_eager(
     # print(f"[rank{rank}] local_loss.numel(): {local_loss.numel()}")
     # print(f"[rank{rank}] local_loss: {local_loss}")
 
-    if acc > 1 and graph_loss.numel() == acc:
+    if 1 < acc == graph_loss.numel():
         graph_loss = graph_loss.mean()
 
     if parallel_mode is None:
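The second hunk chains two different operators: 1 < acc == graph_loss.numel() means (1 < acc) and (acc == graph_loss.numel()), not (1 < acc) == graph_loss.numel(). A small check of the semantics, illustration only:

acc, numel = 4, 4
assert (1 < acc == numel) == (acc > 1 and acc == numel)  # what the chain means
assert ((1 < acc) == numel) is False                     # what it does not mean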
4 changes: 2 additions & 2 deletions python/oneflow/test/modules/test_reflection_pad.py
@@ -58,13 +58,13 @@ def _np_reflection_pad2d_grad(src, dest):
             ip_x = ip_y = 0
             if j < pad_left:
                 ip_x = pad_left * 2 - j
-            elif j >= pad_left and j < dx_width + pad_left:
+            elif pad_left <= j < dx_width + pad_left:
                 ip_x = j
             else:
                 ip_x = (dx_width + pad_left - 1) * 2 - j
             if i < pad_top:
                 ip_y = pad_top * 2 - i
-            elif i >= pad_top and i < dx_height + pad_top:
+            elif pad_top <= i < dx_height + pad_top:
                 ip_y = i
             else:
                 ip_y = (dx_height + pad_top - 1) * 2 - i
4 changes: 2 additions & 2 deletions python/oneflow/test/modules/test_replication_pad.py
@@ -52,13 +52,13 @@ def _np_replication_pad2d_grad(src, dest, padding):
             ip_x = ip_y = 0
             if j < pad_left:
                 ip_x = pad_left
-            elif j >= pad_left and j < dx_width + pad_left:
+            elif pad_left <= j < dx_width + pad_left:
                 ip_x = j
             else:
                 ip_x = dx_width + pad_left - 1
             if i < pad_top:
                 ip_y = pad_top
-            elif i >= pad_top and i < dx_height + pad_top:
+            elif pad_top <= i < dx_height + pad_top:
                 ip_y = i
             else:
                 ip_y = dx_height + pad_top - 1
5 changes: 2 additions & 3 deletions python/oneflow/test/modules/test_zeropad2d.py
@@ -49,9 +49,8 @@ def _np_zero_pad2d_grad(src, dest, padding):
         (n, c, i, j) = (coords[0], coords[c_idx], coords[h_idx], coords[w_idx])
         ip_x = ip_y = 0
         if (
-            j >= pad_left
-            and j < dx_width + pad_left
-            and (i >= pad_top)
+            pad_left <= j < dx_width + pad_left
+            and (i >= pad_top)
             and (i < dx_height + pad_top)
         ):
             ip_x = j - pad_left
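In this hunk (and the identical one in test_complex.py below) only the j-axis clauses were folded; the remaining (i >= pad_top) and (i < dx_height + pad_top) could chain the same way but were left untouched. A hypothetical further pass, not part of this commit:

# if pad_left <= j < dx_width + pad_left and pad_top <= i < dx_height + pad_top: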
5 changes: 2 additions & 3 deletions python/oneflow/test/tensor/test_complex.py
@@ -92,9 +92,8 @@ def _np_zero_pad2d_grad(src, dest, padding):
         (n, c, i, j) = (coords[0], coords[c_idx], coords[h_idx], coords[w_idx])
         ip_x = ip_y = 0
         if (
-            j >= pad_left
-            and j < dx_width + pad_left
-            and (i >= pad_top)
+            pad_left <= j < dx_width + pad_left
+            and (i >= pad_top)
             and (i < dx_height + pad_top)
         ):
             ip_x = j - pad_left
2 changes: 1 addition & 1 deletion tools/functional/generator.py
@@ -402,7 +402,7 @@ def to_string(self, to_cpp=False, drop_name=False):
         fmt = "{0} {1}(".format(self._ret.to_string(to_cpp=to_cpp), self._name)
         keyword_start = False
         for i, arg in enumerate(self._args):
-            if i > 0 and i < len(self._args):
+            if 0 < i < len(self._args):
                 fmt += ", "
             if not keyword_start and arg._keyword_only:
                 keyword_start = True
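Incidentally, because i comes from enumerate(self._args), i < len(self._args) always holds inside the loop, so 0 < i < len(self._args) reduces to the usual i > 0 comma-separator idiom. A standalone sketch, assuming a plain list of argument strings:

args = ["a", "b", "c"]
fmt = ""
for i, arg in enumerate(args):
    if i > 0:  # equivalent here to 0 < i < len(args)
        fmt += ", "
    fmt += arg
assert fmt == "a, b, c"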
