Bump pylint (#408)
This patch bumps pylint to the latest version. This is in preparation
for bumping the rest of the deps.
boomanaiden154 authored Dec 19, 2024
1 parent 822656a commit b6ee932
Showing 10 changed files with 58 additions and 52 deletions.
10 changes: 7 additions & 3 deletions .pylintrc
@@ -104,12 +104,14 @@ disable=abstract-method,
         no-self-use,
         no-value-for-parameter, # gin causes false positives
         nonzero-method,
+        not-callable, # lots of false positives
         oct-method,
         old-division,
         old-ne-operator,
         old-octal-literal,
         old-raise-syntax,
         parameter-unpacking,
+        possibly-used-before-assignment, # false positives with control flow
         print-statement,
         raising-string,
         range-builtin-not-iterating,
@@ -132,6 +134,7 @@ disable=abstract-method,
         too-many-instance-attributes,
         too-many-locals,
         too-many-nested-blocks,
+        too-many-positional-arguments,
         too-many-public-methods,
         too-many-return-statements,
         too-many-statements,
@@ -140,6 +143,7 @@ disable=abstract-method,
         unicode-builtin,
         unnecessary-pass,
         unpacking-in-except,
+        used-before-assignment,
         useless-else-on-loop,
         useless-object-inheritance,
         useless-suppression,
@@ -425,7 +429,7 @@ valid-metaclass-classmethod-first-arg=mcs

 # Exceptions that will emit a warning when being caught. Defaults to
 # "Exception"
-overgeneral-exceptions=StandardError,
-                       Exception,
-                       BaseException
+overgeneral-exceptions=builtins.StandardError,
+                       builtins.Exception,
+                       builtins.BaseException
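
The newly disabled messages above correspond to checks that are new or stricter in pylint 3.x. In particular, used-before-assignment and possibly-used-before-assignment rely on flow analysis that, as the comments note, misfires on some control-flow patterns. A minimal, hypothetical sketch (not taken from this repository) of the kind of code that can trigger a false positive:

def describe(flag: bool) -> str:
  if flag:
    message = 'enabled'
  if flag:
    # pylint cannot prove the two conditions are equivalent, so it may
    # report possibly-used-before-assignment here even though `message`
    # is always bound when this line runs.
    return message
  return 'disabled'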

2 changes: 1 addition & 1 deletion Pipfile
@@ -12,7 +12,7 @@ tensorflow = "==2.12.0"
 dm-reverb = "==0.11.0"

 [dev-packages]
-pylint = "==2.14.1"
+pylint = "==3.3.2"
 pytest = "==7.1.2"
 pytype = "==2022.06.06"
 yapf = "==0.43.0"
47 changes: 24 additions & 23 deletions Pipfile.lock

(Pipfile.lock is a generated file; its diff is not rendered.)

10 changes: 6 additions & 4 deletions compiler_opt/distributed/buffered_scheduler_test.py
@@ -63,16 +63,18 @@ def square(self, the_value, extra_factor=1):
                          [x * x for x in range(10)])

     _, futures = buffered_scheduler.schedule_on_worker_pool(
-        lambda w, v: w.square(**v), [dict(the_value=v) for v in range(10)],
-        pool)
+        lambda w, v: w.square(**v), [{
+            'the_value': v
+        } for v in range(10)], pool)
     worker.wait_for(futures)
     self.assertListEqual([f.result() for f in futures],
                          [x * x for x in range(10)])

     # same idea, but mix some kwargs
     _, futures = buffered_scheduler.schedule_on_worker_pool(
-        lambda w, v: w.square(v[0], **v[1]),
-        [(v, dict(extra_factor=10)) for v in range(10)], pool)
+        lambda w, v: w.square(v[0], **v[1]), [(v, {
+            'extra_factor': 10
+        }) for v in range(10)], pool)
     worker.wait_for(futures)
     self.assertListEqual([f.result() for f in futures],
                          [x * x * 10 for x in range(10)])
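
The rewrites in this test, like the similar ones in worker_test.py and local_data_collector.py further down, swap dict(...) keyword calls for dict literals. That is the form pylint prefers via its use-dict-literal (R1735) refactoring check, though the commit does not say which message prompted the change; a minimal sketch with illustrative names:

kwargs = dict(the_value=3)  # the call form that R1735 reports
kwargs = {'the_value': 3}   # the equivalent literal form used in this patch
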
7 changes: 5 additions & 2 deletions compiler_opt/distributed/worker_test.py
@@ -35,8 +35,11 @@ def test_gin_args(self):
     gin.bind_parameter('_test.SomeType.argument', 42)
     real_args = worker.get_full_worker_args(
         SomeType, more_args=2, even_more_args='hi')
-    self.assertDictEqual(real_args,
-                         dict(argument=42, more_args=2, even_more_args='hi'))
+    self.assertDictEqual(real_args, {
+        'argument': 42,
+        'more_args': 2,
+        'even_more_args': 'hi'
+    })


 if __name__ == '__main__':
2 changes: 1 addition & 1 deletion compiler_opt/es/blackbox_learner.py
@@ -168,7 +168,7 @@ def _get_perturbations(self) -> List[npt.NDArray[np.float32]]:
     rng = np.random.default_rng(seed=self._seed)
     for _ in range(self._config.total_num_perturbations):
       perturbations.append(
-          rng.normal(size=(len(self._model_weights))) *
+          rng.normal(size=len(self._model_weights)) *
           self._config.precision_parameter)
     return perturbations

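
The parentheses dropped above were redundant: size=(n) is just the integer n, not a one-element tuple, so the rewrite cannot change the shape of the sampled perturbation. A quick standalone check with illustrative values:

import numpy as np

rng = np.random.default_rng(seed=0)
a = rng.normal(size=(3))  # old spelling; (3) is simply the int 3
b = rng.normal(size=3)    # new spelling
assert a.shape == b.shape == (3,)
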
6 changes: 2 additions & 4 deletions compiler_opt/es/blackbox_optimizers.py
@@ -983,16 +983,14 @@ def trust_region_test(self, current_input: FloatArray,
                     str(tr_imp_ratio))
     if should_reject:
       self.radius *= self.params['reject_factor']
-      if self.radius < self.params['minimum_radius']:
-        self.radius = self.params['minimum_radius']
+      self.radius = max(self.radius, self.params['minimum_radius'])
       self.is_returned_step = True
       print('Step rejected. Shrink: ' + str(self.radius) + log_message)
       return False
     else:  # accept step
       if should_shrink:
         self.radius *= self.params['shrink_factor']
-        if self.radius < self.params['minimum_radius']:
-          self.radius = self.params['minimum_radius']
+        self.radius = max(self.radius, self.params['minimum_radius'])
         print('Shrink: ' + str(self.radius) + log_message)
       elif should_grow:
         self.radius *= self.params['grow_factor']
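
Collapsing the clamp into max() is the rewrite pylint suggests through its consider-using-max-builtin (R1731) check; the two forms are equivalent, as in this illustrative sketch (names are hypothetical):

radius, minimum_radius = 0.5, 1.0
if radius < minimum_radius:  # the if/assign form that R1731 reports
  radius = minimum_radius
# equivalent single expression, matching the patch above:
radius = max(radius, minimum_radius)
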
4 changes: 2 additions & 2 deletions compiler_opt/rl/compilation_runner.py
@@ -122,15 +122,15 @@ def __init__(self):
     Exception.__init__(self)


-def kill_process_ignore_exceptions(p: 'subprocess.Popen[bytes]'):
+def kill_process_ignore_exceptions(p: 'subprocess.Popen[bytes]'):  # pylint: disable=useless-return
   # kill the process and ignore exceptions. Exceptions would be thrown if the
   # process has already been killed/finished (which is inherently in a race
   # condition with us killing it)
   try:
     p.kill()
     p.wait()
   finally:
-    return  # pylint: disable=lost-exception
+    return  # pylint: disable=lost-exception,return-in-finally


 class WorkerCancellationManager:
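
The added return-in-finally suppression reflects why pylint warns about this function at all: a return inside finally silently replaces any exception raised in the try body, which is exactly the behaviour kill_process_ignore_exceptions relies on. A small self-contained illustration (not from the repository):

def swallow() -> str:
  try:
    raise RuntimeError('never reaches the caller')
  finally:
    # Returning here discards the in-flight RuntimeError; this is what
    # pylint's lost-exception / return-in-finally messages warn about.
    return 'ok'

print(swallow())  # prints 'ok'; no exception escapes
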
13 changes: 6 additions & 7 deletions compiler_opt/rl/local_data_collector.py
@@ -105,13 +105,12 @@ def _schedule_jobs(self, policy: policy_saver.Policy, model_id: int,
     # by now, all the pending work, which was signaled to cancel, must've
     # finished
     self._join_pending_jobs()
-    jobs = [
-        dict(
-            loaded_module_spec=loaded_module_spec,
-            policy=policy,
-            reward_stat=self._reward_stat_map[loaded_module_spec.name],
-            model_id=model_id) for loaded_module_spec in sampled_modules
-    ]
+    jobs = [{
+        'loaded_module_spec': loaded_module_spec,
+        'policy': policy,
+        'reward_stat': self._reward_stat_map[loaded_module_spec.name],
+        'model_id': model_id
+    } for loaded_module_spec in sampled_modules]

     (self._workers,
      self._current_futures) = buffered_scheduler.schedule_on_worker_pool(
9 changes: 4 additions & 5 deletions compiler_opt/type_map.py
@@ -17,11 +17,10 @@
 from typing import List, Tuple, Union
 import tensorflow as tf

-ScalarCType = Union['type[ctypes.c_float]', 'type[ctypes.c_double]',
-                    'type[ctypes.c_int8]', 'type[ctypes.c_int16]',
-                    'type[ctypes.c_uint16]', 'type[ctypes.c_int32]',
-                    'type[ctypes.c_uint32]', 'type[ctypes.c_int64]',
-                    'type[ctypes.c_uint64]']
+ScalarCType = Union[  # pylint: disable=invalid-name
+    'type[ctypes.c_float]', 'type[ctypes.c_double]', 'type[ctypes.c_int8]',
+    'type[ctypes.c_int16]', 'type[ctypes.c_uint16]', 'type[ctypes.c_int32]',
+    'type[ctypes.c_uint32]', 'type[ctypes.c_int64]', 'type[ctypes.c_uint64]']

 TYPE_ASSOCIATIONS: List[Tuple[str, ScalarCType,
                               tf.DType]] = [
