Fix for issues related to mypy (#2435)
* fixed failing mypy errors

* fixed errors related to mypy

* removed whitespace that was failing flake8 checks

* removed extra whitespace for a failing flake8 check

Co-authored-by: vfdev <[email protected]>
sayantan1410 and vfdev-5 authored Jan 20, 2022
1 parent 9bbd633 commit 111fc7a
Showing 6 changed files with 7 additions and 7 deletions.
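Note: the pattern across these files is the same — after unpinning mypy (see tests/run_code_style.sh below), a newer mypy with current typeshed stubs no longer needs several of the error-code-scoped "# type: ignore[...]" comments, so they are narrowed or dropped. A minimal sketch of the mechanism (hypothetical snippet, not part of this commit; it assumes warn_unused_ignores is enabled in mypy.ini, which this diff does not show):

import tempfile

with tempfile.NamedTemporaryFile() as tmp:
    # Older typeshed stubs had no `file` attribute on the object returned by
    # NamedTemporaryFile, so this line would have needed `# type: ignore[attr-defined]`.
    # Once the stubs expose `file`, that comment itself gets reported as an
    # unused ignore when warn_unused_ignores is on, so it has to be removed.
    tmp.file.write(b"checkpoint bytes")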
2 changes: 1 addition & 1 deletion ignite/contrib/handlers/neptune_logger.py
@@ -585,7 +585,7 @@ def __call__(self, checkpoint: Mapping, filename: str, metadata: Optional[Mappin
         with tempfile.NamedTemporaryFile() as tmp:
             # we can not use tmp.name to open tmp.file twice on Win32
             # https://docs.python.org/3/library/tempfile.html#tempfile.NamedTemporaryFile
-            torch.save(checkpoint, tmp.file)  # type: ignore[attr-defined]
+            torch.save(checkpoint, tmp.file)
             self._logger.log_artifact(tmp.name, filename)
 
     @idist.one_rank_only(with_barrier=True)
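The attr-defined ignore on tmp.file can go away presumably because current stubs type NamedTemporaryFile(...).file as a writable file object, which torch.save accepts. A rough, self-contained sketch of the same save-then-upload pattern (save_and_upload and upload are hypothetical stand-ins, not ignite or Neptune APIs):

import tempfile
import torch

def upload(path: str, name: str) -> None:
    # placeholder for a logger call such as log_artifact(path, name)
    print(f"would upload {path} as {name}")

def save_and_upload(checkpoint: dict, filename: str) -> None:
    with tempfile.NamedTemporaryFile() as tmp:
        # write through the already-open file object; reopening tmp.name a
        # second time does not work on Win32 (see the comment in the hunk)
        torch.save(checkpoint, tmp.file)
        tmp.flush()
        upload(tmp.name, filename)

save_and_upload({"weights": torch.zeros(2)}, "model.pt")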
4 changes: 2 additions & 2 deletions ignite/engine/deterministic.py
@@ -251,12 +251,12 @@ def _from_iteration(self, iteration: int) -> Iterator:
         if isinstance(data, DataLoader):
             try:
                 # following is unsafe for IterableDatasets
-                iteration %= len(data.batch_sampler)  # type: ignore[attr-defined, arg-type]
+                iteration %= len(data.batch_sampler)  # type: ignore[arg-type]
                 # Synchronize dataflow according to state.iteration
                 self._setup_seed()
                 if iteration > 0:
                     # batch sampler is ReproducibleBatchSampler
-                    data.batch_sampler.start_iteration = iteration  # type: ignore[attr-defined, union-attr]
+                    data.batch_sampler.start_iteration = iteration  # type: ignore[union-attr]
                 return iter(data)
             except TypeError as e:
                 # Probably we can do nothing with DataLoader built upon IterableDatasets
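Context for the two narrowed ignores: the code wraps iteration back into the current epoch with a modulo over the batch sampler length, reseeds the dataflow, and then asks a ReproducibleBatchSampler to skip the batches already consumed. A toy illustration of that skip-ahead idea (SkipAheadBatchSampler is hypothetical, only meant to mirror the start_iteration attribute used in the hunk):

from typing import Iterator, List

class SkipAheadBatchSampler:
    # replays a fixed batch order but starts yielding from `start_iteration`
    def __init__(self, batches: List[List[int]]) -> None:
        self.batches = batches
        self.start_iteration = 0

    def __iter__(self) -> Iterator[List[int]]:
        for i, batch in enumerate(self.batches):
            if i >= self.start_iteration:
                yield batch

    def __len__(self) -> int:
        return len(self.batches)

sampler = SkipAheadBatchSampler([[0, 1], [2, 3], [4, 5], [6, 7]])
iteration = 6 % len(sampler)          # mirrors `iteration %= len(data.batch_sampler)`
sampler.start_iteration = iteration   # mirrors `data.batch_sampler.start_iteration = iteration`
print(list(iter(sampler)))            # [[4, 5], [6, 7]]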
2 changes: 1 addition & 1 deletion ignite/handlers/checkpoint.py
@@ -718,7 +718,7 @@ def _save_func(self, checkpoint: Mapping, path: str, func: Callable, rank: int =
         tmp: Optional[IO[bytes]] = None
         if rank == 0:
             tmp = tempfile.NamedTemporaryFile(delete=False, dir=self.dirname)
-            tmp_file = tmp.file  # type: ignore
+            tmp_file = tmp.file
             tmp_name = tmp.name
         try:
             func(checkpoint, tmp_file, **self.kwargs)
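As in the Neptune handler, tmp.file no longer needs a blanket ignore. Here the temporary file is created with delete=False inside the checkpoint directory, the usual setup for writing the checkpoint first and then moving the finished file into its final name. A sketch of that write-then-rename pattern (illustrative only; atomic_save is not ignite's internal name and the final rename step is assumed):

import os
import tempfile
import torch

def atomic_save(checkpoint: dict, dirname: str, final_name: str) -> None:
    # write into a temp file in the target directory, then move it into place,
    # so an interrupted write never leaves a truncated checkpoint behind
    tmp = tempfile.NamedTemporaryFile(delete=False, dir=dirname)
    try:
        torch.save(checkpoint, tmp.file)
    finally:
        tmp.close()
    os.replace(tmp.name, os.path.join(dirname, final_name))

atomic_save({"weights": torch.zeros(2)}, ".", "model.pt")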
2 changes: 1 addition & 1 deletion ignite/handlers/param_scheduler.py
@@ -893,7 +893,7 @@ def simulate_values(  # type: ignore[override]
             torch.save(obj, cache_filepath.as_posix())
 
             values = []
-            scheduler = cls(save_history=False, lr_scheduler=lr_scheduler, **kwargs)  # type: ignore[call-arg]
+            scheduler = cls(save_history=False, lr_scheduler=lr_scheduler, **kwargs)
             for i in range(num_events):
                 params = [p[scheduler.param_name] for p in scheduler.optimizer_param_groups]
                 values.append([i] + params)
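simulate_values builds a throwaway copy of the scheduler (the call-arg ignore on cls(...) is no longer needed) and records the parameter value at each event index, which is what the loop below the changed line does. A hedged usage sketch (the import path and keyword names follow this hunk and the edited file, but are not verified against a specific release):

import torch
from ignite.handlers.param_scheduler import LRScheduler

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
torch_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.5)

# rows of [event_index, value per param group], per the loop in the hunk
values = LRScheduler.simulate_values(num_events=20, lr_scheduler=torch_scheduler)
print(values[:3])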
2 changes: 1 addition & 1 deletion ignite/handlers/time_profilers.py
@@ -585,7 +585,7 @@ def attach(self, engine: Engine) -> None:
         if not engine.has_event_handler(self._as_first_started):
             engine._event_handlers[Events.STARTED].insert(0, (self._as_first_started, (engine,), {}))
 
-    def get_results(self) -> List[List[Union[str, float]]]:
+    def get_results(self) -> List[List[Union[str, float, Tuple[Union[str, float], Union[str, float]]]]]:
         """
         Method to fetch the aggregated profiler results after the engine is run
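The widened annotation on get_results says a results row may hold plain strings and floats as well as (value, value) pairs, so code consuming the rows should not assume every cell is a scalar. A small example of handling such mixed rows (the row contents are invented for illustration, not actual profiler output):

from typing import List, Tuple, Union

Cell = Union[str, float, Tuple[Union[str, float], Union[str, float]]]

def format_row(row: List[Cell]) -> str:
    parts = []
    for cell in row:
        if isinstance(cell, tuple):
            parts.append(f"{cell[0]}/{cell[1]}")  # e.g. a (value, index) pair
        else:
            parts.append(str(cell))
    return " | ".join(parts)

print(format_row(["processing_stats", 0.012, (0.035, 7)]))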
2 changes: 1 addition & 1 deletion tests/run_code_style.sh
@@ -12,5 +12,5 @@ elif [ $1 = "fmt" ]; then
 elif [ $1 = "mypy" ]; then
     mypy --config-file mypy.ini
 elif [ $1 = "install" ]; then
-    pip install flake8 "black==21.12b0" "isort==5.7.0" "mypy==0.910"
+    pip install flake8 "black==21.12b0" "isort==5.7.0" "mypy"
 fi
