Commit c80e5fd

Pyre Bot Jr authored and facebook-github-bot committed

suppress errors in vision/fair/pytorch3d

Reviewed By: kjchalup

Differential Revision: D39198333

fbshipit-source-id: 3f4ebcf625215f21d165073837578ff69b05f72d

1 parent d19e624

9 files changed: +41 -4 lines changed

9 files changed

+41
-4
lines changed
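Every hunk in this commit adds, moves, or removes type-checker comments; runtime behavior is unchanged. Each new `# pyre-fixme[<code>]` comment sits on the line above an expression that the Pyre type checker flags and suppresses that single error code on the following line while leaving a greppable marker; a few hunks also swap older `# pyre-ignore` comments for the equivalent `# pyre-fixme` form or drop ones that are no longer needed. A minimal sketch of the suppression pattern, using simplified stand-in classes rather than code from this commit:

from typing import Optional


class Evaluator:
    def run(self) -> None:
        ...


# pyre-fixme[13]: Attribute `evaluator` is never initialized.
class TrainingLoop:
    # Declared without a default and never assigned in __init__, which is what
    # Pyre's "uninitialized attribute" check (error code 13) reports.
    evaluator: Optional[Evaluator]

    def run_eval(self) -> None:
        # pyre-fixme[16]: `Optional` has no attribute `run`.
        # Only error code 16 is silenced, and only on the next line.
        self.evaluator.run()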

projects/implicitron_trainer/experiment.py  (+1)

@@ -207,6 +207,7 @@ def run(self) -> None:
             val_loader,
         ) = accelerator.prepare(model, optimizer, train_loader, val_loader)

+        # pyre-fixme[16]: Optional type has no attribute `is_multisequence`.
         if not self.training_loop.evaluator.is_multisequence:
             all_train_cameras = self.data_source.all_train_cameras
         else:

projects/implicitron_trainer/impl/training_loop.py  (+3 -1)

@@ -29,6 +29,7 @@
 logger = logging.getLogger(__name__)


+# pyre-fixme[13]: Attribute `evaluator` is never initialized.
 class TrainingLoopBase(ReplaceableBase):
     """
     Members:
@@ -62,7 +63,7 @@ def load_stats(


 @registry.register
-class ImplicitronTrainingLoop(TrainingLoopBase):  # pyre-ignore [13]
+class ImplicitronTrainingLoop(TrainingLoopBase):
     """
     Members:
         eval_only: If True, only run evaluation using the test dataloader.
@@ -137,6 +138,7 @@ def run(
         # only run evaluation on the test dataloader
         if self.eval_only:
             if test_loader is not None:
+                # pyre-fixme[16]: `Optional` has no attribute `run`.
                 self.evaluator.run(
                     all_train_cameras=all_train_cameras,
                     dataloader=test_loader,

pytorch3d/implicitron/models/implicit_function/decoding_functions.py  (+6)

@@ -158,9 +158,14 @@ def forward(self, x: torch.Tensor, z: Optional[torch.Tensor] = None):
             # if the skip tensor is None, we use `x` instead.
             z = x
         skipi = 0
+        # pyre-fixme[6]: For 1st param expected `Iterable[Variable[_T]]` but got
+        #  `Union[Tensor, Module]`.
         for li, layer in enumerate(self.mlp):
+            # pyre-fixme[58]: `in` is not supported for right operand type
+            #  `Union[torch._tensor.Tensor, torch.nn.modules.module.Module]`.
             if li in self._input_skips:
                 if self._skip_affine_trans:
+                    # pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C._Te...
                     y = self._apply_affine_layer(self.skip_affines[skipi], y, z)
                 else:
                     y = torch.cat((y, z), dim=-1)
@@ -170,6 +175,7 @@ def forward(self, x: torch.Tensor, z: Optional[torch.Tensor] = None):


 @registry.register
+# pyre-fixme[13]: Attribute `network` is never initialized.
 class MLPDecoder(DecoderFunctionBase):
     """
     Decoding function which uses `MLPWithIputSkips` to convert the embedding to output.
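The suppressed messages in this file all mention `Union[Tensor, Module]`, which is the return annotation of `torch.nn.Module.__getattr__`. When Pyre cannot tie an attribute such as `self.mlp` or `self._input_skips` to a visible assignment, the read is typed through that fallback, so iterating it ([6]), calling it ([29]), or using it as the right operand of `in` ([58]) is rejected. A small sketch of the same situation, with a hypothetical module that registers a submodule in a way Pyre cannot follow:

import torch


class Net(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        # Registered dynamically, so Pyre has no declared type for `self.mlp`
        # and resolves later reads through nn.Module.__getattr__.
        self.add_module("mlp", torch.nn.Linear(4, 4))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # pyre-fixme[29]: `Union[torch._tensor.Tensor,
        #  torch.nn.modules.module.Module]` is not a function.
        return self.mlp(x)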

pytorch3d/implicitron/models/implicit_function/voxel_grid.py  (+15 -3)

@@ -89,7 +89,6 @@ def evaluate_world(
             torch.Tensor: shape (n_grids, n_points, n_features)
         """
         points_local = locator.world_to_local_coords(points)
-        # pyre-ignore[29]
         return self.evaluate_local(points_local, grid_values)

     def evaluate_local(
@@ -139,6 +138,8 @@ class FullResolutionVoxelGrid(VoxelGridBase):
     # the type of grid_values argument needed to run evaluate_local()
     values_type: ClassVar[Type[VoxelGridValuesBase]] = FullResolutionVoxelGridValues

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: FullResolutionVoxelGridValues
     ) -> torch.Tensor:
@@ -213,6 +214,8 @@ class CPFactorizedVoxelGrid(VoxelGridBase):
     n_components: int = 24
     matrix_reduction: bool = True

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: CPFactorizedVoxelGridValues
     ) -> torch.Tensor:
@@ -318,6 +321,8 @@ class VMFactorizedVoxelGrid(VoxelGridBase):
     distribution_of_components: Optional[Tuple[int, int, int]] = None
     matrix_reduction: bool = True

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: VMFactorizedVoxelGridValues
     ) -> torch.Tensor:
@@ -392,9 +397,11 @@ def get_shapes(self) -> Dict[str, Tuple]:
         if self.distribution_of_components is None and self.n_components % 3 != 0:
             raise ValueError("n_components must be divisible by 3")
         if self.distribution_of_components is None:
-            # pyre-ignore[58]
             calculated_distribution_of_components = [
-                self.n_components // 3 for _ in range(3)
+                # pyre-fixme[58]: `//` is not supported for operand types
+                #  `Optional[int]` and `int`.
+                self.n_components // 3
+                for _ in range(3)
             ]
         else:
             calculated_distribution_of_components = self.distribution_of_components
@@ -437,6 +444,7 @@ def get_shapes(self) -> Dict[str, Tuple]:
         return shape_dict


+# pyre-fixme[13]: Attribute `voxel_grid` is never initialized.
 class VoxelGridModule(Configurable, torch.nn.Module):
     """
     A wrapper torch.nn.Module for the VoxelGrid classes, which
@@ -459,6 +467,7 @@ class VoxelGridModule(Configurable, torch.nn.Module):
     voxel_grid_class_type: str = "FullResolutionVoxelGrid"
     voxel_grid: VoxelGridBase

+    # pyre-fixme[8]: Attribute has type `Tuple[float, float, float]`; used as `float`.
     extents: Tuple[float, float, float] = 1.0
     translation: Tuple[float, float, float] = (0.0, 0.0, 0.0)

@@ -505,8 +514,11 @@ def forward(self, points: torch.Tensor) -> torch.Tensor:
             # voxel size and translation.
             voxel_size=self.extents,
             volume_translation=self.translation,
+            # pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C._TensorBase...
             device=next(self.params.values()).device,
         )
+        # pyre-fixme[29]: `Union[torch._tensor.Tensor,
+        #  torch.nn.modules.module.Module]` is not a function.
         grid_values = self.voxel_grid.values_type(**self.params)
         # voxel grids operate with extra n_grids dimension, which we fix to one
         return self.voxel_grid.evaluate_world(points[None], grid_values, locator)[0]
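The three new `pyre-fixme[14]` comments cover the same pattern: each concrete grid narrows the `grid_values` parameter of `evaluate_local()` from the base class's values type to its own, and narrowing a parameter type in an override breaks parameter contravariance, which Pyre reports as an inconsistent override. A minimal sketch with hypothetical grid classes, not code from this file:

import torch


class GridValuesBase:
    pass


class DenseGridValues(GridValuesBase):
    pass


class GridBase:
    def evaluate_local(
        self, points: torch.Tensor, grid_values: GridValuesBase
    ) -> torch.Tensor:
        raise NotImplementedError


class DenseGrid(GridBase):
    # pyre-fixme[14]: `evaluate_local` overrides method defined in `GridBase`
    #  inconsistently.
    def evaluate_local(
        self, points: torch.Tensor, grid_values: DenseGridValues
    ) -> torch.Tensor:
        # Accepting only the narrower DenseGridValues means a caller holding a
        # GridBase reference could pass values this subclass cannot handle.
        return points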

pytorch3d/implicitron/tools/eval_video_trajectory.py  (+3)

@@ -185,11 +185,14 @@ def _remove_outlier_cameras(
     keep_indices = utils.get_inlier_indicators(
         cameras.get_camera_center(), dim=0, outlier_rate=outlier_rate
     )
+    # pyre-fixme[6]: For 1st param expected `Union[List[int], int, BoolTensor,
+    #  LongTensor]` but got `Tensor`.
     clean_cameras = cameras[keep_indices]
     logger.info(
         "Filtered outlier cameras when estimating the trajectory: "
         f"{len(cameras)}{len(clean_cameras)}"
     )
+    # pyre-fixme[7]: Expected `PerspectiveCameras` but got `CamerasBase`.
    return clean_cameras

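The new `pyre-fixme[7]` reflects a return-type mismatch rather than a runtime problem: indexing goes through the base class's `__getitem__`, whose annotation promises only the base camera type, while this helper is declared to return `PerspectiveCameras`. A minimal sketch of that shape of error, with stripped-down stand-ins for the camera classes:

from typing import List


class CamerasBase:
    def __getitem__(self, index: List[int]) -> "CamerasBase":
        return self


class PerspectiveCameras(CamerasBase):
    pass


def keep_first(cameras: PerspectiveCameras) -> PerspectiveCameras:
    # At runtime indexing a PerspectiveCameras yields a PerspectiveCameras,
    # but the annotation on __getitem__ only promises CamerasBase.
    # pyre-fixme[7]: Expected `PerspectiveCameras` but got `CamerasBase`.
    return cameras[[0]]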

pytorch3d/renderer/cameras.py  (+8)

@@ -401,7 +401,9 @@ def __getitem__(
         kwargs = {}

         tensor_types = {
+            # pyre-fixme[16]: Module `cuda` has no attribute `BoolTensor`.
             "bool": (torch.BoolTensor, torch.cuda.BoolTensor),
+            # pyre-fixme[16]: Module `cuda` has no attribute `LongTensor`.
             "long": (torch.LongTensor, torch.cuda.LongTensor),
         }
         if not isinstance(
@@ -419,8 +421,14 @@ def __getitem__(
             index = [index]

         if isinstance(index, tensor_types["bool"]):
+            # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+            #  LongTensor]` has no attribute `ndim`.
+            # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+            #  LongTensor]` has no attribute `shape`.
             if index.ndim != 1 or index.shape[0] != len(self):
                 raise ValueError(
+                    # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+                    #  LongTensor]` has no attribute `shape`.
                     f"Boolean index of shape {index.shape} does not match cameras"
                 )
         elif max(index) >= len(self):
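Both kinds of suppression in `__getitem__` stem from how the index types are expressed. Pyre's view of the `torch.cuda` module apparently does not include the legacy `BoolTensor`/`LongTensor` classes, hence the two [16] errors inside `tensor_types`; and because the `isinstance` checks test against tuples looked up from that dict rather than against statically named classes, Pyre cannot narrow the `Union[List[int], BoolTensor, LongTensor]` index, so attribute reads like `index.ndim` still see the `List` member. A small sketch of the narrowing half, with a hypothetical helper:

from typing import List, Union

import torch

tensor_types = {"bool": (torch.BoolTensor,)}


def check(index: Union[List[int], torch.BoolTensor]) -> None:
    if isinstance(index, tensor_types["bool"]):
        # Runtime: only tensors reach this branch.  Pyre: the class tuple is a
        # dynamic dict lookup, so `index` keeps its full Union type and `.ndim`
        # on the List member is reported as error [16].
        print(index.ndim)

    if isinstance(index, torch.BoolTensor):
        # With a statically named class, the Union narrows to BoolTensor and
        # the same attribute access checks cleanly.
        print(index.ndim)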

pytorch3d/renderer/mesh/rasterizer.py  (+1)

@@ -204,6 +204,7 @@ def transform(self, meshes_world, **kwargs) -> torch.Tensor:
         to_ndc_transform = cameras.get_ndc_camera_transform(**kwargs)
         projection_transform = try_get_projection_transform(cameras, kwargs)
         if projection_transform is not None:
+            # pyre-fixme[16]: Anonymous callable has no attribute `compose`.
             projection_transform = projection_transform.compose(to_ndc_transform)
             verts_ndc = projection_transform.transform_points(verts_view, eps=eps)
         else:

pytorch3d/renderer/points/rasterizer.py  (+1)

@@ -107,6 +107,7 @@ def transform(self, point_clouds, **kwargs) -> Pointclouds:
         to_ndc_transform = cameras.get_ndc_camera_transform(**kwargs)
         projection_transform = try_get_projection_transform(cameras, kwargs)
         if projection_transform is not None:
+            # pyre-fixme[16]: Anonymous callable has no attribute `compose`.
             projection_transform = projection_transform.compose(to_ndc_transform)
             pts_ndc = projection_transform.transform_points(pts_view, eps=eps)
         else:

pytorch3d/structures/volumes.py  (+3)

@@ -981,6 +981,9 @@ def _copy_transform_and_sizes(
         device = device if device is not None else self.device
         other._grid_sizes = self._grid_sizes[index].to(device)
         other._local_to_world_transform = self.get_local_to_world_coords_transform()[
+            # pyre-fixme[6]: For 1st param expected `Union[List[int], int, slice,
+            #  BoolTensor, LongTensor]` but got `Union[None, List[int], Tuple[int],
+            #  int, slice, Tensor]`.
             index
         ].to(device)
