Skip to content

Commit 7db20ff

Browse files
mikaylagawarecki authored and pytorchmergebot committed
Remove public_allowlist from TestPublicBindings.test_correct_module_names and ensure private_allowlist-ed things are actually private (pytorch#145620)
This passes locally; also sanity-checked importing these modules on [colab](https://colab.research.google.com/drive/1edynWX1mlQNZIBxtb3g81_ZeTpAqWi19?usp=sharing). Pull Request resolved: pytorch#145620. Approved by: https://github.com/albanD
1 parent 5d01a28 commit 7db20ff

File tree

1 file changed

+3
-43
lines changed

1 file changed

+3
-43
lines changed

test/test_public_bindings.py

+3-43
Original file line numberDiff line numberDiff line change
@@ -288,6 +288,7 @@ def onerror(modname):
288288

289289
# It is ok to add new entries here but please be careful that these modules
290290
# do not get imported by public code.
291+
# DO NOT add public modules here.
291292
private_allowlist = {
292293
"torch._inductor.codegen.cuda.cuda_kernel",
293294
# TODO(#133647): Remove the onnx._internal entries after
@@ -404,52 +405,11 @@ def onerror(modname):
404405
"torch.utils.tensorboard._utils",
405406
}
406407

407-
# No new entries should be added to this list.
408-
# All public modules should be importable on all platforms.
409-
public_allowlist = {
410-
"torch.distributed.algorithms.ddp_comm_hooks",
411-
"torch.distributed.algorithms.model_averaging.averagers",
412-
"torch.distributed.algorithms.model_averaging.hierarchical_model_averager",
413-
"torch.distributed.algorithms.model_averaging.utils",
414-
"torch.distributed.checkpoint",
415-
"torch.distributed.constants",
416-
"torch.distributed.distributed_c10d",
417-
"torch.distributed.elastic.agent.server",
418-
"torch.distributed.elastic.rendezvous",
419-
"torch.distributed.fsdp",
420-
"torch.distributed.launch",
421-
"torch.distributed.launcher",
422-
"torch.distributed.nn",
423-
"torch.distributed.nn.api.remote_module",
424-
"torch.distributed.optim",
425-
"torch.distributed.optim.optimizer",
426-
"torch.distributed.rendezvous",
427-
"torch.distributed.rpc.api",
428-
"torch.distributed.rpc.backend_registry",
429-
"torch.distributed.rpc.constants",
430-
"torch.distributed.rpc.internal",
431-
"torch.distributed.rpc.options",
432-
"torch.distributed.rpc.rref_proxy",
433-
"torch.distributed.rpc.server_process_global_profiler",
434-
"torch.distributed.run",
435-
"torch.distributed.tensor.parallel",
436-
"torch.distributed.utils",
437-
"torch.utils.tensorboard",
438-
"torch.utils.tensorboard.summary",
439-
"torch.utils.tensorboard.writer",
440-
"torch.ao.quantization.experimental.fake_quantize",
441-
"torch.ao.quantization.experimental.linear",
442-
"torch.ao.quantization.experimental.observer",
443-
"torch.ao.quantization.experimental.qconfig",
444-
}
445-
446408
errors = []
447409
for mod, exc in failures:
448-
if mod in public_allowlist:
449-
# TODO: Ensure this is the right error type
450-
451-
continue
452410
if mod in private_allowlist:
411+
# make sure mod is actually private
412+
assert any(t.startswith("_") for t in mod.split("."))
453413
continue
454414
errors.append(
455415
f"{mod} failed to import with error {type(exc).__qualname__}: {str(exc)}"

0 commit comments

Comments
 (0)