working lazy imports lmao this is so dumb and bad
This commit is contained in:
parent
921bc5e870
commit
82055e16c4
10 changed files with 546 additions and 50 deletions
|
@ -6,6 +6,6 @@ hell ya it's got side effects
|
||||||
|
|
||||||
## TODO
|
## TODO
|
||||||
|
|
||||||
- oh frick we actually need to conert to a NodeTransformer so that
|
- oh frick we actually need to convert to a NodeTransformer so that
|
||||||
we can control the rendering of child nodes (aka we can recursively
|
we can control the rendering of child nodes (aka we can recursively
|
||||||
get the names in the namespace rather than having an entry-only visitor)
|
get the names in the namespace rather than having an entry-only visitor)
|
154
pdm.lock
154
pdm.lock
|
@ -2,10 +2,160 @@
|
||||||
# It is not intended for manual editing.
|
# It is not intended for manual editing.
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
groups = ["default"]
|
groups = ["default", "tests"]
|
||||||
strategy = ["inherit_metadata"]
|
strategy = ["inherit_metadata"]
|
||||||
lock_version = "4.5.0"
|
lock_version = "4.5.0"
|
||||||
content_hash = "sha256:6e3746c7427353cd94cfd6c1a5a6309fdfd871714e3b1213ded2befc6ef522a0"
|
content_hash = "sha256:7fbe8fcf82251321bd2b6ad10d735dd1e657cbf01fed87457fcd3e33a453592a"
|
||||||
|
|
||||||
[[metadata.targets]]
|
[[metadata.targets]]
|
||||||
requires_python = ">=3.10"
|
requires_python = ">=3.10"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorama"
|
||||||
|
version = "0.4.6"
|
||||||
|
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||||
|
summary = "Cross-platform colored terminal text."
|
||||||
|
groups = ["tests"]
|
||||||
|
marker = "sys_platform == \"win32\""
|
||||||
|
files = [
|
||||||
|
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||||
|
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "exceptiongroup"
|
||||||
|
version = "1.2.2"
|
||||||
|
requires_python = ">=3.7"
|
||||||
|
summary = "Backport of PEP 654 (exception groups)"
|
||||||
|
groups = ["tests"]
|
||||||
|
marker = "python_version < \"3.11\""
|
||||||
|
files = [
|
||||||
|
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
|
||||||
|
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iniconfig"
|
||||||
|
version = "2.0.0"
|
||||||
|
requires_python = ">=3.7"
|
||||||
|
summary = "brain-dead simple config-ini parsing"
|
||||||
|
groups = ["tests"]
|
||||||
|
files = [
|
||||||
|
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
|
||||||
|
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "numpy"
|
||||||
|
version = "2.1.2"
|
||||||
|
requires_python = ">=3.10"
|
||||||
|
summary = "Fundamental package for array computing in Python"
|
||||||
|
groups = ["tests"]
|
||||||
|
files = [
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"},
|
||||||
|
{file = "numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"},
|
||||||
|
{file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"},
|
||||||
|
{file = "numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"},
|
||||||
|
{file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"},
|
||||||
|
{file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"},
|
||||||
|
{file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"},
|
||||||
|
{file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"},
|
||||||
|
{file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"},
|
||||||
|
{file = "numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "24.1"
|
||||||
|
requires_python = ">=3.8"
|
||||||
|
summary = "Core utilities for Python packages"
|
||||||
|
groups = ["tests"]
|
||||||
|
files = [
|
||||||
|
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
|
||||||
|
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pluggy"
|
||||||
|
version = "1.5.0"
|
||||||
|
requires_python = ">=3.8"
|
||||||
|
summary = "plugin and hook calling mechanisms for python"
|
||||||
|
groups = ["tests"]
|
||||||
|
files = [
|
||||||
|
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
|
||||||
|
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest"
|
||||||
|
version = "8.3.3"
|
||||||
|
requires_python = ">=3.8"
|
||||||
|
summary = "pytest: simple powerful testing with Python"
|
||||||
|
groups = ["tests"]
|
||||||
|
dependencies = [
|
||||||
|
"colorama; sys_platform == \"win32\"",
|
||||||
|
"exceptiongroup>=1.0.0rc8; python_version < \"3.11\"",
|
||||||
|
"iniconfig",
|
||||||
|
"packaging",
|
||||||
|
"pluggy<2,>=1.5",
|
||||||
|
"tomli>=1; python_version < \"3.11\"",
|
||||||
|
]
|
||||||
|
files = [
|
||||||
|
{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
|
||||||
|
{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "tomli"
|
||||||
|
version = "2.0.2"
|
||||||
|
requires_python = ">=3.8"
|
||||||
|
summary = "A lil' TOML parser"
|
||||||
|
groups = ["tests"]
|
||||||
|
marker = "python_version < \"3.11\""
|
||||||
|
files = [
|
||||||
|
{file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
|
||||||
|
{file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
|
||||||
|
]
|
||||||
|
|
|
@ -10,6 +10,11 @@ requires-python = ">=3.10"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
license = {text = "EUPL-1.2"}
|
license = {text = "EUPL-1.2"}
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
tests = [
|
||||||
|
"pytest>=8.3.3",
|
||||||
|
"numpy>=2.1.2",
|
||||||
|
]
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ["pdm-backend"]
|
requires = ["pdm-backend"]
|
||||||
build-backend = "pdm.backend"
|
build-backend = "pdm.backend"
|
||||||
|
|
|
@ -1,4 +1,2 @@
|
||||||
import sys
|
from lazy_import.importer import install
|
||||||
|
install()
|
||||||
def getattr(name):
|
|
||||||
pass
|
|
||||||
|
|
|
@ -1,44 +1,86 @@
|
||||||
|
from typing import Literal, overload
|
||||||
import ast
|
import ast
|
||||||
|
import pdb
|
||||||
|
from collections.abc import Container
|
||||||
from collections import ChainMap
|
from collections import ChainMap
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
@dataclass
|
@dataclass(eq=True)
|
||||||
class Name:
|
class Name:
|
||||||
|
|
||||||
module: str
|
module: str
|
||||||
name: str | None = None
|
name: str | None = None
|
||||||
aliases: set = field(default_factory=set)
|
aliases: set[str] = field(default_factory=set)
|
||||||
|
|
||||||
def __contains__(self, item: str):
|
def __contains__(self, item: str):
|
||||||
return item in self.aliases or (
|
return item in self.aliases or (
|
||||||
item == self.module
|
item == self.module
|
||||||
if self.name is None else
|
if self.name is None else
|
||||||
item == self.name
|
item in (self.name, self.id)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def id(self) -> str:
|
||||||
|
if self.name is None:
|
||||||
|
return self.module
|
||||||
|
else:
|
||||||
|
return '.'.join([self.module, self.name])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def parts(self) -> list[str]:
|
||||||
|
"""
|
||||||
|
All the subparts of the fully qualified name
|
||||||
|
|
||||||
|
Eg if we were `module.submodule.subsubmodule.A`
|
||||||
|
|
||||||
|
this would be
|
||||||
|
- `module`
|
||||||
|
- `module.submodule`
|
||||||
|
- `module.submodule.submodule`
|
||||||
|
- `module.submodule.subsubmodule.A`
|
||||||
|
|
||||||
|
"""
|
||||||
|
subparts = self.module.split('.')
|
||||||
|
if self.name:
|
||||||
|
subparts.append(self.name)
|
||||||
|
return ['.'.join(subparts[:i+1]) for i in range(len(subparts))]
|
||||||
|
|
||||||
|
def in_dict(self, other: Container):
|
||||||
|
return any(part in other for part in self.parts)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_ast_name(cls, name: ast.Name) -> 'Name':
|
def from_ast_name(cls, name: ast.Name) -> 'Name':
|
||||||
return cls.from_str(name.id)
|
return cls.from_str(name.id)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_str(cls, name: str) -> 'Name':
|
def from_str(cls, name: str) -> 'Name':
|
||||||
if len(name_parts := name.rsplit(".")) > 1:
|
if len(name_parts := name.rsplit(".", maxsplit=1)) > 1:
|
||||||
return Name(module=name_parts[0], name=name_parts[1])
|
return Name(module=name_parts[0], name=name_parts[1])
|
||||||
else:
|
else:
|
||||||
return Name(module=name)
|
return Name(module=name)
|
||||||
|
|
||||||
@dataclass
|
@dataclass(eq=True)
|
||||||
class NameCollection:
|
class NameCollection:
|
||||||
|
|
||||||
names: list[Name] = field(default_factory=set)
|
names: list[Name] = field(default_factory=list)
|
||||||
|
|
||||||
|
def add(self, new_name: ast.Name | Name):
|
||||||
|
if isinstance(new_name, ast.Name):
|
||||||
|
new_name = Name.from_ast_name(new_name)
|
||||||
|
|
||||||
def add(self, new_name: ast.Name):
|
|
||||||
for name in self.names:
|
for name in self.names:
|
||||||
if new_name.id in name:
|
|
||||||
name.aliases.add(new_name.id)
|
if new_name.module == name.name and not new_name.name:
|
||||||
|
# Make an alias if we have something that exists for this already
|
||||||
|
name.aliases.add(new_name.module)
|
||||||
return
|
return
|
||||||
self.names.append(Name.from_ast_name(new_name))
|
elif new_name.module == name.module:
|
||||||
|
# Otherwise we have something that we're going to import already and skip
|
||||||
|
return
|
||||||
|
|
||||||
|
self.names.append(new_name)
|
||||||
|
|
||||||
|
|
||||||
class NameVisitor(ast.NodeVisitor):
|
class NameVisitor(ast.NodeVisitor):
|
||||||
"""
|
"""
|
||||||
|
@ -46,75 +88,215 @@ class NameVisitor(ast.NodeVisitor):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.real_names = ChainMap()
|
self.real_names = ChainMap({'self': None}, globals()['__builtins__'])
|
||||||
self.fake_names = NameCollection()
|
self.fake_names = NameCollection()
|
||||||
|
|
||||||
def pop_ctx(self):
|
def pop_ctx(self):
|
||||||
self.real_names = self.real_names.parents
|
self.real_names = self.real_names.parents
|
||||||
|
self.filter_fake_names()
|
||||||
|
|
||||||
def push_ctx(self):
|
def push_ctx(self):
|
||||||
self.real_names = self.real_names.new_child()
|
self.real_names = self.real_names.new_child()
|
||||||
|
|
||||||
def visit_Import(self, node: ast.Import | ast.ImportFrom) -> None:
|
def visit_Import(self, node: ast.Import | ast.ImportFrom) -> None:
|
||||||
"""Add to names"""
|
"""Add to names"""
|
||||||
|
# print(ast.dump(node))
|
||||||
for alias in node.names:
|
for alias in node.names:
|
||||||
name = alias.asname if alias.asname else alias.name
|
name = alias.asname if alias.asname else alias.name
|
||||||
self.real_names[name] = node
|
self.real_names[name] = node
|
||||||
|
|
||||||
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
|
def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
|
||||||
"""Add to names"""
|
"""Add to names"""
|
||||||
self.visit_Import(node)
|
return self.visit_Import(node)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def visit_Name(self, node: ast.Name):
|
def visit_Name(self, node: ast.Name):
|
||||||
"""Either add to real names or fake names depending on ctx"""
|
"""Either add to real names or fake names depending on ctx"""
|
||||||
if node.ctx == ast.Store():
|
# print(ast.dump(node))
|
||||||
|
if isinstance(node.ctx, ast.Store):
|
||||||
self.real_names[node.id] = node
|
self.real_names[node.id] = node
|
||||||
elif node.ctx == ast.Load() and node.id not in self.real_names:
|
elif isinstance(node.ctx, ast.Load):
|
||||||
self.fake_names.add(node)
|
name = Name.from_ast_name(node)
|
||||||
elif node.ctx == ast.Del() and node.id in self.real_names:
|
if not name.in_dict(self.real_names):
|
||||||
|
self.fake_names.add(name)
|
||||||
|
elif isinstance(node.ctx, ast.Del) and node.id in self.real_names:
|
||||||
del self.real_names[node.id]
|
del self.real_names[node.id]
|
||||||
else: # pragma: no cover
|
else: # pragma: no cover
|
||||||
if node.ctx not in (ast.Del(), ast.Store(), ast.Load()):
|
if type(node.ctx) not in (ast.Del, ast.Store, ast.Load):
|
||||||
raise ValueError('How did this happen!? wrong node ctx type?')
|
raise ValueError(f'How did this happen!? wrong node ctx type? {node.ctx}')
|
||||||
|
|
||||||
def visit_Attribute(self, node: ast.Attribute):
|
def visit_Attribute(self, node: ast.Attribute):
|
||||||
|
# print(ast.dump(node))
|
||||||
attr_name = flatten_attribute(node)
|
attr_name = flatten_attribute(node)
|
||||||
if node.ctx == ast.Load():
|
if attr_name is None:
|
||||||
if attr_name not in self.real_names:
|
return
|
||||||
self.fake_names.add(Name.from_str(attr_name))
|
|
||||||
elif node.ctx == ast.Store():
|
if isinstance(node.ctx, ast.Load):
|
||||||
|
name = Name.from_str(attr_name)
|
||||||
|
if not name.in_dict(self.real_names):
|
||||||
|
self.fake_names.add(name)
|
||||||
|
elif isinstance(node.ctx, ast.Store):
|
||||||
self.real_names[attr_name] = node
|
self.real_names[attr_name] = node
|
||||||
elif node.ctx == ast.Del() and attr_name in self.real_names:
|
elif isinstance(node.ctx, ast.Del) and attr_name in self.real_names:
|
||||||
del self.real_names[attr_name]
|
del self.real_names[attr_name]
|
||||||
else: # pragma: no cover
|
else: # pragma: no cover
|
||||||
if node.ctx not in (ast.Del(), ast.Store(), ast.Load()):
|
if type(node.ctx) not in (ast.Del, ast.Store, ast.Load):
|
||||||
raise ValueError('How did this happen!? wrong node ctx type?')
|
raise ValueError(f'How did this happen!? wrong node ctx type? {node.ctx}')
|
||||||
|
|
||||||
def visit_FunctionDef(self, node):
|
def visit_FunctionDef(self, node: ast.FunctionDef | ast.AsyncFunctionDef | ast.Lambda):
|
||||||
"""push context"""
|
"""push context"""
|
||||||
|
# print(ast.dump(node))
|
||||||
|
|
||||||
|
# names that should be defined in the parent scope
|
||||||
|
if hasattr(node, 'returns') and node.returns:
|
||||||
|
self._handle_annotation(node.returns)
|
||||||
|
|
||||||
|
if hasattr(node, 'name'):
|
||||||
|
self.real_names[node.name] = node
|
||||||
|
|
||||||
|
# enter function scope
|
||||||
self.push_ctx()
|
self.push_ctx()
|
||||||
args = node.args
|
args = node.args
|
||||||
for arg in args.args:
|
for arg in args.args:
|
||||||
self.real_names[arg.arg] = arg
|
self.real_names[arg.arg] = arg
|
||||||
if args.vararg:
|
if arg.annotation:
|
||||||
|
self._handle_annotation(arg.annotation)
|
||||||
|
if hasattr(args, 'vararg') and args.vararg:
|
||||||
self.real_names[args.vararg.arg] = args.vararg
|
self.real_names[args.vararg.arg] = args.vararg
|
||||||
if args.kwarg:
|
if hasattr(args, 'kwarg') and args.kwarg:
|
||||||
self.real_names[args.kwarg.arg] = args.kwarg
|
self.real_names[args.kwarg.arg] = args.kwarg
|
||||||
|
|
||||||
|
self.generic_visit(node)
|
||||||
|
|
||||||
|
# exit function scope
|
||||||
|
self.pop_ctx()
|
||||||
|
|
||||||
|
def _handle_annotation(self, annotation: ast.Attribute | ast.Constant):
|
||||||
|
return_name = None
|
||||||
|
if isinstance(annotation, ast.Attribute):
|
||||||
|
return_name = flatten_attribute(annotation)
|
||||||
|
elif isinstance(annotation, ast.Constant):
|
||||||
|
return_name = annotation.value
|
||||||
|
else:
|
||||||
|
TypeError(f"Dont know how to handle annotation type: {ast.dump(annotation)}")
|
||||||
|
|
||||||
|
if return_name is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
name = Name.from_str(return_name)
|
||||||
|
if not name.in_dict(self.real_names):
|
||||||
|
self.fake_names.add(name)
|
||||||
|
|
||||||
def visit_AsyncFunctionDef(self, node):
|
def visit_AsyncFunctionDef(self, node):
|
||||||
"""push context"""
|
"""push context"""
|
||||||
self.push_ctx()
|
self.visit_FunctionDef(node)
|
||||||
|
|
||||||
|
def visit_Lambda(self, node: ast.Lambda):
|
||||||
|
self.visit_FunctionDef(node)
|
||||||
|
|
||||||
def visit_ClassDef(self, node):
|
def visit_ClassDef(self, node):
|
||||||
"""push context"""
|
"""push context"""
|
||||||
|
# print(ast.dump(node))
|
||||||
|
self.real_names[node.name] = node
|
||||||
self.push_ctx()
|
self.push_ctx()
|
||||||
|
self.generic_visit(node)
|
||||||
def visit_Return(self, node):
|
|
||||||
"""pop context"""
|
|
||||||
self.pop_ctx()
|
self.pop_ctx()
|
||||||
|
|
||||||
|
def visit_ListComp(self, node: ast.ListComp | ast.DictComp | ast.GeneratorExp | ast.SetComp):
|
||||||
|
self.push_ctx()
|
||||||
|
for gen in node.generators:
|
||||||
|
if isinstance(gen.target, ast.Tuple):
|
||||||
|
for name in gen.target.elts:
|
||||||
|
self.real_names[name.id] = name
|
||||||
|
else:
|
||||||
|
self.real_names[gen.target.id] = gen.target
|
||||||
|
self.generic_visit(node)
|
||||||
|
self.pop_ctx()
|
||||||
|
# if isinstance(gen.iter, ast.Name):
|
||||||
|
# self.visit_Name(gen.iter)
|
||||||
|
# elif isinstance(gen.iter, ast.Call):
|
||||||
|
# self.visit_Attribute(gen.iter.func)
|
||||||
|
|
||||||
|
def visit_DictComp(self, node: ast.DictComp):
|
||||||
|
self.visit_ListComp(node)
|
||||||
|
|
||||||
|
def visit_GeneratorExp(self, node: ast.GeneratorExp):
|
||||||
|
self.visit_ListComp(node)
|
||||||
|
|
||||||
|
def visit_SetComp(self, node: ast.SetComp):
|
||||||
|
self.visit_ListComp(node)
|
||||||
|
|
||||||
|
def visit_For(self, node: ast.For):
|
||||||
|
# for loops don't have scope, so we don't push/pop here
|
||||||
|
# self.push_ctx()
|
||||||
|
if isinstance(node.target, ast.Tuple):
|
||||||
|
for name in node.target.elts:
|
||||||
|
self.real_names[name.id] = name
|
||||||
|
else:
|
||||||
|
self.real_names[node.target.id] = node.target
|
||||||
|
self.generic_visit(node)
|
||||||
|
# self.pop_ctx()
|
||||||
|
|
||||||
|
def visit_ExceptHandler(self, node: ast.ExceptHandler):
|
||||||
|
self.push_ctx()
|
||||||
|
if node.name:
|
||||||
|
self.real_names[node.name] = node
|
||||||
|
self.generic_visit(node)
|
||||||
|
self.pop_ctx()
|
||||||
|
|
||||||
|
def filter_fake_names(self):
|
||||||
|
"""
|
||||||
|
After visiting, we remove top-level module level definitions from
|
||||||
|
fake names, since it's possible to refer to things out of order in scopes
|
||||||
|
"""
|
||||||
|
self.fake_names.names = [n for n in self.fake_names.names if n.module not in self.real_names]
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def generate_frontmatter(node: ast.AST, mode: Literal['ast'] = 'ast') -> list[ast.Import | ast.Assign]: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def generate_frontmatter(node: ast.AST, mode: Literal['str'] = 'str') -> str: ...
|
||||||
|
|
||||||
|
|
||||||
|
def generate_frontmatter(node: ast.AST, mode: Literal['ast', 'str'] = 'ast') -> list[ast.Import | ast.Assign] | str:
|
||||||
|
visitor = NameVisitor()
|
||||||
|
visitor.visit(node)
|
||||||
|
|
||||||
|
modules = list(dict.fromkeys([name.module for name in visitor.fake_names.names]))
|
||||||
|
|
||||||
|
if mode == 'ast':
|
||||||
|
return _frontmatter_ast(modules, visitor)
|
||||||
|
elif mode == 'str':
|
||||||
|
return _frontmatter_str(modules, visitor)
|
||||||
|
else:
|
||||||
|
raise ValueError("Unknown frontmatter mode")
|
||||||
|
|
||||||
|
|
||||||
|
def _frontmatter_ast(modules: list[str], visitor: NameVisitor) -> list[ast.Import | ast.Assign]:
|
||||||
|
imports = [ast.Import(names=[ast.alias(name)]) for name in modules]
|
||||||
|
assignments = []
|
||||||
|
for name in visitor.fake_names.names:
|
||||||
|
for alias in name.aliases:
|
||||||
|
assignments.append(
|
||||||
|
ast.Assign(targets=[ast.Name(id=alias, ctx=ast.Store())],
|
||||||
|
value=ast.Name(id=name.id, ctx=ast.Load()))
|
||||||
|
)
|
||||||
|
|
||||||
|
return imports + assignments
|
||||||
|
|
||||||
|
def _frontmatter_str(modules: list[str], visitor: NameVisitor) -> str:
|
||||||
|
imports = [f'import {mod}' for mod in modules]
|
||||||
|
|
||||||
|
assignments = []
|
||||||
|
for name in visitor.fake_names.names:
|
||||||
|
for alias in name.aliases:
|
||||||
|
assignments.append(f'{alias} = {name.id}')
|
||||||
|
|
||||||
|
return '\n'.join(imports + assignments)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def flatten_attribute(attr: ast.Attribute) -> str:
|
def flatten_attribute(attr: ast.Attribute) -> str:
|
||||||
if isinstance(attr.value, ast.Attribute):
|
if isinstance(attr.value, ast.Attribute):
|
||||||
|
|
|
@ -0,0 +1,104 @@
|
||||||
|
import inspect
|
||||||
|
import pdb
|
||||||
|
import sys
|
||||||
|
import ast
|
||||||
|
from typing import Optional
|
||||||
|
from types import ModuleType
|
||||||
|
import os
|
||||||
|
from importlib.abc import MetaPathFinder, Loader, SourceLoader, FileLoader
|
||||||
|
from importlib.machinery import FileFinder
|
||||||
|
from importlib import invalidate_caches
|
||||||
|
from importlib.machinery import ModuleSpec, SourceFileLoader
|
||||||
|
import importlib.util
|
||||||
|
from importlib.util import spec_from_file_location
|
||||||
|
|
||||||
|
|
||||||
|
from lazy_import.ast import NameVisitor, generate_frontmatter
|
||||||
|
|
||||||
|
|
||||||
|
class LazyLoader(FileLoader, SourceLoader):
|
||||||
|
"""
|
||||||
|
Try to import any names that are referenced without imports
|
||||||
|
|
||||||
|
Thx to https://stackoverflow.com/a/43573798/13113166 for the clear example
|
||||||
|
"""
|
||||||
|
|
||||||
|
def get_data(self, path) -> str | None:
|
||||||
|
"""
|
||||||
|
Modify the source code to include imports and assignments to make
|
||||||
|
lazy imports work.
|
||||||
|
|
||||||
|
Do it this way rather than using `source_to_code` because
|
||||||
|
this way we still get meaningful error messages that can show
|
||||||
|
the source lines that are failing
|
||||||
|
"""
|
||||||
|
with open(path) as f:
|
||||||
|
data = f.read()
|
||||||
|
|
||||||
|
if self.name.split('.')[0] in sys.stdlib_module_names:
|
||||||
|
return data
|
||||||
|
|
||||||
|
parsed = ast.parse(data)
|
||||||
|
frontmatter = generate_frontmatter(parsed)
|
||||||
|
|
||||||
|
# put the frontmatter first and replace
|
||||||
|
frontmatter.extend(parsed.body)
|
||||||
|
parsed.body = frontmatter
|
||||||
|
|
||||||
|
# fix after modifying and return to string
|
||||||
|
parsed = ast.fix_missing_locations(parsed)
|
||||||
|
deparsed = ast.unparse(parsed)
|
||||||
|
return deparsed
|
||||||
|
|
||||||
|
class LazyFinder(MetaPathFinder):
|
||||||
|
def find_spec(self, fullname, path, target=None):
|
||||||
|
if path is None or path == "":
|
||||||
|
path = [os.getcwd()] # top level import --
|
||||||
|
|
||||||
|
if fullname.split('.')[0] in sys.builtin_module_names:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if "." in fullname:
|
||||||
|
*parents, name = fullname.split(".")
|
||||||
|
else:
|
||||||
|
name = fullname
|
||||||
|
|
||||||
|
for entry in path:
|
||||||
|
if os.path.isdir(os.path.join(entry, name)):
|
||||||
|
# this module has child modules
|
||||||
|
filename = os.path.join(entry, name, "__init__.py")
|
||||||
|
submodule_locations = [os.path.join(entry, name)]
|
||||||
|
else:
|
||||||
|
filename = os.path.join(entry, name + ".py")
|
||||||
|
submodule_locations = None
|
||||||
|
if not os.path.exists(filename):
|
||||||
|
continue
|
||||||
|
|
||||||
|
return spec_from_file_location(fullname, filename, loader=LazyLoader(fullname, filename),
|
||||||
|
submodule_search_locations=submodule_locations)
|
||||||
|
|
||||||
|
return None # we don't know how to import this
|
||||||
|
|
||||||
|
def patch_importing_frame():
    """
    Inject needed imports into the importing frame as well :)

    Walks the call stack to the outermost frame, re-parses that frame's
    source, and exec()s the generated frontmatter into the frame's
    namespace so names referenced there resolve after install().
    """
    # getouterframes() lists frames innermost-first, so [-1] is the
    # outermost frame on the stack.
    # NOTE(review): this assumes the outermost frame is the importing
    # module's top level — confirm when install() is called from deeper
    # inside a package.
    current_frame = inspect.currentframe()
    outer_frames = inspect.getouterframes(current_frame, context=3)
    importing_frame = outer_frames[-1].frame

    try:
        source = inspect.getsource(importing_frame)
    except OSError:
        # stdin, compiled extensions, etc. — no retrievable source, so
        # there is nothing we can patch; bail out quietly.
        return

    node = ast.parse(source)
    # mode='str' presumably makes generate_frontmatter return source text
    # suitable for exec() rather than AST nodes — TODO confirm against
    # generate_frontmatter's signature.
    frontmatter = generate_frontmatter(node, mode='str')
    # Execute the generated statements directly in the importing frame's
    # namespaces; at module top level f_globals and f_locals are the same
    # dict, so the injected names become module globals.
    exec(frontmatter, importing_frame.f_globals, importing_frame.f_locals)
||||||
|
def install():
    """
    Activate lazy importing.

    Injects the generated frontmatter into the calling frame via
    ``patch_importing_frame()`` and registers a ``LazyFinder`` at the
    front of ``sys.meta_path``.

    Safe to call more than once: a new finder is only inserted when no
    ``LazyFinder`` is already registered, so repeated calls do not stack
    duplicate import hooks.
    """
    patch_importing_frame()
    # Guard against piling up duplicate finders on repeated install()
    # calls — each duplicate would re-run the source rewrite per import.
    if not any(isinstance(finder, LazyFinder) for finder in sys.meta_path):
        sys.meta_path.insert(0, LazyFinder())
|
@ -1,3 +1,6 @@
|
||||||
import lazy_import
|
import lazy_import
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
__all__ = ['lazy_import']
|
__all__ = ['lazy_import']
|
||||||
|
|
||||||
|
DATA_DIR = Path(__file__).resolve().parent / 'data'
|
|
@ -3,20 +3,33 @@ import collections.abc
|
||||||
import json as jay_son
|
import json as jay_son
|
||||||
from typing import List
|
from typing import List
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
from collections.abc import ChainMap as cm
|
from collections import ChainMap as cm
|
||||||
|
|
||||||
mod_variable = 10
|
mod_variable = 10
|
||||||
|
|
||||||
|
my_list = [1,2,3]
|
||||||
|
my_dict = {'a': 1, 'b':2}
|
||||||
|
list_comprehension = [item for item in my_list]
|
||||||
|
dict_comprehension = {key: val for key, val in my_dict.items()}
|
||||||
|
|
||||||
|
for item in my_list:
|
||||||
|
zzz = 1
|
||||||
|
|
||||||
|
|
||||||
def imports_are_lazy():
|
def imports_are_lazy():
|
||||||
ast
|
ast
|
||||||
typing.List
|
typing.List
|
||||||
re.match('hell ya', 'hell ya')
|
re.match('hell ya', 'hell ya')
|
||||||
|
match('hell ya again', 'hell ya again')
|
||||||
# one day you should be able to do this
|
# one day you should be able to do this
|
||||||
# numpy as np
|
# numpy as np
|
||||||
|
|
||||||
def a_function(a: typing.Iterable) -> importlib.abc.Finder:
|
def a_function(a: typing.Iterable) -> importlib.abc.Finder:
|
||||||
a = 1
|
a = 1
|
||||||
|
|
||||||
|
def another_function(a: 'pathlib.Path') -> 'os.path.basename':
|
||||||
|
pass
|
||||||
|
|
||||||
class AClass():
|
class AClass():
|
||||||
zz = 1
|
zz = 1
|
||||||
yy = array.array()
|
yy = array.array()
|
||||||
|
@ -44,8 +57,8 @@ def test_names_are_lazy():
|
||||||
"""
|
"""
|
||||||
you can just use the last unique segment
|
you can just use the last unique segment
|
||||||
"""
|
"""
|
||||||
_ = numpy.random.random(100)
|
_ = random.randint(1, 10)
|
||||||
_ = random
|
_ = randint(1, 10)
|
||||||
|
|
||||||
assert random is numpy.random.random
|
assert randint is random.randint
|
||||||
|
|
||||||
|
|
|
@ -1 +1,43 @@
|
||||||
def test
|
import ast
|
||||||
|
|
||||||
|
from lazy_import.ast import flatten_attribute, NameVisitor, NameCollection, Name
|
||||||
|
|
||||||
|
from .conftest import DATA_DIR
|
||||||
|
|
||||||
|
# def test_flatten_attribute():
|
||||||
|
# attr = ast.Attribute(
|
||||||
|
# value=ast.Attribute(
|
||||||
|
# value=ast.Name(id='numpy'),
|
||||||
|
# attr='random'),
|
||||||
|
# attr='random'
|
||||||
|
# )
|
||||||
|
# assert isinstance(attr, ast.Attribute)
|
||||||
|
#
|
||||||
|
# assert flatten_attribute(attr) == "numpy.random.random"
|
||||||
|
|
||||||
|
def test_find_fake_names():
    """Visiting the fixture file yields exactly the expected fake names."""
    # Every lazily-referenced name the visitor should discover in
    # data/input_file.py, in discovery order.
    expected = NameCollection(names=[
        Name(module='ast', name=None, aliases=set()),
        Name(module='typing', name='List', aliases=set()),
        Name(module='re', name='match', aliases={'match'}),
        Name(module='importlib.abc', name='Finder', aliases=set()),

        Name(module='os.path', name='basename', aliases=set()),
        Name(module='pathlib', name='Path', aliases=set()),
        Name(module='array', name='array', aliases=set()),
        Name(module='base64', name='b64decode', aliases=set()),
        Name(module='binascii', name='hexlify', aliases=set()),
        Name(module='random', name='randint', aliases={'randint'}),
    ])

    source_code = (DATA_DIR / 'input_file.py').read_text()

    tree = ast.parse(source_code)
    visitor = NameVisitor()
    visitor.visit(tree)
    assert visitor.fake_names == expected
|
@ -2,7 +2,7 @@ import lazy_import
|
||||||
from typing import List
|
from typing import List
|
||||||
import collections.abc
|
import collections.abc
|
||||||
from collections.abc import Callable
|
from collections.abc import Callable
|
||||||
from collections.abc import ChainMap as cm
|
from collections import ChainMap as cm
|
||||||
|
|
||||||
mod_variable = 10
|
mod_variable = 10
|
||||||
|
|
||||||
|
@ -26,8 +26,7 @@ def test_names_are_lazy():
|
||||||
"""
|
"""
|
||||||
you can just use the last unique segment
|
you can just use the last unique segment
|
||||||
"""
|
"""
|
||||||
_ = numpy.random.random(100)
|
_ = random.randint(1, 10)
|
||||||
_ = random
|
_ = randint(1, 10)
|
||||||
|
|
||||||
assert random is numpy.random.random
|
|
||||||
|
|
||||||
|
assert randint is random.randint
|
||||||
|
|
Loading…
Reference in a new issue