[DATALAD RUNCMD] run codespell throughout fixing typos automagically but ignoring the failure due to ambiguous typos

=== Do not change lines below ===
{
 "chain": [],
 "cmd": "codespell -w || :",
 "exit": 0,
 "extra_inputs": [],
 "inputs": [],
 "outputs": [],
 "pwd": "."
}
^^^ Do not change lines above ^^^
Yaroslav Halchenko 2024-04-17 15:59:53 -04:00
parent e315ccbf35
commit 63a405f4aa
11 changed files with 14 additions and 14 deletions

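The run record above is machine-readable, which is what makes this commit reproducible. A minimal sketch of producing such a record through DataLad's Python API, assuming a DataLad dataset in the current directory (the commit message is abbreviated):

    import datalad.api as dl

    # equivalent to: datalad run -m "<message>" "codespell -w || :"
    # the trailing "|| :" forces exit code 0 even when codespell refuses to
    # auto-fix ambiguous typos, matching the "exit": 0 field in the record
    dl.run(cmd="codespell -w || :", message="run codespell throughout fixing typos")

Given the hash recorded above, `datalad rerun 63a405f4aa` would replay the same command and commit the result again.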

@@ -11,7 +11,7 @@
 * the [register_container_type](https://github.com/hdmf-dev/hdmf/blob/dd39b3878523c4b03f5286fc740752befd192d8b/src/hdmf/build/manager.py#L727-L736) method in hdmf's TypeMap class seems to overwrite the loaded schema???
 * `__NS_CATALOG` seems to actually hold references to the schema but it doesn't seem to be used anywhere except within `__TYPE_MAP` ?
 * [NWBHDF5IO](https://github.com/NeurodataWithoutBorders/pynwb/blob/dev/src/pynwb/__init__.py#L237-L238) uses `TypeMap` to greate a `BuildManager`
-* Parent class [HDF5IO](https://github.com/hdmf-dev/hdmf/blob/dd39b3878523c4b03f5286fc740752befd192d8b/src/hdmf/backends/hdf5/h5tools.py#L37) then reimplements a lot of basic functionality from elsehwere
+* Parent class [HDF5IO](https://github.com/hdmf-dev/hdmf/blob/dd39b3878523c4b03f5286fc740752befd192d8b/src/hdmf/backends/hdf5/h5tools.py#L37) then reimplements a lot of basic functionality from elsewhere
 * Parent-parent metaclass [HDMFIO](https://github.com/hdmf-dev/hdmf/blob/dev/src/hdmf/backends/io.py) appears to be the final writing class?
 * `BuildManager.build` then [calls `TypeMap.build`](https://github.com/hdmf-dev/hdmf/blob/dd39b3878523c4b03f5286fc740752befd192d8b/src/hdmf/build/manager.py#L171) ???
 * `TypeMap.build` ...

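The bullets in the hunk above trace how pynwb wires hdmf's TypeMap into i/o. A minimal sketch of that chain from the user side, assuming pynwb is installed; the file name is a placeholder:

    from pynwb import NWBHDF5IO, get_manager

    # get_manager() wraps pynwb's global TypeMap in a BuildManager,
    # which NWBHDF5IO then hands down to the HDF5 backend
    manager = get_manager()
    with NWBHDF5IO("example.nwb", mode="r", manager=manager) as io:  # placeholder path
        nwbfile = io.read()  # BuildManager/TypeMap do the building under the hood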

@@ -72,7 +72,7 @@ is relatively complex, and so to use a schema extension one must also
 program the python classes or mappings to python class attributes
 needed to use them, configuration for getter and setter methods,
 i/o routines, etc. Since schema extensions are relatively hard to make,
-to accomodate heterogeneous data NWB uses `DynamicTable`s, which can be
+to accommodate heterogeneous data NWB uses `DynamicTable`s, which can be
 given arbitrary new columns.
 The loose coupling between schema and code has a few impacts:

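To make the `DynamicTable` escape hatch described above concrete: a minimal sketch, assuming only hdmf; the table and column names are hypothetical:

    from hdmf.common import DynamicTable

    # columns can be added at runtime without writing a schema extension
    table = DynamicTable(name="trials_example", description="hypothetical table")
    table.add_column(name="stim_amplitude", description="a column no schema declares")
    table.add_row(stim_amplitude=0.5)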

@@ -2,4 +2,4 @@
 ## v0.1.0 - Package exists
-thats about as much as can be said
+that's about as much as can be said


@@ -243,7 +243,7 @@ class NamespacesAdapter(Adapter):
                     ns = ns[0]
                     break
             else:
-                raise NameError(f"Couldnt find namespace {name}")
+                raise NameError(f"Couldn't find namespace {name}")
         else:
             ns = ns[0]


@@ -311,7 +311,7 @@ def truncate_file(source: Path, target: Optional[Path] = None, n:int=10) -> Path
         try:
             obj.resize(n, axis=0)
         except TypeError:
-            # contiguous arrays cant be trivially resized, so we have to copy and create a new dataset
+            # contiguous arrays can't be trivially resized, so we have to copy and create a new dataset
             tmp_name = obj.name + '__tmp'
             original_name = obj.name
             obj.parent.move(obj.name, tmp_name)
@@ -326,7 +326,7 @@ def truncate_file(source: Path, target: Optional[Path] = None, n:int=10) -> Path
     # use h5repack to actually remove the items from the dataset
     if shutil.which('h5repack') is None:
-        warnings.warn('Truncated file made, but since h5repack not found in path, file wont be any smaller')
+        warnings.warn("Truncated file made, but since h5repack not found in path, file won't be any smaller")
         return target
     print('Repacking hdf5...')

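The try/except in the first hunk above exists because h5py only allows in-place resizing of chunked datasets. A minimal sketch of the two cases, using a hypothetical scratch file:

    import h5py
    import numpy as np

    with h5py.File("scratch.h5", "w") as f:  # hypothetical scratch file
        chunked = f.create_dataset("chunked", data=np.arange(100), maxshape=(None,))
        chunked.resize(10, axis=0)  # fine: created with maxshape, so chunked
        contiguous = f.create_dataset("contiguous", data=np.arange(100))
        try:
            contiguous.resize(10, axis=0)
        except TypeError:
            pass  # the copy-and-replace branch above handles this case

The second hunk reflects that shrinking datasets does not shrink the file itself; HDF5 does not return freed space without a rewrite, which is why h5repack is invoked afterwards.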

@@ -22,7 +22,7 @@ FlatDType = EnumDefinition(
 DTypeTypes = []
 for nwbtype, linkmltype in flat_to_linkml.items():
-    # skip the dtypes that are the same as the builtin linkml types (which should alredy exist)
+    # skip the dtypes that are the same as the builtin linkml types (which should already exist)
     # to avoid a recursion error
     if linkmltype == nwbtype:
         continue

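For context, the `EnumDefinition` being filled in above comes from the LinkML metamodel. A hypothetical reconstruction of the pattern, with a shortened list of dtypes:

    from linkml_runtime.linkml_model.meta import EnumDefinition, PermissibleValue

    # NWB's flat dtypes become permissible values of a single LinkML enum
    FlatDType = EnumDefinition(
        name="FlatDType",
        permissible_values=[
            PermissibleValue(text=t) for t in ("int8", "int16", "float32", "text")
        ],
    )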

@@ -815,7 +815,7 @@ def resolve_references(src: dict, completed: Dict[str, H5ReadResult]) -> Tuple[d
         if isinstance(item, HDF5_Path):
             other_item = completed.get(item, None)
             if other_item is None:
-                errors.append(f"Couldnt find: {item}")
+                errors.append(f"Couldn't find: {item}")
                 continue
             res[path] = other_item.result
             completes.append(item)


@@ -23,7 +23,7 @@ def model_from_dynamictable(group:h5py.Group, base:Optional[BaseModel] = None) -
         nptype = group[col].dtype.type
         if nptype == np.void:
-            warnings.warn(f"Cant handle numpy void type for column {col} in {group.name}")
+            warnings.warn(f"Can't handle numpy void type for column {col} in {group.name}")
             continue
         type_ = Optional[NDArray[Any, nptype]]
@@ -64,7 +64,7 @@ def dynamictable_to_model(
         # # dask can't handle this, we just arrayproxy it
         items[col] = NDArrayProxy(h5f_file=group.file.filename, path=group[col].name)
         #else:
-        #     warnings.warn(f"Dask cant handle object type arrays like {col} in {group.name}. Skipping")
+        #     warnings.warn(f"Dask can't handle object type arrays like {col} in {group.name}. Skipping")
         #     pdb.set_trace()
         #     # can't auto-chunk with "object" type
         #     items[col] = da.from_array(group[col], chunks=-1)


@@ -189,7 +189,7 @@ class NDArrayProxy():
         obj = h5f.get(self.path)
         return obj[slice]

     def __setitem__(self, slice, value):
-        raise NotImplementedError(f"Cant write into an arrayproxy yet!")
+        raise NotImplementedError(f"Can't write into an arrayproxy yet!")

     @classmethod

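Read together, the last two hunks imply the intended use of `NDArrayProxy`: reads are lazy, writes are refused. A usage sketch, assuming the class is imported from its module in this package; the file and dataset paths are placeholders:

    proxy = NDArrayProxy(h5f_file="data.nwb", path="/intervals/trials/stim")
    first_ten = proxy[0:10]  # opens the HDF5 file and slices on demand
    try:
        proxy[0:1] = [1.0]  # __setitem__ is a stub
    except NotImplementedError:
        pass  # writing through the proxy is explicitly unsupported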

@@ -16,7 +16,7 @@ def test_build_base(nwb_schema):
     assert len(base.classes) == 1
     img = base.classes[0]
     assert img.name == "Image"
-    # no parent class, tree_root shoudl be true
+    # no parent class, tree_root should be true
     assert img.tree_root
     assert len(img.attributes) == 3


@@ -340,12 +340,12 @@ groups:
     each point in time is assumed to be 2-D (has only x & y dimensions).'
   groups:
   - neurodata_type_inc: CorrectedImageStack
-    doc: Reuslts from motion correction of an image stack.
+    doc: Results from motion correction of an image stack.
     quantity: '+'
 - neurodata_type_def: CorrectedImageStack
   neurodata_type_inc: NWBDataInterface
-  doc: Reuslts from motion correction of an image stack.
+  doc: Results from motion correction of an image stack.
   groups:
   - name: corrected
     neurodata_type_inc: ImageSeries