Cleanup: Fix typos in scripts subdirectory

Fix spellings in comments and strings
Found via codespell

Pull Request: https://projects.blender.org/blender/blender/pulls/145834
This commit is contained in:
luz paz
2025-09-11 11:29:06 +02:00
committed by Pratik Borhade
parent a8d366b48a
commit 072f956ce0
24 changed files with 33 additions and 33 deletions

View File

@@ -29,7 +29,7 @@ Tests:
Test the command line program, "blender_ext.py" Blender is not used.
- test_blender
Test blender (currenlty only some limited tests).
Test blender (currently only some limited tests).
- test_cli_blender
Runs tests that access & manipulate extensions via the command line.

View File

@@ -3419,7 +3419,7 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
*,
remote_url,
):
# Skip the URL prefix scheme, e.g. `https://` for less "noisy" outpout.
# Skip the URL prefix scheme, e.g. `https://` for less "noisy" output.
url_split = remote_url.partition("://")
url_for_display = url_split[2] if url_split[2] else remote_url

View File

@@ -231,7 +231,7 @@ def command_output_from_json_0(
# the function only finishes when `poll()` is not none, it's just used to ensure file-handles
# are closed before this function exits, this only seems to be a problem on WIN32.
# WIN32 needs to use a separate process-group else Blender will recieve the "break", see #131947.
# WIN32 needs to use a separate process-group else Blender will receive the "break", see #131947.
creationflags = 0
if sys.platform == "win32":
creationflags = subprocess.CREATE_NEW_PROCESS_GROUP

View File

@@ -1556,7 +1556,7 @@ def pkg_manifest_validate_terse_description_or_error(value: str) -> str | None:
elif value[-1] in {")", "]", "}"}:
pass # Allow closing brackets (sometimes used to mention formats).
else:
return "alpha-numeric suffix expected, the string must not end with punctuation"
return "alphanumeric suffix expected, the string must not end with punctuation"
return None
@@ -3970,7 +3970,7 @@ class subcmd_client:
return False
if isinstance((repo_gen_dict := pkg_repo_data_from_json_or_error(result_dict)), str):
msglog.fatal_error("unexpected contants in JSON {:s}".format(repo_gen_dict))
msglog.fatal_error("unexpected contents in JSON {:s}".format(repo_gen_dict))
return False
del result_dict

View File

@@ -200,13 +200,13 @@ def my_create_package(
data_list.append("""paths = [{:s}]\n""".format(", ".join("\"{:s}\"".format(v) for v in value)))
if value_copy:
raise Exception("Unexpected mata-data [build]: {!r}".format(value_copy))
raise Exception("Unexpected metadata [build]: {!r}".format(value_copy))
del value_copy
fh.write("".join(data_list).encode('utf-8'))
if metadata_copy:
raise Exception("Unexpected mata-data: {!r}".format(metadata_copy))
raise Exception("Unexpected metadata: {!r}".format(metadata_copy))
files_create_in_dir(temp_dir_pkg, files)

View File

@@ -23,7 +23,7 @@ The JSON data is formatted into a list of nested lists of 4 items:
Where each list may be empty, and the items in
the subtree are formatted the same way.
data_types is a string, aligned with data that spesifies a type
data_types is a string, aligned with data that specifies a type
for each property.
The types are as follows:

View File

@@ -22,7 +22,7 @@ The JSON data is formatted into a list of nested lists of 4 items:
Where each list may be empty, and the items in
the subtree are formatted the same way.
data_types is a string, aligned with data that spesifies a type
data_types is a string, aligned with data that specifies a type
for each property.
The types are as follows:

View File

@@ -200,7 +200,7 @@ def get_gltf_interpolation(interpolation, export_settings):
"BEZIER": "CUBICSPLINE",
"LINEAR": "LINEAR",
"CONSTANT": "STEP"
}.get(interpolation, export_settings['gltf_sampling_interpolation_fallback']) # If unknown, default to the mode choosen by the user
}.get(interpolation, export_settings['gltf_sampling_interpolation_fallback']) # If unknown, default to the mode chosen by the user
def get_anisotropy_rotation_gltf_to_blender(rotation):

View File

@@ -142,7 +142,7 @@ def __try_sparse_accessor(array):
else:
indices_type = gltf2_io_constants.ComponentType.UnsignedInt
# Cast indices to appropiate type (if needed)
# Cast indices to appropriate type (if needed)
nonzero_indices = nonzero_indices.astype(
gltf2_io_constants.ComponentType.to_numpy_dtype(indices_type),
copy=False,

View File

@@ -462,7 +462,7 @@ def gather_action_animations(obj_uuid: int,
# This way, we can evaluate drivers on shape keys, and bake them
drivers = get_sk_drivers(obj_uuid, export_settings)
if drivers:
# So ... Let's create some costum properties and the armature
# So ... Let's create some custom properties and the armature
# First, retrieve the armature object
for mesh_uuid in drivers:
_, channels = get_driver_on_shapekey(mesh_uuid, export_settings)
@@ -471,7 +471,7 @@ def gather_action_animations(obj_uuid: int,
if channel is None:
continue
if blender_object.animation_data is None or blender_object.animation_data.drivers is None:
# There is no animation on the armature, so no need to crate driver
# There is no animation on the armature, so no need to create driver
# But, we need to copy the current value of the shape key to the custom property
blender_object["gltf_" + mesh_uuid][idx] = blender_object.data.shape_keys.key_blocks[channel.data_path.split('"')[
1]].value
@@ -903,7 +903,7 @@ def __get_blender_actions(obj_uuid: str,
for act in bpy.data.actions:
already_added_action = False
# For the assigned action, we aleady have the slot
# For the assigned action, we already have the slot
if act == blender_object.animation_data.action:
continue

View File

@@ -58,7 +58,7 @@ def get_channel_groups(obj_uuid: str, blender_action: bpy.types.Action, slot: bp
blender_object = export_settings['vtree'].nodes[obj_uuid].blender_object
# When mutliple rotation mode detected, keep the currently used
# When multiple rotation mode detected, keep the currently used
multiple_rotation_mode_detected = {}
# When both normal and delta are used --> Set to to_be_sampled list

View File

@@ -207,7 +207,7 @@ def __convert_keyframes(armature_uuid, bone_name, channel, keyframes, action_nam
def __gather_interpolation(node_channel_is_animated, node_channel_interpolation, keyframes, export_settings):
if len(keyframes) > 2:
# keep STEP as STEP, other become the interpolation choosen by the user
# keep STEP as STEP, other become the interpolation chosen by the user
return {
"STEP": "STEP"
}.get(node_channel_interpolation, export_settings['gltf_sampling_interpolation_fallback'])

View File

@@ -34,7 +34,7 @@ def gather_object_sampled_channels(object_uuid: str, blender_action_name: str, s
for _, _, chan_prop, _ in [chan for chan in to_be_sampled if chan[1] == "OBJECT"]:
list_of_animated_channels[chan_prop] = get_gltf_interpolation(
export_settings['gltf_sampling_interpolation_fallback'], export_settings) # if forced to be sampled, keep the interpolation choosen by the user
export_settings['gltf_sampling_interpolation_fallback'], export_settings) # if forced to be sampled, keep the interpolation chosen by the user
else:
pass
# There is no animated channels (because if it was, we would have a slot_identifier)

View File

@@ -148,7 +148,7 @@ def __gather_interpolation(
export_settings):
if len(keyframes) > 2:
# keep STEP as STEP, other become the interpolation choosen by the user
# keep STEP as STEP, other become the interpolation chosen by the user
return {
"STEP": "STEP"
}.get(node_channel_interpolation, export_settings['gltf_sampling_interpolation_fallback'])

View File

@@ -924,7 +924,7 @@ def get_texture_transform_from_mapping_node(mapping_node, export_settings):
def check_if_is_linked_to_active_output(shader_socket, group_path):
# Here, group_path must be copyed, because if there are muliply link that enter/exit a group node
# Here, group_path must be copied, because if there are multiple links that enter/exit a group node
# This will modify it, and we don't want to modify the original group_path (from the parameter) inside the loop
for link in shader_socket.links:

View File

@@ -307,7 +307,7 @@ class PrimitiveCreator:
attr['len'] = gltf2_blender_conversion.get_data_length(attr['blender_data_type'])
attr['type'] = gltf2_blender_conversion.get_numpy_type(attr['blender_data_type'])
# Now we have all attribtues, we can change order if we want
# Now we have all attributes, we can change order if we want
# Note that the glTF specification doesn't say anything about order
# Attributes are defined only by name
# But if user want it in a particular order, he can use this hook to perform it
@@ -874,7 +874,7 @@ class PrimitiveCreator:
continue
primitives.append({
# No attribute here, as they are shared accross all primitives
# No attribute here, as they are shared across all primitives
'indices': indices,
'material': material_idx,
'uvmap_attributes_index': uvmap_attributes_index

View File

@@ -316,7 +316,7 @@ def __gather_extensions(blender_mesh,
# Avoid duplicates
export_settings['log'].warning(
'Variant ' + str(v.variant.variant_idx) +
' has 2 differents materials for a single slot. Skipping it.')
' has 2 different materials for a single slot. Skipping it.')
continue
vari = ext_variants.gather_variant(v.variant.variant_idx, export_settings)

View File

@@ -104,7 +104,7 @@ def __gather_wrap(blender_shader_node, group_path, export_settings):
wrap_t = wrap_s
# Starting Blender 3.5, MIRROR is now an extension of image node
# So this manual uv wrapping trick is no more usefull for MIRROR x MIRROR
# So this manual uv wrapping trick is no more useful for MIRROR x MIRROR
# But still works for old files
# Still needed for heterogeneous sampler, like MIRROR x REPEAT, for example
# Take manual wrapping into account

View File

@@ -919,7 +919,7 @@ class VExportTree:
# If not found, keep current material as default
def break_bone_hierarchy(self):
# Can be usefull when matrix is not decomposable
# Can be useful when matrix is not decomposable
for arma in self.get_all_node_of_type(VExportNode.ARMATURE):
bones = self.get_all_bones(arma)
for bone in bones:
@@ -931,7 +931,7 @@ class VExportTree:
self.nodes[bone].parent_bone_uuid = None
def break_obj_hierarchy(self):
# Can be usefull when matrix is not decomposable
# Can be useful when matrix is not decomposable
# TODO: if we get real collection one day, we probably need to adapt this code
for obj in self.get_all_objects():
if self.nodes[obj].armature is not None and self.nodes[obj].parent_uuid == self.nodes[obj].armature:

View File

@@ -58,7 +58,7 @@ class BlenderScene():
BlenderScene.select_imported_objects(gltf)
BlenderScene.set_active_object(gltf)
# Exlude not default scene(s) collection(s), if we are in collection
# Exclude not default scene(s) collection(s), if we are in collection
if gltf.import_settings['import_scene_as_collection'] is True:
if gltf.data.scene is not None:
for scene_idx, coll in gltf.blender_collections.items():

View File

@@ -176,7 +176,7 @@ def init_vnodes(gltf):
if idx_scene == gltf.data.scene:
gltf.blender_collections[idx_scene] = gltf.active_collection
# No collection creation, so no linking
# Link between glTF scence and blender scene is already done
# Link between glTF scene and blender scene is already done
# Check if we have orphan nodes

View File

@@ -83,7 +83,7 @@ class UI_OT_i18n_updatetranslation_work_repo(Operator):
# on Windows and OSX they are `spawn`ed.
# See https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
# This is a problem because spawned processes do not inherit the whole environment
# of the current (Blender-customized) python. In pratice, the `bpy` module won't load e.g.
# of the current (Blender-customized) python. In practice, the `bpy` module won't load e.g.
# So care must be taken that the callback passed to the executor does not rely on any
# Blender-specific modules etc. This is why it is using a class method from `bl_i18n_utils`
# module, rather than a local function of this current Blender-only module.

View File

@@ -119,7 +119,7 @@ def keyconfig_test(kc):
item = km.keymap_items[j + i + 1]
if src.compare(item):
print("===========")
print(km.name, "[self confict]")
print(km.name, "[self conflict]")
print(_kmistr(src, is_modal).strip())
print(_kmistr(item, is_modal).strip())
result = True

View File

@@ -143,7 +143,7 @@ def find_rotation_edge(bm, uv_layer, faces, aspect_y):
prev_uv = uv
prev_select = loop[uv_layer].select_edge
# Add 90 degrees to align along V co-ordinate.
# Add 90 degrees to align along V coordinate.
# Twice, because we divide by two.
sum_u, sum_v = -sum_u, -sum_v
@@ -169,9 +169,9 @@ def find_rotation_geometry(bm, uv_layer, faces, method, axis, aspect_y):
delta_co0 = face.loops[fan - 1].vert.co - face.loops[0].vert.co
delta_co1 = face.loops[fan].vert.co - face.loops[0].vert.co
w = delta_co0.cross(delta_co1).length
# U direction in geometry co-ordinates.
# U direction in geometry coordinates.
sum_u_co += (delta_co0 * mat[0][0] + delta_co1 * mat[0][1]) * w
# V direction in geometry co-ordinates.
# V direction in geometry coordinates.
sum_v_co += (delta_co0 * mat[1][0] + delta_co1 * mat[1][1]) * w
if axis == 'X':
@@ -353,7 +353,7 @@ def get_random_transform(transform_params, entropy):
if scale_even:
scale_v = scale_u
# Results in homogenous co-ordinates.
# Results in homogeneous coordinates.
return [[scale_u * math.cos(angle), -scale_v * math.sin(angle), offset_u],
[scale_u * math.sin(angle), scale_v * math.cos(angle), offset_v]]