File: batch_edit_text.py

package: blender 4.3.2+dfsg-2
# SPDX-FileCopyrightText: 2023 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later

from typing import (
    Callable,
    Generator,
    Optional,
    Sequence,
)

TextOpFn = Callable[
    # file_name, data_src
    [str, str],
    # data_dst or None when no change is made.
    Optional[str]
]


def operation_wrap(fn: str, text_operation: TextOpFn) -> None:
    """Read ``fn``, apply ``text_operation``, and write the result back only when the text changed."""
    with open(fn, "r", encoding="utf-8") as f:
        data_src = f.read()
        data_dst = text_operation(fn, data_src)

    if data_dst is None or (data_src == data_dst):
        return

    with open(fn, "w", encoding="utf-8") as f:
        f.write(data_dst)


def run(
        *,
        directories: Sequence[str],
        is_text: Callable[[str], bool],
        text_operation: TextOpFn,
        use_multiprocess: bool,
) -> None:
    """Apply ``text_operation`` to every text file found under ``directories``."""
    print(directories)

    import os

    def source_files(path: str) -> Generator[str, None, None]:
        for dirpath, dirnames, filenames in os.walk(path):
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for filename in filenames:
                if filename.startswith("."):
                    continue
                filepath = os.path.join(dirpath, filename)
                if is_text(filepath):
                    yield filepath

    if use_multiprocess:
        args = [
            (fn, text_operation) for directory in directories
            for fn in source_files(directory)
        ]
        import multiprocessing
        job_total = multiprocessing.cpu_count()
        # A context manager ensures the worker pool is cleaned up once the jobs finish.
        with multiprocessing.Pool(processes=job_total) as pool:
            pool.starmap(operation_wrap, args)
    else:
        for directory in directories:
            for fn in source_files(directory):
                operation_wrap(fn, text_operation)
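
A minimal usage sketch follows (not part of the file above). It assumes the module is importable as ``batch_edit_text`` and uses the hypothetical helpers ``is_py_file`` and ``strip_trailing_whitespace`` purely to illustrate the signatures expected for ``is_text`` and for a ``TextOpFn`` passed as ``text_operation``.

from typing import Optional

from batch_edit_text import run


def is_py_file(filepath: str) -> bool:
    # Illustrative predicate: only operate on Python sources.
    return filepath.endswith(".py")


def strip_trailing_whitespace(_fn: str, data_src: str) -> Optional[str]:
    # Example TextOpFn: return the edited text, or None when nothing changed.
    data_dst = "".join(line.rstrip() + "\n" for line in data_src.splitlines())
    return None if data_dst == data_src else data_dst


if __name__ == "__main__":
    run(
        directories=["/path/to/checkout"],  # placeholder path
        is_text=is_py_file,
        text_operation=strip_trailing_whitespace,
        use_multiprocess=True,
    )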