File: batch_edit_text.py

package info (click to toggle)
blender 3.4.1+dfsg-2
  • links: PTS, VCS
  • area: main
  • in suites: bookworm
  • size: 280,208 kB
  • sloc: ansic: 1,213,366; cpp: 1,148,738; python: 468,812; xml: 13,577; sh: 5,969; javascript: 304; lisp: 247; makefile: 67
file content (66 lines) | stat: -rw-r--r-- 1,813 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
# SPDX-License-Identifier: GPL-2.0-or-later

from typing import (
    Callable,
    Generator,
    Optional,
    Sequence,
    Tuple,
)

TextOpFn = Callable[
    # (file_name, data_src)
    [str, str],
    # New file contents, or None when the file is to be left untouched.
    Optional[str]
]


def operation_wrap(args: Tuple[str, TextOpFn]) -> None:
    """Apply a text operation to one file, rewriting it only when the
    operation returns changed contents.

    :arg args: A ``(file_path, operation)`` pair; packed into a tuple so
       this function can be used directly with ``multiprocessing`` map.
    """
    filepath, operation = args
    with open(filepath, "r", encoding="utf-8") as fh:
        text_old = fh.read()
        text_new = operation(filepath, text_old)

    # Nothing to do: the operation declined, or produced identical text.
    if text_new is None:
        return
    if text_new == text_old:
        return

    with open(filepath, "w", encoding="utf-8") as fh:
        fh.write(text_new)


def run(
        *,
        directories: Sequence[str],
        is_text: Callable[[str], bool],
        text_operation: "TextOpFn",
        use_multiprocess: bool,
) -> None:
    """Apply ``text_operation`` to every text file found under ``directories``.

    :arg directories: Directory paths to walk recursively.
    :arg is_text: Predicate deciding whether a file path should be processed.
    :arg text_operation: Callable taking ``(file_name, data_src)`` and
       returning the new file contents, or ``None`` when no change is made.
    :arg use_multiprocess: Process files in parallel using a process pool.
    """
    # Progress output: show which directories are being scanned.
    print(directories)

    import os

    def source_files(path: str) -> Generator[str, None, None]:
        """Yield candidate file paths under ``path``, skipping dot-files
        and pruning dot-directories (e.g. ``.git``) from the walk."""
        for dirpath, dirnames, filenames in os.walk(path):
            # In-place assignment prunes hidden directories from os.walk.
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for filename in filenames:
                if filename.startswith("."):
                    continue
                filepath = os.path.join(dirpath, filename)
                if is_text(filepath):
                    yield filepath

    if use_multiprocess:
        args = [
            (fn, text_operation) for directory in directories
            for fn in source_files(directory)
        ]
        import multiprocessing
        job_total = multiprocessing.cpu_count()
        # Context manager terminates & reaps the workers when done
        # (previously the pool was never closed/joined, leaking processes).
        with multiprocessing.Pool(processes=job_total * 2) as pool:
            pool.map(operation_wrap, args)
    else:
        for directory in directories:
            for fn in source_files(directory):
                operation_wrap((fn, text_operation))