File: vtkjs_helper.py

package info (click to toggle)
paraview 5.13.2%2Bdfsg-3
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid, trixie
  • size: 544,220 kB
  • sloc: cpp: 3,374,605; ansic: 1,332,409; python: 150,381; xml: 122,166; sql: 65,887; sh: 7,317; javascript: 5,262; yacc: 4,417; java: 3,977; perl: 2,363; lex: 1,929; f90: 1,397; makefile: 170; objc: 153; tcl: 59; pascal: 50; fortran: 29
file content (247 lines) | stat: -rw-r--r-- 9,890 bytes parent folder | download | duplicates (2)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
import base64
import json
import re
import os
import shutil
import sys
import zipfile

# Pick the best available zip compression: DEFLATE requires zlib; fall back
# to storing files uncompressed when zlib is missing.
try:
    import zlib  # noqa: F401 -- imported only to probe availability

    compression = zipfile.ZIP_DEFLATED
except ImportError:
    # BUGFIX: was a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit; only a missing zlib matters here.
    compression = zipfile.ZIP_STORED

# -----------------------------------------------------------------------------


def convertDirectoryToZipFile(directoryPath):
    """Replace a .vtkjs directory with a single zip archive at the same path.

    Walks *directoryPath*, adds every file to a zip (archive paths are
    relative to the directory root), deletes the directory, then renames
    the archive so it takes the directory's original path.  A no-op when
    *directoryPath* is already a plain file (i.e. already converted).
    """
    if os.path.isfile(directoryPath):
        return

    zipFilePath = "%s.zip" % directoryPath
    # Context manager guarantees the archive is finalized/closed even if
    # zf.write raises (replaces the manual try/finally + close()).
    with zipfile.ZipFile(zipFilePath, mode="w") as zf:
        for dirName, _subdirList, fileList in os.walk(directoryPath):
            for fname in fileList:
                fullPath = os.path.join(dirName, fname)
                relPath = os.path.relpath(fullPath, directoryPath)
                zf.write(fullPath, arcname=relPath, compress_type=compression)

    shutil.rmtree(directoryPath)
    shutil.move(zipFilePath, directoryPath)


# -----------------------------------------------------------------------------


def addDataToViewer(dataPath, srcHtmlPath):
    """Write a standalone HTML viewer with the .vtkjs data embedded.

    Base64-encodes the file at *dataPath* and copies *srcHtmlPath* (e.g.
    ParaViewGlance.html) next to the data file, injecting a <script> block
    just before each ``</body>`` line that loads the dataset and shows the
    app.  Does nothing unless *dataPath* is a file and *srcHtmlPath* exists.
    """
    if not (os.path.isfile(dataPath) and os.path.exists(srcHtmlPath)):
        return

    baseName = os.path.basename(dataPath)
    # Output HTML sits beside the data, named after it minus ".vtkjs" (6 chars).
    dstHtmlPath = os.path.join(os.path.dirname(dataPath), "%s.html" % baseName[:-6])

    # Read the payload once and encode it as a single-line base64 string.
    with open(dataPath, "rb") as dataFile:
        encodedContent = base64.b64encode(dataFile.read()).decode().replace("\n", "")

    with open(srcHtmlPath, mode="r", encoding="utf-8") as srcHtml, open(
        dstHtmlPath, mode="w", encoding="utf-8"
    ) as dstHtml:
        for line in srcHtml:
            # Inject the loader immediately before the closing body tag.
            if "</body>" in line:
                dstHtml.write("<script>\n")
                dstHtml.write('var contentToLoad = "%s";\n\n' % encodedContent)
                dstHtml.write(
                    'Glance.importBase64Dataset("%s" , contentToLoad, glanceInstance.proxyManager);\n'
                    % baseName
                )
                dstHtml.write("glanceInstance.showApp();\n")
                dstHtml.write("</script>\n")
            dstHtml.write(line)


# -----------------------------------------------------------------------------


def zipAllTimeSteps(directoryPath):
    """Merge per-timestep .vtkjs exports into one time-aware zip archive.

    ParaView writes one ``<basename>.<N>`` directory per timestep next to a
    root ``index.json``.  This function rewrites the scene objects to use
    ``vtkHttpDataSetSeriesReader``, copies every timestep's geometry into a
    single archive (data arrays de-duplicated by filename), deletes the
    per-timestep directories and the root index file, and finally moves the
    archive to *directoryPath*.  No-op when *directoryPath* is already a file.
    """
    if os.path.isfile(directoryPath):
        return

    class UrlCounterDict(dict):
        # Maps an object name to a stable numeric url string, assigned in
        # first-seen order.
        Counter = 0

        def GetUrlName(self, name):
            if name not in self.keys():
                # BUGFIX: read the counter through self instead of the
                # enclosing-scope variable objNameToUrls (they were the same
                # object at runtime, but the class must not depend on the
                # closure it happens to be instantiated in).
                self[name] = str(self.Counter)
                self.Counter = self.Counter + 1
            return self[name]

    def InitIndex(sourcePath, destObj):
        # Copy every key of one timestep's index.json into destObj, then
        # strip the per-timestep reader info from each scene object.
        with open(sourcePath, "r") as sourceFile:
            sourceData = sourceFile.read()
            sourceObj = json.loads(sourceData)
            for key in sourceObj:
                destObj[key] = sourceObj[key]
            # remove vtkHttpDataSetReader information
            for obj in destObj["scene"]:
                obj.pop(obj["type"])
                obj.pop("type")

    def getUrlToNameDictionary(indexObj):
        # Build {url -> object name} from one timestep's index.json object.
        urls = {}
        for obj in indexObj["scene"]:
            urls[obj[obj["type"]]["url"]] = obj["name"]
        return urls

    def addDirectoryToZip(
        dirname, zipobj, storedData, rootIdx, timeStep, objNameToUrls
    ):
        # Fold one timestep directory into the archive and merge its
        # per-timestep actor/mapper/property settings into the root index.

        # Update root index.json file from index.json of this timestep
        with open(os.path.join(dirname, "index.json"), "r") as currentIdxFile:
            currentIdx = json.loads(currentIdxFile.read())
            urlToName = getUrlToNameDictionary(currentIdx)
            rootTimeStepSection = rootIdx["animation"]["timeSteps"][timeStep]
            for key in currentIdx:
                if key == "scene" or key == "version":
                    continue
                rootTimeStepSection[key] = currentIdx[key]
            for obj in currentIdx["scene"]:
                objName = obj["name"]
                rootTimeStepSection[objName] = {}
                rootTimeStepSection[objName]["actor"] = obj["actor"]
                rootTimeStepSection[objName]["actorRotation"] = obj["actorRotation"]
                rootTimeStepSection[objName]["mapper"] = obj["mapper"]
                rootTimeStepSection[objName]["property"] = obj["property"]

        # For every object in the current timestep
        for folder in sorted(os.listdir(dirname)):
            currentItem = os.path.join(dirname, folder)
            if os.path.isdir(currentItem) is False:
                continue
            # Write all data array of the current timestep in the archive
            for filename in os.listdir(os.path.join(currentItem, "data")):
                fullpath = os.path.join(currentItem, "data", filename)
                if os.path.isfile(fullpath) and filename not in storedData:
                    storedData.add(filename)
                    relPath = os.path.join("data", filename)
                    zipobj.write(fullpath, arcname=relPath, compress_type=compression)
            # Write the index.json containing pointers to these data arrays
            # while replacing every basepath as '../../data'
            objIndexFilePath = os.path.join(dirname, folder, "index.json")
            with open(objIndexFilePath, "r") as objIndexFile:
                objIndexObjData = json.loads(objIndexFile.read())
            for elm in objIndexObjData.keys():
                try:
                    if "ref" in objIndexObjData[elm].keys():
                        objIndexObjData[elm]["ref"]["basepath"] = "../../data"
                    if "arrays" in objIndexObjData[elm].keys():
                        for array in objIndexObjData[elm]["arrays"]:
                            array["data"]["ref"]["basepath"] = "../../data"
                except AttributeError:
                    # Entry is not a dict (plain value) -- nothing to patch.
                    continue
            currentObjName = urlToName[folder]
            objIndexRelPath = os.path.join(
                objNameToUrls.GetUrlName(currentObjName), str(timeStep), "index.json"
            )
            zipobj.writestr(
                objIndexRelPath,
                json.dumps(objIndexObjData, indent=2),
                compress_type=compression,
            )

    # ---

    zipFilePath = "%s.zip" % directoryPath
    currentDirectory = os.path.abspath(os.path.join(directoryPath, os.pardir))
    rootIndexPath = os.path.join(currentDirectory, "index.json")
    # BUGFIX: close the root index file deterministically (it previously
    # stayed open until interpreter shutdown).
    with open(rootIndexPath, "r") as rootIndexFile:
        rootIndexObj = json.loads(rootIndexFile.read())

    # Context manager replaces the explicit try/finally zf.close().
    with zipfile.ZipFile(zipFilePath, mode="w") as zf:
        # We copy the scene from an index of a specific timestep to the root index
        # Scenes should all have the same objects so only do it for the first one
        isSceneInitialized = False
        # currentlyAddedData set stores hashes of every data we already added to the
        # vtkjs archive to prevent data duplication
        currentlyAddedData = set()
        # Regex that folders storing timestep data from paraview should follow.
        # BUGFIX: escape the basename -- a literal "." in names like
        # "foo.vtkjs" previously matched any character.
        reg = re.compile(
            r"^" + re.escape(os.path.basename(directoryPath)) + r"\.[0-9]+$"
        )
        # We assume an object will not be deleted from a timestep to another,
        # so a single generic per-object index.json covers the whole series.
        genericIndexObj = {}
        genericIndexObj["series"] = []
        timeStep = 0
        for item in rootIndexObj["animation"]["timeSteps"]:
            genericIndexObj["series"].append({})
            genericIndexObj["series"][timeStep]["url"] = str(timeStep)
            genericIndexObj["series"][timeStep]["timeStep"] = float(item["time"])
            timeStep = timeStep + 1
        # Keep track of the url for every object
        objNameToUrls = UrlCounterDict()

        timeStep = 0
        # zip all timestep directories
        for folder in sorted(os.listdir(currentDirectory)):
            fullPath = os.path.join(currentDirectory, folder)
            if os.path.isdir(fullPath) and reg.match(folder):
                if not isSceneInitialized:
                    InitIndex(os.path.join(fullPath, "index.json"), rootIndexObj)
                    isSceneInitialized = True
                addDirectoryToZip(
                    fullPath,
                    zf,
                    currentlyAddedData,
                    rootIndexObj,
                    timeStep,
                    objNameToUrls,
                )
                shutil.rmtree(fullPath)
                timeStep = timeStep + 1

        # Write every index.json holding time information for each object
        for name in objNameToUrls:
            zf.writestr(
                os.path.join(objNameToUrls[name], "index.json"),
                json.dumps(genericIndexObj, indent=2),
                compress_type=compression,
            )

        # Update root index.json urls and write it in the archive
        for obj in rootIndexObj["scene"]:
            obj["id"] = obj["name"]
            obj["type"] = "vtkHttpDataSetSeriesReader"
            obj["vtkHttpDataSetSeriesReader"] = {}
            obj["vtkHttpDataSetSeriesReader"]["url"] = objNameToUrls[obj["name"]]
        zf.writestr(
            "index.json", json.dumps(rootIndexObj, indent=2), compress_type=compression
        )
        os.remove(rootIndexPath)

    shutil.move(zipFilePath, directoryPath)


# -----------------------------------------------------------------------------
# Main
# -----------------------------------------------------------------------------

if __name__ == "__main__":
    # Command-line entry point: argv[1] is the .vtkjs directory to bundle;
    # optional argv[2] is a ParaViewGlance.html template to embed the data in.
    argCount = len(sys.argv)
    if argCount < 2:
        print(
            "Usage: directoryToFile /path/to/directory.vtkjs [/path/to/ParaViewGlance.html]"
        )
    else:
        vtkjsPath = sys.argv[1]
        convertDirectoryToZipFile(vtkjsPath)
        if argCount == 3:
            addDataToViewer(vtkjsPath, sys.argv[2])