"""
Copyright (C) 2023 Michael Ablassmeier <abi@grinser.de>
Copyright (C) 2020 Red Hat, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import json
import os
import datetime
from typing import List, Any, Tuple, Dict
from argparse import Namespace
from libvirtnbdbackup.objects import DomainDisk
from libvirtnbdbackup.sparsestream import exceptions
class SparseStream:
    """Sparse Stream writer/reader class.

    Handles the sparse backup stream format: a JSON metadata block
    followed by fixed-length frame headers of the form
    b"<kind> <start> <length>", where start and length are encoded
    as hexadecimal numbers.
    """

    def __init__(self, types, version: int = 2) -> None:
        """Stream version:
        1: base version
        2: stream version with compression support
        """
        self.version = version
        # Only compression method currently advertised in the metadata.
        self.compressionMethod: str = "lz4"
        self.types = types.SparseStreamTypes()

    def dumpMetadata(
        self,
        args: Namespace,
        virtualSize: int,
        dataSize: int,
        disk: DomainDisk,
    ) -> bytes:
        """First block in backup stream is Meta data information
        about virtual size of the disk being backed up, as well
        as various information regarding backup.
        Dumps Metadata frame to be written at start of stream in
        json format.

        Parameters:
            args: parsed command line options; must provide
                .cpt (checkpoint with .name/.parent), .compress
                and .level attributes
            virtualSize: virtual size of the disk in bytes
            dataSize: amount of used data to be backed up
            disk: disk object being backed up

        Returns:
            UTF-8 encoded JSON document describing the backup.
        """
        meta = {
            "virtualSize": virtualSize,
            "dataSize": dataSize,
            "date": datetime.datetime.now().isoformat(),
            "diskName": disk.target,
            "diskFormat": disk.format,
            "checkpointName": args.cpt.name,
            "compressed": args.compress,
            "compressionMethod": self.compressionMethod,
            "parentCheckpoint": args.cpt.parent,
            "incremental": (args.level in ("inc", "diff")),
            "streamVersion": self.version,
        }
        return json.dumps(meta, indent=4).encode("utf-8")

    def writeCompressionTrailer(self, writer, trailer: List[Any]) -> None:
        """Dump compression trailer to end of stream.

        The trailer (compressed block size information) is appended as
        a JSON payload followed by a terminator and a COMP frame header
        whose length field records the payload size, so a reader can
        locate the trailer by seeking backwards from the end of stream.
        """
        size = writer.write(json.dumps(trailer).encode())
        writer.write(self.types.TERM)
        self.writeFrame(writer, self.types.COMP, 0, size)

    def _readHeader(self, reader) -> Tuple[bytes, bytes, bytes]:
        """Attempt to read header.

        Reads one fixed-length frame header and splits it into its
        kind, start and length fields (still hex-encoded bytes).

        Raises:
            exceptions.BlockFormatException: if the header does not
                split into three space-separated fields.
        """
        header = reader.read(self.types.FRAME_LEN)
        try:
            # Fields are bytes, because the stream is read in binary mode.
            kind, start, length = header.split(b" ", 2)
        except ValueError as err:
            raise exceptions.BlockFormatException(
                f"Invalid block format: [{err}]"
            ) from err
        return kind, start, length

    @staticmethod
    def _parseHeader(
        kind: bytes, start: bytes, length: bytes
    ) -> Tuple[bytes, int, int]:
        """Return parsed header information.

        Converts the hex-encoded start offset and length fields to
        integers; the kind marker is passed through unchanged.

        Raises:
            exceptions.FrameformatException: if start/length are not
                valid hexadecimal numbers.
        """
        try:
            return kind, int(start, 16), int(length, 16)
        except ValueError as err:
            raise exceptions.FrameformatException(
                f"Invalid frame format: [{err}]"
            ) from err

    def readCompressionTrailer(self, reader) -> Any:
        """If compressed stream is found, information about compressed
        block sizes is appended as last json payload.
        Function seeks to end of file and reads trailer information.

        The reader's position is restored afterwards. The returned
        object is whatever JSON payload writeCompressionTrailer dumped
        (a list of block size information).
        """
        pos = reader.tell()
        reader.seek(0, os.SEEK_END)
        # Step back over the trailing COMP frame header and terminator
        # to learn the trailer payload size.
        reader.seek(-(self.types.FRAME_LEN + len(self.types.TERM)), os.SEEK_CUR)
        _, _, length = self._readHeader(reader)
        # _readHeader advanced by FRAME_LEN; seek back over the header
        # plus the payload itself to reach the trailer start.
        reader.seek(-(self.types.FRAME_LEN + int(length, 16)), os.SEEK_CUR)
        trailer = self.loadMetadata(reader.read(int(length, 16)))
        reader.seek(pos)
        return trailer

    @staticmethod
    def loadMetadata(s: bytes) -> Any:
        """Load and parse metadata information
        Parameters:
            s: (bytes) UTF-8 encoded JSON document as received during
                data file read
        Returns:
            json.loads: (dict) Decoded json string as python object
        Raises:
            exceptions.MetaHeaderFormatException: if the payload is
                not valid JSON.
        """
        try:
            return json.loads(s.decode("utf-8"))
        except json.decoder.JSONDecodeError as err:
            raise exceptions.MetaHeaderFormatException(
                f"Invalid meta header format: [{err}]"
            ) from err

    def writeFrame(self, writer, kind: bytes, start: int, length: int) -> None:
        """Write backup frame
        Parameters:
            writer: (fh) Writer object that implements .write()
            kind: frame type marker (e.g. DATA/ZERO/STOP/COMP bytes)
            start: frame start offset
            length: frame payload length
        """
        writer.write(self.types.FRAME % (kind, start, length))

    def readFrame(self, reader) -> Tuple[bytes, int, int]:
        """Read backup frame
        Parameters:
            reader: (fh) Reader object which implements .read()
        Returns:
            Tuple of (kind marker, start offset, length), offsets
            decoded from their hex representation.
        """
        kind, start, length = self._readHeader(reader)
        return self._parseHeader(kind, start, length)