# Copyright (C) 2016 The OpenTimestamps developers
#
# This file is part of python-opentimestamps.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-opentimestamps, including this file, may be copied,
# modified, propagated, or distributed except according to the terms contained
# in the LICENSE file.

from opentimestamps.core.op import CryptOp
from opentimestamps.core.timestamp import Timestamp
from opentimestamps.core.serialize import (
    BadMagicError, DeserializationError, StreamSerializationContext, StreamDeserializationContext
)
from opentimestamps.core.packetstream import PacketReader, PacketWriter, PacketMissingError


class TimestampLog:
    """Timestamps for append-only files

    With append-only files such as log files, rather than timestamping once, you
    want to timestamp them repeatedly as new data is appended to them. Logfile
    timestamps support this use case by allowing multiple timestamps on the same
    file to be recorded, with each timestamp including the length of the log file
    at the time that particular timestamp was created.

    In addition, logfile timestamps are serialized such that they themselves can
    be written to append-only storage.
    """

    HEADER_MAGIC = b'\x00OpenTimestamps\x00\x00Log\x00\xd9\x19\xc5\x3a\x99\xb1\x12\xe9\xa6\xa1\x00'
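
    # On-disk layout implied by the reader/writer below (a sketch, not a
    # normative specification):
    #
    #   HEADER_MAGIC
    #   file_hash_op  - a serialized CryptOp, e.g. OpSHA256
    #   one packet per timestamp, each containing:
    #       varuint   length of the timestamped file when the timestamp was made
    #       bytes     file_hash_op digest of the file at that length
    #       timestamp serialized against that digest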


class TimestampLogReader(TimestampLog):
    @classmethod
    def open(cls, fd):
        """Open an existing timestamp log

        fd must be positioned at the start of the log; the header will be
        read immediately, with BadMagicError raised if it's incorrect.
        """
        ctx = StreamDeserializationContext(fd)

        actual_magic = ctx.read_bytes(len(cls.HEADER_MAGIC))
        if cls.HEADER_MAGIC != actual_magic:
            raise BadMagicError(cls.HEADER_MAGIC, actual_magic)

        file_hash_op = CryptOp.deserialize(ctx)

        return cls(fd, file_hash_op)

    def __init__(self, fd, file_hash_op):
        """Create a timestamp log reader instance

        You probably want to use TimestampLogReader.open() instead.
        """
        self.fd = fd
        self.file_hash_op = file_hash_op

    def __iter__(self):
        """Iterate through all timestamps in the timestamp log"""
        while True:
            try:
                reader = PacketReader(self.fd)
            except PacketMissingError:
                break

            ctx = StreamDeserializationContext(reader)

            try:
                length = ctx.read_varuint()
                file_hash = ctx.read_bytes(self.file_hash_op.DIGEST_LENGTH)
                timestamp = Timestamp.deserialize(ctx, file_hash)

                yield (length, timestamp)
            except DeserializationError as exp:
                # FIXME: should provide a way to get insight into these errors
                pass
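

# A minimal read-side sketch, assuming 'example.log.ots' is a hypothetical,
# previously written timestamp log:
#
#     with open('example.log.ots', 'rb') as fd:
#         for length, stamp in TimestampLogReader.open(fd):
#             # stamp.msg is the digest of the first `length` bytes of the
#             # timestamped file at the time this timestamp was made
#             ...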


class TimestampLogWriter(TimestampLog):
    @classmethod
    def open(cls, fd):
        """Open an existing timestamp log for writing

        fd must be both readable and writable, and must be positioned at the
        beginning of the timestamp log file. The header will be immediately
        read, with BadMagicError raised if it's incorrect.
        """
        # Use the log reader to read the header information
        reader = TimestampLogReader.open(fd)

        # Parse the entries to find the last one
        for stamp in reader:
            pass

        # FIXME: pad the end as necessary to deal with truncated writes

        return cls(fd, reader.file_hash_op)

    @classmethod
    def create(cls, fd, file_hash_op):
        """Create a new timestamp log

        Writes the header appropriately.
        """
        ctx = StreamSerializationContext(fd)
        ctx.write_bytes(cls.HEADER_MAGIC)
        file_hash_op.serialize(ctx)

        return cls(fd, file_hash_op)

    def __init__(self, fd, file_hash_op):
        """Create a new timestamp log writer

        You probably want to use the open() or create() methods instead.
        """
        self.fd = fd
        self.file_hash_op = file_hash_op

    def append(self, length, timestamp):
        """Add a new timestamp to the log"""
        if len(timestamp.msg) != self.file_hash_op.DIGEST_LENGTH:
            raise ValueError("Timestamp msg length does not match expected digest length; %d != %d" %
                             (len(timestamp.msg), self.file_hash_op.DIGEST_LENGTH))

        with PacketWriter(self.fd) as packet_fd:
            ctx = StreamSerializationContext(packet_fd)

            ctx.write_varuint(length)
            ctx.write_bytes(timestamp.msg)
            timestamp.serialize(ctx)
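

if __name__ == '__main__':
    # A hedged usage sketch, not part of the library API: round-trip a single
    # timestamp through an in-memory log. OpSHA256, Timestamp, and
    # BitcoinBlockHeaderAttestation come from elsewhere in the library; the
    # block height and log contents below are made up for illustration.
    import io

    from opentimestamps.core.op import OpSHA256
    from opentimestamps.core.notary import BitcoinBlockHeaderAttestation

    log_fd = io.BytesIO()
    writer = TimestampLogWriter.create(log_fd, OpSHA256())

    # Timestamp the first chunk of an (imaginary) append-only file
    data = b'first chunk of an append-only log\n'
    stamp = Timestamp(OpSHA256()(data))
    stamp.attestations.add(BitcoinBlockHeaderAttestation(123456))
    writer.append(len(data), stamp)

    # Read the log back and print what was recorded
    log_fd.seek(0)
    for length, read_stamp in TimestampLogReader.open(log_fd):
        print(length, read_stamp.msg.hex())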