const os = require('os')
const path = require('path')
const { format, promisify } = require('util')
const rimraf = promisify(require('rimraf'))
const glob = promisify(require('glob'))
const MiniPass = require('minipass')
const fsMiniPass = require('fs-minipass')
const log = require('./log-shim')
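
// Pads `n` with leading zeros out to the width of `length`,
// e.g. padZero(3, 100) === '003'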
const padZero = (n, length) => n.toString().padStart(length.toString().length, '0')
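
// Glob patterns always use forward slashes, so swap out
// Windows-style backslash path separators before matching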
const globify = pattern => pattern.split('\\').join('/')
class LogFiles {
  // Default to a plain minipass stream so we can buffer
  // initial writes before we know the cache location
  #logStream = null

  // We cap log files at a certain number of log events per file.
  // Note that each log event can write more than one line to the
  // file, and we rotate to a new log file once this cap is reached
  #MAX_LOGS_PER_FILE = null

  // Now that we write logs continuously we need a backstop here for
  // infinite loops that still log. This is also partially handled by
  // the config.get('max-files') option, but this is a failsafe to
  // prevent runaway log file creation
  #MAX_FILES_PER_PROCESS = null

  #fileLogCount = 0
  #totalLogCount = 0
  #path = null
  #logsMax = null
  #files = []

  constructor ({
    maxLogsPerFile = 50_000,
    maxFilesPerProcess = 5,
  } = {}) {
    this.#MAX_LOGS_PER_FILE = maxLogsPerFile
    this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
    this.on()
  }
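
  // Formats one log event as prefixed lines. Every line of the formatted
  // message gets the same `<count> <level> <title>` prefix, e.g.
  //   LogFiles.format(0, 'warn', 'config', 'bad value')
  //   => '0 warn config bad value' + os.EOL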
  static format (count, level, title, ...args) {
    let prefix = `${count} ${level}`
    if (title) {
      prefix += ` ${title}`
    }

    return format(...args)
      .split(/\r?\n/)
      .reduce((lines, line) =>
        lines += prefix + (line ? ' ' : '') + line + os.EOL,
      ''
      )
  }

  on () {
    this.#logStream = new MiniPass()
    process.on('log', this.#logHandler)
  }

  off () {
    process.off('log', this.#logHandler)
    this.#endStream()
  }

  load ({ path, logsMax = Infinity } = {}) {
    // dir is user configurable and is required to exist, so
    // this can error if the dir is missing or not configured correctly
    this.#path = path
    this.#logsMax = logsMax

    // Log stream has already ended
    if (!this.#logStream) {
      return
    }

    log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)

    // Pipe our initial stream to our new file stream and set that as
    // the new log stream for future writes. If logs-max is 0 then the
    // user does not want a log file
    if (this.#logsMax > 0) {
      const initialFile = this.#openLogFile()
      if (initialFile) {
        this.#logStream = this.#logStream.pipe(initialFile)
      }
    }

    // Kick off the cleaning process, even if we aren't writing a logfile.
    // This is async but it will always ignore the current logfile.
    // Return the result so it can be awaited in tests
    return this.#cleanLogs()
  }

  log (...args) {
    this.#logHandler(...args)
  }

  get files () {
    return this.#files
  }
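
  // Until load() is called the stream is a plain MiniPass buffer; load()
  // replaces it with the file-backed stream returned from piping, so an
  // instanceof check is enough to tell which mode we are in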
  get #isBuffered () {
    return this.#logStream instanceof MiniPass
  }

  #endStream (output) {
    if (this.#logStream) {
      this.#logStream.end(output)
      this.#logStream = null
    }
  }

  #logHandler = (level, ...args) => {
    // Ignore pause and resume events since we
    // write everything to the log file
    if (level === 'pause' || level === 'resume') {
      return
    }

    // If the stream has ended then do nothing
    if (!this.#logStream) {
      return
    }

    const logOutput = this.#formatLogItem(level, ...args)

    if (this.#isBuffered) {
      // Can't do anything but buffer the output if we don't
      // have a file stream yet
      this.#logStream.write(logOutput)
      return
    }

    // Open a new log file if we've written too many logs to this one
    if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
      // Write the last chunk to the file and close it
      this.#endStream(logOutput)

      if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
        // but if it's way too many files then we just stop listening
        this.off()
      } else {
        // otherwise we are ready for a new file for the next event
        this.#logStream = this.#openLogFile()
      }
    } else {
      this.#logStream.write(logOutput)
    }
  }

  #formatLogItem (...args) {
    this.#fileLogCount += 1
    return LogFiles.format(this.#totalLogCount++, ...args)
  }
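
  // Note: #path is used as a filename prefix, not a directory. An
  // illustrative path of '<dir>/<timestamp>-' yields files named
  // '<dir>/<timestamp>-debug-0.log', '<dir>/<timestamp>-debug-1.log', etc.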
  #getLogFilePath (count = '') {
    return `${this.#path}debug-${count}.log`
  }

  #openLogFile () {
    // Count in filename will be 0 indexed
    const count = this.#files.length

    try {
      // Pad with zeros so that our log files are always sorted properly.
      // We never want to write files ending in `-9.log` and `-10.log` because
      // log file cleaning is done by deleting the oldest, so in this example
      // `-10.log` would be deleted next
      const f = this.#getLogFilePath(padZero(count, this.#MAX_FILES_PER_PROCESS))
      // Some effort was made to make this async, but we need to write logs
      // during process.on('exit') which has to be synchronous. So in order
      // to never drop log messages, it is easiest to make it sync all the
      // time, and this was measured to be about 1.5% slower for 40k lines
      // of output
      const logStream = new fsMiniPass.WriteStreamSync(f, { flags: 'a' })
      if (count > 0) {
        // Reset file log count if we are opening
        // after our first file
        this.#fileLogCount = 0
      }
      this.#files.push(logStream.path)
      return logStream
    } catch (e) {
      // If the user has a readonly logdir then we don't want to warn
      // about this on every command, so it should be verbose
      log.verbose('logfile', `could not be created: ${e}`)
    }
  }

  async #cleanLogs () {
    // Clean out old log files. This is a best-effort attempt: if an rm
    // fails, we just log a message about it and move on. We do return a
    // Promise that resolves when we've tried to delete everything, just
    // for the benefit of testing this function properly.
    try {
      const logPath = this.#getLogFilePath()
      const logGlob = path.join(path.dirname(logPath), path.basename(logPath)
        // tell glob to only match digits
        .replace(/\d/g, '[0123456789]')
        // Handle the old (prior to 8.2.0) log file names which did not have a
        // counter suffix
        .replace(/-\.log$/, '*.log')
      )
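      // Note that the digit replacement above also generalizes any digits
      // in the rest of the basename (such as a timestamped prefix), so this
      // one pattern matches the log files from every previous run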
      // Always ignore the currently written files
      const files = await glob(globify(logGlob), { ignore: this.#files.map(globify), silent: true })
      const toDelete = files.length - this.#logsMax

      if (toDelete <= 0) {
        return
      }

      log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)

      for (const file of files.slice(0, toDelete)) {
        try {
          await rimraf(file, { glob: false })
        } catch (e) {
          log.silly('logfile', 'error removing log file', file, e)
        }
      }
    } catch (e) {
      log.warn('logfile', 'error cleaning log files', e)
    } finally {
      log.silly('logfile', 'done cleaning log files')
    }
  }
}
module.exports = LogFiles
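
// A minimal usage sketch (the path below is illustrative; in practice it
// is a timestamped filename prefix inside the logs directory):
//
//   const logFile = new LogFiles()
//   // Events emitted before load() are buffered in memory
//   process.emit('log', 'verbose', 'title', 'buffered until load')
//   logFile.load({ path: '/tmp/_logs/2023-01-01T00_00_00_000Z-', logsMax: 10 })
//   process.emit('log', 'info', 'title', 'written to the logfile')
//   logFile.off()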