fix: Redirect spammy electron stderr to a debug sink (#32188)

* wip: system test to reproduce

* system test for alsa stderr

* split cypress from 3rd party stderr at parent process to electron child

* rm garbage warning regexes

* fix newline behavior when parsing internal stderr

* migrate left over console errors

* clean up system test name

* fix typed import

* extract stderr splitting to separate pkg so runner can use @packages/error

* rm new err log from packherd-quire

* handle backpressure

* docs

* some unit tests & coverage for stderr-filtering

* unit tests

* no longer test regexp specific output in spawn unit tests

* filter enabled debug namespaces rather than just cypress namespaces

* revise stream splitting et al

* try to fix v8 snapshot build??

* fix console.log assertion

* add missing eslint config

* rm unused spies

* fix regexp for optional leading wsp and ansi on debug entries

* update unit tests because sinon

* lint

* colon..

* build stderr-filtering before checking if binary exists

* adds TagStream transform stream, fixes stderr from child proc config

* add build-prod script for stderr-filtering

* changelog

* properly handle backpressure in prefixed content transform stream

* use standard tsconfig?

* better tsconfig

* Add pkgStderrFiltering to monorepoPaths

* add "files" manifest

* pipe all stderr to stderr when CYPRESS_INTERNAL_DEBUG_ELECTRON is enabled

* rm explicit build of stderr-filtering in check-if-binary-exists step

* ensure all dependencies of scripts/ are built before scripts are executed in the check-if-binary-exists command

* fix dev version ref

* swap logic

* add stdin piping

* fix exec name on the run-on-dependencies command to be more useful

* use correct env

* rm obsolete type refs

* simplify stderr-filtering public iface, pipe cy-in-cy stderr through filtering tx

* bust cache

* fix mocks

* fix v8-snapshot

* move stderrfiltering to dev pkg in cli

* skip integrity check in ci, if they are out of date things should fail anyway

* copypasta over a portion of stderr-filtering to cli, since cli cannot import @packages

* Delete issues.md

* rm special filtering for cy in cy

* rm too narrow rules file

---------

Co-authored-by: Jennifer Shehane <shehane.jennifer@gmail.com>
Co-authored-by: Jennifer Shehane <jennifer@cypress.io>
Co-authored-by: Bill Glesias <bglesias@gmail.com>
This commit is contained in:
Cacie Prins
2025-08-19 17:05:53 -04:00
committed by GitHub
parent 01683b8f80
commit e463fdbc61
53 changed files with 2588 additions and 315 deletions

View File

@@ -1,2 +1,2 @@
# Bump this version to force CI to re-create the cache from scratch.
8-13-2025
8-19-2025

View File

@@ -1268,6 +1268,9 @@ commands:
check-if-binary-exists:
steps:
- run-on-dependencies:
package: internal-scripts
command: build
- run:
name: Check if binary exists, exit if it does
command: |
@@ -1275,6 +1278,22 @@ commands:
yarn gulp e2eTestScaffold
yarn check-binary-on-cdn --version $(node ./scripts/get-next-version.js) --type binary --file cypress.zip
run-on-dependencies:
parameters:
package:
description: Package to build all dependencies for. All dependencies in the
dependency chain must have a build script.
type: string
command:
description: Command to run on the dependencies.
type: string
steps:
- run:
name: Exec << parameters.command >> recursively on << parameters.package >>
dependencies.
command: |
yarn lerna run <<parameters.command>> --scope=<<parameters.package>> --include-dependencies
build-and-package-binary:
steps:
- run:

View File

@@ -39,6 +39,7 @@ _Released 08/12/2025 (PENDING)_
- Fixed an issue where `.fixture()` would not return updated content after the underlying file was modified via `.writeFile()`. The fixture cache is now properly invalidated when the backing file is written to, ensuring updated content is returned in subsequent `.fixture()` calls. Fixes [#4716](https://github.com/cypress-io/cypress/issues/4716).
- Fixed an issue where `.fixture()` calls with a specified encoding would sometimes still attempt to parse the file based on its extension. Files with an explicit encoding are now always treated as raw content. Fixes [#32139](https://github.com/cypress-io/cypress/issues/32139).
- Fixed an issue where `.fixture()` calls with different encoding options would return inconsistent content based on execution order. Fixes [#32138](https://github.com/cypress-io/cypress/issues/32138).
- Filters content written to stderr to prevent Electron from spamming with inconsequential errors/warnings. This stderr content can be viewed by enabling the `cypress:internal-stderr` debug namespace. Fixes [#32070](https://github.com/cypress-io/cypress/issues/32070)
- Fixed an issue where Angular Component Testing was printing extraneous warnings to the console by default. By default, errors only will now print to the console. This can still be overridden by passing in a custom webpack config or setting the `verbose` option inside your `angular.json`. Addresses [#26456](https://github.com/cypress-io/cypress/issues/26456).
- Fixed an issue where `ts-loader` was improperly being detected inside `@cypress/webpack-preprocessor`. Fixes [#32265](https://github.com/cypress-io/cypress/issues/32265).

View File

@@ -4,8 +4,6 @@ const cp = require('child_process')
const path = require('path')
const Promise = require('bluebird')
const debug = require('debug')('cypress:cli')
const debugVerbose = require('debug')('cypress-verbose:cli')
const util = require('../util')
const state = require('../tasks/state')
const xvfb = require('./xvfb')
@@ -13,117 +11,6 @@ const verify = require('../tasks/verify')
const errors = require('../errors')
const readline = require('readline')
const isXlibOrLibudevRe = /^(?:Xlib|libudev)/
const isHighSierraWarningRe = /\*\*\* WARNING/
const isRenderWorkerRe = /\.RenderWorker-/
// This is a warning that occurs when running in a container on Linux.
// https://github.com/cypress-io/cypress/issues/29563
// Example:
// [437:1212/125803.148706:ERROR:zygote_host_impl_linux.cc(273)] Failed to adjust OOM score of renderer with pid 610: Permission denied (13)
const isOOMScoreWarningRe = /Failed to adjust OOM score of renderer with pid/
// Chromium (which Electron uses) always makes several attempts to connect to the system dbus.
// This works fine in most desktop environments, but in a docker container, there is no dbus service
// and Chromium emits several error lines, similar to these:
// [1957:0406/160550.146820:ERROR:bus.cc(392)] Failed to connect to the bus: Failed to connect to socket /var/run/dbus/system_bus_socket: No such file or directory
// [1957:0406/160550.147994:ERROR:bus.cc(392)] Failed to connect to the bus: Address does not contain a colon
// These warnings are absolutely harmless. Failure to connect to dbus means that electron won't be able to access the user's
// credential wallet (none exists in a docker container) and won't show up in the system tray (again, none exists).
// Failure to connect is expected and normal here, but users frequently misidentify these errors as the cause of their problems.
// https://github.com/cypress-io/cypress/issues/19299
const isDbusWarning = /Failed to connect to the bus:/
// Electron began logging these on self-signed certs with 17.0.0-alpha.4.
// Once this is fixed upstream this regex can be removed: https://github.com/electron/electron/issues/34583
// Sample:
// [3801:0606/152837.383892:ERROR:cert_verify_proc_builtin.cc(681)] CertVerifyProcBuiltin for www.googletagmanager.com failed:
// ----- Certificate i=0 (OU=Cypress Proxy Server Certificate,O=Cypress Proxy CA,L=Internet,ST=Internet,C=Internet,CN=www.googletagmanager.com) -----
// ERROR: No matching issuer found
const isCertVerifyProcBuiltin = /(^\[.*ERROR:cert_verify_proc_builtin\.cc|^----- Certificate i=0 \(OU=Cypress Proxy|^ERROR: No matching issuer found$)/
/**
* Electron logs benign warnings about Vulkan when run on hosts that do not have a GPU. This is coming from the primary Electron process,
* and not the browser being used for tests.
* Samples:
* Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_intel.so supports Vulkan 1.2, but only supports loader interface version 4. Interface version 5 or newer required to support this version of Vulkan (Policy #LDP_DRIVER_7)
* Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_lvp.so supports Vulkan 1.1, but only supports loader interface version 4. Interface version 5 or newer required to support this version of Vulkan (Policy #LDP_DRIVER_7)
* Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_radeon.so supports Vulkan 1.2, but only supports loader interface version 4. Interface version 5 or newer required to support this verison of Vulkan (Policy #LDP_DRIVER_7)
* Warning: Layer VK_LAYER_MESA_device_select uses API version 1.2 which is older than the application specified API version of 1.3. May cause issues.
*/
const isHostVulkanDriverWarning = /^Warning:.+(#LDP_DRIVER_7|VK_LAYER_MESA_device_select).+/
/**
* Electron logs benign warnings about Vulkan when run in docker containers whose host does not have a GPU. This is coming from the primary
* Electron process, and not the browser being used for tests.
* Sample:
* Warning: vkCreateInstance: Found no drivers!
* Warning: vkCreateInstance failed with VK_ERROR_INCOMPATIBLE_DRIVER
* at CheckVkSuccessImpl (../../third_party/dawn/src/dawn/native/vulkan/VulkanError.cpp:88)
* at CreateVkInstance (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:458)
* at Initialize (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:344)
* at Create (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:266)
* at operator() (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:521)
*/
const isContainerVulkanDriverWarning = /^Warning: vkCreateInstance/
const isContainerVulkanStack = /^\s*at (CheckVkSuccessImpl|CreateVkInstance|Initialize|Create|operator).+(VulkanError|BackendVk).cpp/
/**
* In Electron 32.0.0 a new debug scenario log message started appearing when iframes navigate to about:blank. This is a benign message.
* https://github.com/electron/electron/issues/44368
* Sample:
* [78887:1023/114920.074882:ERROR:debug_utils.cc(14)] Hit debug scenario: 4
*/
const isDebugScenario4 = /^\[[^\]]+debug_utils\.cc[^\]]+\] Hit debug scenario: 4/
/**
* In Electron 32.0.0 a new EGL driver message started appearing when running on Linux. This is a benign message.
* https://github.com/electron/electron/issues/43415
* Sample:
* [78887:1023/114920.074882:ERROR:gl_display.cc(14)] EGL Driver message (Error) eglQueryDeviceAttribEXT: Bad attribute.
*/
const isEGLDriverMessage = /^\[[^\]]+gl_display\.cc[^\]]+\] EGL Driver message \(Error\) eglQueryDeviceAttribEXT: Bad attribute\./
/**
* Mesa/GLX related warnings that occur in certain Linux environments without proper GPU support
* or when running in containers. These are benign warnings that don't affect functionality.
* Samples:
* error: XDG_RUNTIME_DIR is invalid or not set in the environment.
* MESA: error: ZINK: failed to choose pdev
* glx: failed to create drisw screen
*/
const isXdgRuntimeError = /^error: XDG_RUNTIME_DIR is invalid or not set/
const isMesaZinkError = /^MESA: error: ZINK: failed to choose pdev/
const isGlxDriverError = /^glx: failed to create drisw screen/
const GARBAGE_WARNINGS = [
isXlibOrLibudevRe,
isHighSierraWarningRe,
isRenderWorkerRe,
isOOMScoreWarningRe,
isDbusWarning,
isCertVerifyProcBuiltin,
isHostVulkanDriverWarning,
isContainerVulkanDriverWarning,
isContainerVulkanStack,
isDebugScenario4,
isEGLDriverMessage,
isXdgRuntimeError,
isMesaZinkError,
isGlxDriverError,
]
const isGarbageLineWarning = (str) => {
return _.some(GARBAGE_WARNINGS, (re) => {
return re.test(str)
})
}
function isPlatform (platform) {
return os.platform() === platform
}
@@ -161,8 +48,6 @@ function getStdio (needsXvfb) {
}
module.exports = {
isGarbageLineWarning,
start (args, options = {}) {
const needsXvfb = xvfb.isNeeded()
let executable = state.getPathToExecutable(state.getBinaryDir())
@@ -316,13 +201,6 @@ module.exports = {
child.stderr.on('data', (data) => {
const str = data.toString()
// bail if this is warning line garbage
if (isGarbageLineWarning(str)) {
debugVerbose(str)
return
}
// if we have a callback and this explicitly returns
// false then bail
if (onStderrData && onStderrData(str)) {

View File

@@ -1,6 +1,5 @@
require('../../spec_helper')
const _ = require('lodash')
const cp = require('child_process')
const os = require('os')
const tty = require('tty')
@@ -66,72 +65,6 @@ describe('lib/exec/spawn', function () {
sinon.stub(state, 'getPathToExecutable').withArgs(defaultBinaryDir).returns('/path/to/cypress')
})
context('.isGarbageLineWarning', () => {
it('returns true', () => {
const str = `
[46454:0702/140217.292422:ERROR:gles2_cmd_decoder.cc(4439)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_FRAMEBUFFER_OPERATION : glDrawElements: framebuffer incomplete
[46454:0702/140217.292466:ERROR:gles2_cmd_decoder.cc(17788)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_OPERATION : glCreateAndConsumeTextureCHROMIUM: invalid mailbox name
[46454:0702/140217.292526:ERROR:gles2_cmd_decoder.cc(4439)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_FRAMEBUFFER_OPERATION : glClear: framebuffer incomplete
[46454:0702/140217.292555:ERROR:gles2_cmd_decoder.cc(4439)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_FRAMEBUFFER_OPERATION : glDrawElements: framebuffer incomplete
[46454:0702/140217.292584:ERROR:gles2_cmd_decoder.cc(4439)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_FRAMEBUFFER_OPERATION : glClear: framebuffer incomplete
[46454:0702/140217.292612:ERROR:gles2_cmd_decoder.cc(4439)] [.RenderWorker-0x7f8bc5815a00.GpuRasterization]GL ERROR :GL_INVALID_FRAMEBUFFER_OPERATION : glDrawElements: framebuffer incomplete'
[1957:0406/160550.146820:ERROR:bus.cc(392)] Failed to connect to the bus: Failed to connect to socket /var/run/dbus/system_bus_socket: No such file or directory
[1957:0406/160550.147994:ERROR:bus.cc(392)] Failed to connect to the bus: Address does not contain a colon
[3801:0606/152837.383892:ERROR:cert_verify_proc_builtin.cc(681)] CertVerifyProcBuiltin for www.googletagmanager.com failed:
----- Certificate i=0 (OU=Cypress Proxy Server Certificate,O=Cypress Proxy CA,L=Internet,ST=Internet,C=Internet,CN=www.googletagmanager.com) -----
ERROR: No matching issuer found
Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_intel.so supports Vulkan 1.2, but only supports loader interface version 4. Interface version 5 or newer required to support this version of Vulkan (Policy #LDP_DRIVER_7)
Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_lvp.so supports Vulkan 1.1, but only supports loader interface version 4. Interface version 5 or newer required to support this version of Vulkan (Policy #LDP_DRIVER_7)
Warning: loader_scanned_icd_add: Driver /usr/lib/x86_64-linux-gnu/libvulkan_radeon.so supports Vulkan 1.2, but only supports loader interface version 4. Interface version 5 or newer required to support this verison of Vulkan (Policy #LDP_DRIVER_7)
Warning: Layer VK_LAYER_MESA_device_select uses API version 1.2 which is older than the application specified API version of 1.3. May cause issues.
Warning: vkCreateInstance: Found no drivers!
Warning: vkCreateInstance failed with VK_ERROR_INCOMPATIBLE_DRIVER
at CheckVkSuccessImpl (../../third_party/dawn/src/dawn/native/vulkan/VulkanError.cpp:88)
at CreateVkInstance (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:458)
at Initialize (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:344)
at Create (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:266)
at operator() (../../third_party/dawn/src/dawn/native/vulkan/BackendVk.cpp:521)
[78887:1023/114920.074882:ERROR:debug_utils.cc(14)] Hit debug scenario: 4
[18489:0822/130231.159571:ERROR:gl_display.cc(497)] EGL Driver message (Error) eglQueryDeviceAttribEXT: Bad attribute.
[437:1212/125803.148706:ERROR:zygote_host_impl_linux.cc(273)] Failed to adjust OOM score of renderer with pid 610: Permission denied (13)
`
const lines = _
.chain(str)
.split('\n')
.invokeMap('trim')
.compact()
.value()
_.each(lines, (line) => {
expect(spawn.isGarbageLineWarning(line), `expected line to be garbage: ${line}`).to.be.true
})
})
it('returns true for XDG runtime dir warnings', () => {
expect(spawn.isGarbageLineWarning('error: XDG_RUNTIME_DIR is invalid or not set')).to.be.true
})
it('returns true for MESA ZINK errors', () => {
expect(spawn.isGarbageLineWarning('MESA: error: ZINK: failed to choose pdev')).to.be.true
})
it('returns true for GLX driver errors', () => {
expect(spawn.isGarbageLineWarning('glx: failed to create drisw screen')).to.be.true
})
it('returns true for OOM score adjustment warnings', () => {
expect(spawn.isGarbageLineWarning('[437:1212/125803.148706:ERROR:zygote_host_impl_linux.cc(273)] Failed to adjust OOM score of renderer with pid 610: Permission denied (13)')).to.be.true
})
})
context('.start', function () {
// ️️⚠️ NOTE ⚠️
// when asserting the calls made to spawn the child Cypress process
@@ -522,55 +455,6 @@ describe('lib/exec/spawn', function () {
return spawn.start()
})
it('does not write to process.stderr when from xlib or libudev', function () {
const buf1 = Buffer.from('Xlib: something foo')
const buf2 = Buffer.from('libudev something bar')
const buf3 = Buffer.from('asdf')
this.spawnedProcess.stderr.on
.withArgs('data')
.onFirstCall()
.yields(buf1)
.onSecondCall()
.yields(buf2)
.onThirdCall()
.yields(buf3)
this.spawnedProcess.on.withArgs('close').yieldsAsync(0)
sinon.stub(process.stderr, 'write').withArgs(buf3)
os.platform.returns('linux')
xvfb.isNeeded.returns(true)
return spawn.start()
.then(() => {
expect(process.stderr.write).not.to.be.calledWith(buf1)
expect(process.stderr.write).not.to.be.calledWith(buf2)
})
})
it('does not write to process.stderr when from high sierra warnings', function () {
const buf1 = Buffer.from('2018-05-19 15:30:30.287 Cypress[7850:32145] *** WARNING: Textured Window')
const buf2 = Buffer.from('asdf')
this.spawnedProcess.stderr.on
.withArgs('data')
.onFirstCall()
.yields(buf1)
.onSecondCall(buf2)
.yields(buf2)
this.spawnedProcess.on.withArgs('close').yieldsAsync(0)
sinon.stub(process.stderr, 'write').withArgs(buf2)
os.platform.returns('darwin')
return spawn.start()
.then(() => {
expect(process.stderr.write).not.to.be.calledWith(buf1)
})
})
// https://github.com/cypress-io/cypress/issues/1841
// https://github.com/cypress-io/cypress/issues/5241
;['EPIPE', 'ENOTCONN'].forEach((errCode) => {

View File

@@ -114,6 +114,7 @@ export function startDevServer ({
// for instance @nguniversal/builders:ssr-dev-server.
// see https://github.com/nrwl/nx/blob/f930117ed6ab13dccc40725c7e9551be081cc83d/packages/cypress/src/executors/cypress/cypress.impl.ts
if (builderName !== '@nguniversal/builders:ssr-dev-server') {
// eslint-disable-next-line no-console
console.info(`Passing watch mode to DevServer - watch mode is ${watch}`)
overrides = {
watch,

View File

@@ -69,6 +69,7 @@ function generateCTSpec ({ tree, appPath, component }: { tree: Tree, appPath: st
const componentFilename = component['name'].split('.')[0]
const componentName = componentMatch ? componentMatch[0] : componentFilename
// eslint-disable-next-line no-console
console.log(`Creating new component spec for: ${componentName}\n`)
return tree.create(`${appPath}/${componentFilename}.component.cy.ts`, ctSpecContent({ componentName, componentFilename }))

View File

@@ -21,6 +21,7 @@
"@babel/parser": "7.28.0",
"@graphql-tools/batch-execute": "^8.4.6",
"@graphql-tools/delegate": "8.2.1",
"@packages/stderr-filtering": "0.0.0-development",
"@packages/telemetry": "0.0.0-development",
"@urql/core": "2.4.4",
"@urql/exchange-execute": "1.1.0",

View File

@@ -38,6 +38,7 @@ import type { IncomingHttpHeaders } from 'http'
import type { App as ElectronApp } from 'electron'
import { globalPubSub } from '.'
import { ProjectLifecycleManager } from './data/ProjectLifecycleManager'
import { logError } from '@packages/stderr-filtering'
import type { CypressError } from '@packages/errors'
import { resetIssuedWarnings } from '@packages/config'
@@ -290,9 +291,7 @@ export class DataContext {
}
logTraceError (e: unknown) {
// TODO(tim): handle this consistently
// eslint-disable-next-line no-console
console.error(e)
logError(e)
}
onError = (cypressError: CypressError, title: string = 'Unexpected Error') => {

View File

@@ -14,6 +14,7 @@ import { getError } from '@packages/errors'
import { resetIssuedWarnings } from '@packages/config'
import type { RunSpecErrorCode } from '@packages/graphql/src/schemaTypes'
import debugLib from 'debug'
import { logError } from '@packages/stderr-filtering'
export class RunSpecError extends Error {
constructor (public code: typeof RunSpecErrorCode[number], msg: string) {
@@ -209,9 +210,7 @@ export class ProjectActions {
}
})
} catch (e) {
// TODO(tim): remove / replace with ctx.log.error
// eslint-disable-next-line
console.error(e)
logError(e)
throw e
}
}

View File

@@ -19,6 +19,7 @@ const debug = debugLib(`cypress:lifecycle:ProjectConfigIpc`)
const debugVerbose = debugLib(`cypress-verbose:lifecycle:ProjectConfigIpc`)
const CHILD_PROCESS_FILE_PATH = require.resolve('@packages/server/lib/plugins/child/require_async_child')
import { TagStream } from '@packages/stderr-filtering'
// NOTE: need the file:// prefix to avoid https://nodejs.org/api/errors.html#err_unsupported_esm_url_scheme on windows
const tsx = os.platform() === 'win32' ? `file://${toPosix(require.resolve('tsx'))}` : toPosix(require.resolve('tsx'))
@@ -150,8 +151,8 @@ export class ProjectConfigIpc extends EventEmitter {
if (this._childProcess.stdout && this._childProcess.stderr) {
// manually pipe plugin stdout and stderr for Cypress Cloud capture
// @see https://github.com/cypress-io/cypress/issues/7434
this._childProcess.stdout.on('data', (data) => process.stdout.write(data))
this._childProcess.stderr.on('data', (data) => process.stderr.write(data))
this._childProcess.stdout.pipe(process.stdout)
this._childProcess.stderr.pipe(new TagStream()).pipe(process.stderr)
}
let resolved = false
@@ -260,7 +261,7 @@ export class ProjectConfigIpc extends EventEmitter {
})
}
private forkConfigProcess () {
private forkConfigProcess (): ChildProcess {
const configProcessArgs = ['--projectRoot', this.projectRoot, '--file', this.configFilePath]
// we do NOT want telemetry enabled within our cy-in-cy tests as it isn't configured to handled it
const env = _.omit(process.env, 'CYPRESS_INTERNAL_E2E_TESTING_SELF', 'CYPRESS_INTERNAL_ENABLE_TELEMETRY')

View File

@@ -1,7 +1,7 @@
const cp = require('child_process')
const os = require('os')
const path = require('path')
const debug = require('debug')('cypress:electron')
const debugElectron = require('debug')('cypress:electron')
const Promise = require('bluebird')
const minimist = require('minimist')
const inspector = require('inspector')
@@ -10,8 +10,12 @@ const paths = require('./paths')
const install = require('./install')
let fs = require('fs-extra')
const debugStderr = require('debug')('cypress:internal-stderr')
fs = Promise.promisifyAll(fs)
const { filter, DEBUG_PREFIX } = require('@packages/stderr-filtering')
/**
* If running as root on Linux, no-sandbox must be passed or Chrome will not start
*/
@@ -26,7 +30,7 @@ module.exports = {
},
install (...args) {
debug('installing %o', { args })
debugElectron('installing %o', { args })
return install.package.apply(install, args)
},
@@ -39,7 +43,7 @@ module.exports = {
* Returns the Node version bundled inside Electron.
*/
getElectronNodeVersion () {
debug('getting Electron Node version')
debugElectron('getting Electron Node version')
const args = []
@@ -50,7 +54,7 @@ module.exports = {
// runs locally installed "electron" bin alias
const localScript = path.join(__dirname, 'print-node-version.js')
debug('local script that prints Node version %s', localScript)
debugElectron('local script that prints Node version %s', localScript)
args.push(localScript)
@@ -59,7 +63,7 @@ module.exports = {
timeout: 10000, // prevents hanging Electron if there is an error for some reason
}
debug('Running Electron with %o %o', args, options)
debugElectron('Running Electron with %o %o', args, options)
return execa('electron', args, options)
.then((result) => result.stdout)
@@ -72,7 +76,7 @@ module.exports = {
cli (argv = []) {
const opts = minimist(argv)
debug('cli options %j', opts)
debugElectron('cli options %j', opts)
const pathToApp = argv[0]
@@ -88,26 +92,26 @@ module.exports = {
},
open (appPath, argv, cb) {
debug('opening %s', appPath)
debugElectron('opening %s', appPath)
appPath = path.resolve(appPath)
const dest = paths.getPathToResources('app')
debug('appPath %s', appPath)
debugElectron('appPath %s', appPath)
debug('dest path %s', dest)
debugElectron('dest path %s', dest)
// make sure this path exists!
return fs.accessAsync(appPath)
.then(() => {
debug('appPath exists %s', appPath)
debugElectron('appPath exists %s', appPath)
// clear out the existing symlink
return fs.removeAsync(dest)
}).then(() => {
const symlinkType = paths.getSymlinkType()
debug('making symlink from %s to %s of type %s', appPath, dest, symlinkType)
debugElectron('making symlink from %s to %s of type %s', appPath, dest, symlinkType)
return fs.ensureSymlinkAsync(appPath, dest, symlinkType)
}).then(() => {
@@ -135,33 +139,50 @@ module.exports = {
}
}
debug('spawning %s with args', execPath, argv)
debugElectron('spawning %s with args', execPath, argv)
if (debug.enabled) {
if (debugElectron.enabled) {
// enable the internal chromium logger
argv.push('--enable-logging')
}
const spawned = cp.spawn(execPath, argv, { stdio: 'inherit' })
const spawned = cp.spawn(execPath, argv, { stdio: 'pipe' })
.on('error', (err) => {
// If electron is throwing an error event, we need to ensure it's
// printed to console.
// eslint-disable-next-line no-console
console.error(err)
return process.exit(1)
})
.on('close', (code, signal) => {
debug('electron closing %o', { code, signal })
debugElectron('electron closing %o', { code, signal })
if (signal) {
debug('electron exited with a signal, forcing code = 1 %o', { signal })
debugElectron('electron exited with a signal, forcing code = 1 %o', { signal })
code = 1
}
if (cb) {
debug('calling callback with code', code)
debugElectron('calling callback with code', code)
return cb(code)
}
debug('process.exit with code', code)
debugElectron('process.exit with code', code)
return process.exit(code)
})
if ([1, '1'].includes(process.env.ELECTRON_ENABLE_LOGGING)) {
spawned.stderr.pipe(process.stderr)
} else {
spawned.stderr.pipe(filter(process.stderr, debugStderr, DEBUG_PREFIX))
}
spawned.stdout.pipe(process.stdout)
process.stdin.pipe(spawned.stdin)
return spawned
}).catch((err) => {
// eslint-disable-next-line no-console

View File

@@ -4,6 +4,7 @@
"private": true,
"main": "index.js",
"scripts": {
"build": "echo 'electron package build: no build necessary'",
"build-binary": "node ./bin/cypress-electron --install",
"clean-deps": "rimraf node_modules",
"postinstall": "echo '@packages/electron needs: yarn build'",
@@ -16,6 +17,7 @@
},
"dependencies": {
"@packages/icons": "0.0.0-development",
"@packages/stderr-filtering": "0.0.0-development",
"bluebird": "3.5.3",
"debug": "^4.3.4",
"fs-extra": "9.1.0",

View File

@@ -20,6 +20,7 @@
"dependencies": {
"@graphql-tools/delegate": "8.2.1",
"@graphql-tools/wrap": "8.1.1",
"@packages/stderr-filtering": "0.0.0-development",
"@urql/core": "2.4.4",
"chalk": "4.1.2",
"cors": "2.8.5",

View File

@@ -1,6 +1,7 @@
import { plugin } from 'nexus'
import { isPromiseLike, pathToArray } from 'nexus/dist/utils'
import chalk from 'chalk'
import { logError } from '@packages/stderr-filtering'
const HANGING_RESOLVER_THRESHOLD = 100
@@ -29,8 +30,7 @@ export const nexusSlowGuardPlugin = plugin({
if (process.env.CYPRESS_INTERNAL_ENV !== 'production') {
const totalMS = (process.hrtime.bigint() - start) / BigInt(1000000)
// eslint-disable-next-line no-console
console.error(chalk.red(`\n\nNexusSlowGuard: Taking more than ${threshold}ms to execute ${JSON.stringify(resolvePath)} for ${operationId} (total time ${totalMS}ms)\n\n`))
logError(chalk.red(`\n\nNexusSlowGuard: Taking more than ${threshold}ms to execute ${JSON.stringify(resolvePath)} for ${operationId} (total time ${totalMS}ms)\n\n`))
}
}, threshold)

View File

@@ -2,6 +2,7 @@ import type { StudioCloudApi } from '@packages/types/src/studio/studio-server-ty
import Debug from 'debug'
import { stripPath } from '../../strip_path'
const debug = Debug('cypress:server:cloud:api:studio:report_studio_errors')
import { logError } from '@packages/stderr-filtering'
export interface ReportStudioErrorOptions {
cloudApi: StudioCloudApi
@@ -46,8 +47,7 @@ export function reportStudioError ({
process.env.NODE_ENV === 'development' ||
process.env.CYPRESS_INTERNAL_E2E_TESTING_SELF
) {
// eslint-disable-next-line no-console
console.error(`Error in ${studioMethod}:`, error)
logError(`Error in ${studioMethod}:`, error)
return
}

View File

@@ -23,6 +23,7 @@ import { telemetryManager } from './telemetry/TelemetryManager'
import { BUNDLE_LIFECYCLE_MARK_NAMES, BUNDLE_LIFECYCLE_TELEMETRY_GROUP_NAMES } from './telemetry/constants/bundle-lifecycle'
import { INITIALIZATION_TELEMETRY_GROUP_NAMES } from './telemetry/constants/initialization'
import crypto from 'crypto'
import { logError } from '@packages/stderr-filtering'
const debug = Debug('cypress:server:studio-lifecycle-manager')
const routes = require('../routes')
@@ -357,8 +358,7 @@ export class StudioLifecycleManager {
return studioManager
}).catch((error) => {
// eslint-disable-next-line no-console
console.error('Error during reload of studio manager: %o', error)
logError('Error during reload of studio manager: %o', error)
return null
})

View File

@@ -8,6 +8,7 @@ import FileUtil from './util/file'
import { fs } from './util/fs'
import { AllowedState, allowedKeys } from '@packages/types'
import { globalPubSub } from '@packages/data-context'
import { logError } from '@packages/stderr-filtering'
const debug = Debug('cypress:server:saved_state')
@@ -81,8 +82,7 @@ const normalizeAndAllowSet = (set, key, value) => {
})
if (invalidKeys.length) {
// eslint-disable-next-line no-console
console.error(`WARNING: attempted to save state for non-allowed key(s): ${invalidKeys.join(', ')}. All keys must be allowed in server/lib/saved_state.ts`)
logError(`WARNING: attempted to save state for non-allowed key(s): ${invalidKeys.join(', ')}. All keys must be allowed in server/lib/saved_state.ts`)
}
return set(_.pick(valueObject, allowedKeys))

View File

@@ -34,6 +34,7 @@
"@cypress/webpack-preprocessor": "0.0.0-development",
"@ffmpeg-installer/ffmpeg": "1.1.0",
"@packages/icons": "0.0.0-development",
"@packages/stderr-filtering": "0.0.0-development",
"@packages/telemetry": "0.0.0-development",
"@types/mime": "^3.0.1",
"ansi_up": "5.0.0",

View File

@@ -1,6 +1,7 @@
import { expect } from 'chai'
import { sinon } from '../../../../spec_helper'
import { reportStudioError } from '@packages/server/lib/cloud/api/studio/report_studio_error'
import { START_TAG, END_TAG } from '@packages/stderr-filtering'
describe('lib/cloud/api/studio/report_studio_error', () => {
let cloudRequestStub: sinon.SinonStub
@@ -47,8 +48,10 @@ describe('lib/cloud/api/studio/report_studio_error', () => {
// eslint-disable-next-line no-console
expect(console.error).to.have.been.calledWith(
START_TAG,
'Error in testMethod:',
error,
END_TAG,
)
})
@@ -67,8 +70,10 @@ describe('lib/cloud/api/studio/report_studio_error', () => {
// eslint-disable-next-line no-console
expect(console.error).to.have.been.calledWith(
START_TAG,
'Error in testMethod:',
error,
END_TAG,
)
})
@@ -87,8 +92,10 @@ describe('lib/cloud/api/studio/report_studio_error', () => {
// eslint-disable-next-line no-console
expect(console.error).to.have.been.calledWith(
START_TAG,
'Error in testMethod:',
error,
END_TAG,
)
})

View File

@@ -5,7 +5,7 @@ const Promise = require('bluebird')
const { fs } = require(`../../lib/util/fs`)
const FileUtil = require(`../../lib/util/file`)
const appData = require(`../../lib/util/app_data`)
const { START_TAG, END_TAG } = require(`@packages/stderr-filtering`)
const savedState = require(`../../lib/saved_state`)
describe('lib/saved_state', () => {
@@ -90,7 +90,7 @@ describe('lib/saved_state', () => {
return state.set({ foo: 'bar', baz: 'qux' })
}).then(() => {
// eslint-disable-next-line no-console
expect(console.error).to.be.calledWith('WARNING: attempted to save state for non-allowed key(s): foo, baz. All keys must be allowed in server/lib/saved_state.ts')
expect(console.error).to.be.calledWith(START_TAG, 'WARNING: attempted to save state for non-allowed key(s): foo, baz. All keys must be allowed in server/lib/saved_state.ts', END_TAG)
})
})
})

View File

@@ -0,0 +1,145 @@
# Stderr Filtering
A Node.js package for standardizing error logging with tags to enable filtering of third-party stderr output to debug streams.
## Overview
This package provides a standardized approach to error logging that allows third-party stderr output to be filtered and redirected to debug output. The primary mechanism is the `logError` function, which wraps error messages with special tags that can be detected and filtered by the stream processing utilities.
## Primary Use Case
The main purpose of this package is to align all node-executed packages in the project with tagged error logging, enabling third-party stderr to be shunted to debug output. This is achieved through:
1. **Standardized error logging** using `logError()` with consistent tags
2. **Stream filtering** at execution boundaries to enforce the filtering behavior
## API Reference
### logError
The primary utility for logging error messages with special tags for stderr filtering.
```typescript
import { logError, START_TAG, END_TAG } from '@packages/stderr-filtering'
// Log an error with filtering tags
logError('Something went wrong')
// Use tags directly if needed
console.error(START_TAG, 'Error message', END_TAG)
```
**Exported Constants:**
- `START_TAG` - Tag that marks the beginning of filterable error content
- `END_TAG` - Tag that marks the end of filterable error content
### FilterTaggedContent
Filters content based on start and end tags, supporting multi-line tagged content. Used at execution boundaries to enforce filtering.
```typescript
import { FilterTaggedContent } from '@packages/stderr-filtering'
const taggedEntries = createWriteStream('taggedEntries.log')
const filter = new FilterTaggedContent('<TAG>', '</TAG>', taggedEntries)
inputStream.pipe(filter).pipe(outputStream)
```
**Constructor Parameters:**
- `startTag: string` - String that marks the beginning of content to filter
- `endTag: string` - String that marks the end of content to filter
- `filtered: Writable` - Stream for filtered content
### FilterPrefixedContent
Filters content based on a prefix pattern, routing matching lines to a filtered stream. Used for additional filtering at execution boundaries.
```typescript
import { FilterPrefixedContent } from '@packages/stderr-filtering'
const errorStream = new Writable()
const filter = new FilterPrefixedContent(/^ERROR:/, errorStream)
inputStream.pipe(filter).pipe(outputStream)
```
**Constructor Parameters:**
- `prefix: RegExp` - Regular expression pattern to test against the beginning of each line
- `filtered: Writable` - Stream for lines that match the prefix pattern
### WriteToDebug
A writable stream that routes incoming data to a debug logger with proper line handling. Used for debug output at execution boundaries.
```typescript
import { WriteToDebug } from '@packages/stderr-filtering'
import debug from 'debug'
const debugLogger = debug('myapp:stream')
const debugStream = new WriteToDebug(debugLogger)
someStream.pipe(debugStream)
```
**Constructor Parameters:**
- `debug: Debugger` - Debug logger instance to write output to
## Usage Examples
### Standard Error Logging
```typescript
import { logError } from '@packages/stderr-filtering'
// Use logError for all error logging to enable filtering
try {
// Some operation that might fail
} catch (error) {
logError('Operation failed:', error.message)
}
```
### Execution Boundary Filtering
```typescript
import { FilterTaggedContent, WriteToDebug } from '@packages/stderr-filtering'
import debug from 'debug'
const debugLogger = debug('app:stderr')
const debugStream = new WriteToDebug(debugLogger)
// Filter tagged errors to debug output
const filter = new FilterTaggedContent(
'<<<CYPRESS.STDERR.START>>>',
'<<<CYPRESS.STDERR.END>>>',
debugStream
)
// Apply at an execution boundary: filter a child process's stderr.
// (process.stderr itself is a Writable and cannot be piped *from*.)
childProcess.stderr.pipe(filter).pipe(process.stdout)
```
### Basic Error Filtering
```typescript
import { FilterPrefixedContent } from '@packages/stderr-filtering'
import { createWriteStream } from 'fs'
const errorLog = createWriteStream('errors.log')
const filter = new FilterPrefixedContent(/^ERROR:/, errorLog)
childProcess.stderr.pipe(filter).pipe(process.stdout)
```
## Error Handling
The package provides robust error handling throughout the stream processing chain:
- Errors in processing are properly propagated up the stream
- Async operations use proper promise rejection handling
- Stream lifecycle events are handled correctly
## License
This package is part of the Cypress project and is licensed under the MIT License.

View File

@@ -0,0 +1,13 @@
import { baseConfig } from '@packages/eslint-config'
export default [
...baseConfig,
{
files: ['**/*.{ts,js,jsx,tsx,vue}'],
languageOptions: {
parserOptions: {
tsconfigRootDir: __dirname,
},
},
},
]

View File

@@ -0,0 +1,22 @@
import { type Writable } from 'stream'
import type { Debugger } from 'debug'
import { START_TAG, END_TAG } from './constants'
import { FilterPrefixedContent } from './FilterPrefixedContent'
import { FilterTaggedContent } from './FilterTaggedContent'
import { WriteToDebug } from './WriteToDebug'
// Captured once at module load: when Electron's own logging is enabled,
// tag-based filtering is bypassed and output flows straight to the debug sink.
const DISABLE_TAGS = process.env.ELECTRON_ENABLE_LOGGING === '1'

/**
 * Builds the stderr filtering pipeline.
 *
 * Lines matching `prefix` are diverted to `stderr`, as is content wrapped in
 * this package's START/END tags (unless tag filtering is disabled); everything
 * else is forwarded to the provided debug logger.
 *
 * @param stderr destination stream for content that should surface on real stderr
 * @param debug debug logger receiving the remaining (filtered) output
 * @param prefix regexp identifying lines that must be diverted to stderr
 * @param disableTags when true, skip the tag-filtering stage
 * @returns the head of the pipeline; pipe the raw stderr source into it
 */
export function filter (stderr: Writable, debug: Debugger, prefix: RegExp, disableTags: boolean = false): Writable {
  const head = new FilterPrefixedContent(prefix, stderr)
  const tagFilter = new FilterTaggedContent(START_TAG, END_TAG, stderr)
  const debugSink = new WriteToDebug(debug)

  const skipTagFiltering = DISABLE_TAGS || disableTags

  if (skipTagFiltering) {
    head.pipe(debugSink)
  } else {
    head.pipe(tagFilter).pipe(debugSink)
  }

  return head
}

View File

@@ -0,0 +1,118 @@
import { Transform, Writable } from 'stream'
import { StringDecoder } from 'string_decoder'
import { LineDecoder } from './LineDecoder'
import Debug from 'debug'
const debugStderr = Debug('cypress:stderr')
import { writeWithBackpressure } from './writeWithBackpressure'
/**
* Filters content based on a prefix pattern, routing matching lines to a filtered stream.
*
* This transform stream processes incoming data line by line and routes content between two
* output streams based on a regular expression prefix test. Lines that match the prefix pattern
* are sent to the filtered stream, while non-matching lines are sent to the main output stream.
*
* Example usage:
* ```typescript
* const errorStream = new Writable()
* const filter = new FilterPrefixedContent(/^ERROR:/, errorStream)
* inputStream.pipe(filter).pipe(outputStream)
* ```
*/
export class FilterPrefixedContent extends Transform {
  // Decoders are created lazily: the chunk encoding is only known once the
  // first chunk arrives.
  private strDecoder?: StringDecoder
  private lineDecoder?: LineDecoder

  /**
   * Creates a new FilterPrefixedContent instance.
   *
   * @param prefix The regular expression pattern to test against the beginning of each line
   * @param wasteStream The writable stream that receives lines matching the prefix pattern
   */
  constructor (private prefix: RegExp, private wasteStream: Writable) {
    super(({
      transform: (chunk, encoding, next) => this.transform(chunk, encoding, next),
      flush: (callback) => this.flush(callback),
    }))
  }

  /**
   * Processes incoming chunks and routes lines based on prefix matching.
   *
   * @param chunk The buffer chunk to process
   * @param encoding The encoding of the chunk
   * @param next Callback to call when processing is complete
   */
  transform = async (chunk: Buffer, encoding: BufferEncoding, next: (err?: Error) => void) => {
    try {
      if (!this.strDecoder) {
        // @ts-expect-error type here is not correct, 'buffer' is not a valid encoding but it does get passed in
        this.strDecoder = new StringDecoder(encoding === 'buffer' ? 'utf8' : encoding)
      }
      if (!this.lineDecoder) {
        this.lineDecoder = new LineDecoder()
      }
      const str = this.strDecoder.write(chunk)
      this.lineDecoder.write(str)
      // Snapshot the currently-complete lines before awaiting: writeLine may
      // yield to the event loop, and further writes could mutate the decoder.
      for (const line of Array.from(this.lineDecoder || [])) {
        await this.writeLine(line, encoding)
      }
      next()
    } catch (err) {
      debugStderr('error in transform', err)
      next(err as Error)
    }
  }

  /**
   * Flushes any remaining buffered content when the stream ends.
   *
   * The decoder null-checks mirror transform(); if no chunk was ever written,
   * fresh (empty) decoders are created so end() yields nothing.
   *
   * @param callback Callback to call when flushing is complete
   */
  flush = async (callback: (err?: Error) => void) => {
    try {
      if (!this.strDecoder) {
        this.strDecoder = new StringDecoder()
      }
      if (!this.lineDecoder) {
        this.lineDecoder = new LineDecoder()
      }
      if (this.lineDecoder) {
        for (const line of Array.from(this.lineDecoder?.end() || [])) {
          await this.writeLine(line)
        }
      }
      callback()
    } catch (err) {
      callback(err as Error)
    }
  }

  /**
   * Routes a single line to the appropriate stream based on prefix matching.
   *
   * Tests the line against the prefix regular expression and routes it to either
   * the waste stream (if it matches) or the main output stream (if it doesn't match).
   *
   * @param line The line to test and route
   * @param encoding Optional encoding used to re-encode the line into a Buffer
   */
  private async writeLine (line: string, encoding?: BufferEncoding | 'buffer'): Promise<void> {
    // NOTE(review): `line` has already been decoded to a string; re-encoding it
    // with the original `encoding` assumes the text round-trips — confirm for
    // non-utf8 encodings.
    if (this.prefix.test(line)) {
      await writeWithBackpressure(this.wasteStream, Buffer.from(line, (encoding === 'buffer' ? 'utf8' : encoding) ?? 'utf8'))
    } else {
      const canWrite = this.push(Buffer.from(line, (encoding === 'buffer' ? 'utf8' : encoding) ?? 'utf8'))
      if (!canWrite) {
        // NOTE(review): waits for 'drain' on this Transform after push()
        // returned false; Node's readable side does not emit 'drain', so this
        // appears to rely on the writable side's event — confirm this cannot
        // stall under sustained backpressure.
        await new Promise((resolve) => this.once('drain', resolve))
      }
    }
  }
}

View File

@@ -0,0 +1,157 @@
import { Transform, Writable } from 'stream'
import { StringDecoder } from 'string_decoder'
import { LineDecoder } from './LineDecoder'
import Debug from 'debug'
import { writeWithBackpressure } from './writeWithBackpressure'
const debug = Debug('cypress:stderr-filtering:FilterTaggedContent')
/**
* Filters content based on start and end tags, supporting multi-line tagged content.
*
* This transform stream processes incoming data and routes content between two output streams
* based on tag detection. Content between start and end tags is sent to the filtered stream,
* while content outside tags is sent to the main output stream. The class handles cases where
* tags span multiple lines by maintaining state across line boundaries.
*
* Example usage:
* ```typescript
* const filter = new FilterTaggedContent('<secret>', '</secret>', filteredStream)
* inputStream.pipe(filter).pipe(outputStream)
* ```
*/
export class FilterTaggedContent extends Transform {
  private strDecoder?: StringDecoder
  private lineDecoder?: LineDecoder
  // Tracks whether we are currently inside a start/end tag pair; persists
  // across chunk and line boundaries so multi-line tagged content is filtered.
  private inTaggedContent: boolean = false

  /**
   * Creates a new FilterTaggedContent instance.
   *
   * @param startTag The string that marks the beginning of content to filter
   * @param endTag The string that marks the end of content to filter
   * @param wasteStream The writable stream that receives the filtered (tagged) content
   */
  constructor (private startTag: string, private endTag: string, private wasteStream: Writable) {
    super({
      transform: (chunk, encoding, next) => this.transform(chunk, encoding, next),
      flush: (callback) => this.flush(callback),
    })
  }

  /**
   * Processes incoming chunks and routes content based on tag detection.
   *
   * @param chunk The buffer chunk to process
   * @param encoding The encoding of the chunk
   * @param next Callback to call when processing is complete
   */
  transform = async (chunk: Buffer, encoding: BufferEncoding, next: (err?: Error) => void) => {
    try {
      this.ensureDecoders(encoding)
      const str = this.strDecoder?.write(chunk) ?? ''
      this.lineDecoder?.write(str)
      debug('processing str for tags: "%s"', str)
      // Snapshot complete lines before awaiting, since processLine can yield
      // to the event loop while writing to the waste stream.
      for (const line of Array.from(this.lineDecoder || [])) {
        await this.processLine(line)
      }
      next()
    } catch (err) {
      next(err as Error)
    }
  }

  /**
   * Flushes any remaining buffered content when the stream ends.
   *
   * @param callback Callback to call when flushing is complete
   */
  flush = async (callback: (err?: Error) => void) => {
    debug('flushing')
    this.ensureDecoders()
    try {
      for (const line of Array.from(this.lineDecoder?.end() || [])) {
        await this.processLine(line)
      }
      callback()
    } catch (err) {
      callback(err as Error)
    }
  }

  // Lazily creates both decoders; 'buffer' is mapped to utf8 because Node
  // passes it as the encoding for Buffer chunks.
  private ensureDecoders (encoding?: BufferEncoding | 'buffer') {
    const enc = (encoding === 'buffer' ? 'utf8' : encoding) ?? 'utf8'
    if (!this.lineDecoder) {
      this.lineDecoder = new LineDecoder()
    }
    if (!this.strDecoder) {
      this.strDecoder = new StringDecoder(enc)
    }
  }

  /**
   * Processes a single line and routes content based on tag positions.
   *
   * This method handles the complex logic of detecting start and end tags within a line,
   * maintaining state across lines, and routing content to the appropriate streams.
   * It supports cases where both tags appear on the same line, only one tag appears,
   * or no tags appear but the line is part of ongoing tagged content.
   *
   * NOTE(review): `lastIndexOf` is used for the end tag, so if a line contains
   * multiple tag pairs (`start…end…start…end`), everything between the first
   * start and the last end — including the intermediate tags — goes to the
   * waste stream. Confirm that is acceptable for the expected input.
   *
   * @param line The line to process
   */
  private async processLine (line: string): Promise<void> {
    const startPos = line.indexOf(this.startTag)
    const endPos = line.lastIndexOf(this.endTag)
    if (startPos >= 0 && endPos >= 0) {
      // Both tags on same line
      if (startPos > 0) {
        await this.pass(line.slice(0, startPos))
      }
      await this.writeToWasteStream(line.slice(startPos + this.startTag.length, endPos))
      if (endPos + this.endTag.length < line.length) {
        await this.pass(line.slice(endPos + this.endTag.length))
      }
    } else if (startPos >= 0) {
      // Start tag found
      if (startPos > 0) {
        await this.pass(line.slice(0, startPos))
      }
      await this.writeToWasteStream(line.slice(startPos + this.startTag.length))
      this.inTaggedContent = true
    } else if (endPos >= 0) {
      // End tag found
      await this.writeToWasteStream(line.slice(0, endPos))
      if (endPos + this.endTag.length < line.length) {
        await this.pass(line.slice(endPos + this.endTag.length))
      }
      this.inTaggedContent = false
    } else if (this.inTaggedContent) {
      // Currently in tagged content
      await this.writeToWasteStream(line)
    } else {
      // Not in tagged content
      await this.pass(line)
    }
  }

  // Writes filtered (tagged) content to the waste stream, honoring its
  // backpressure. Callers never pass `encoding`, so utf8 is used in practice.
  private async writeToWasteStream (line: string, encoding?: BufferEncoding | 'buffer') {
    debug('writing to waste stream: "%s"', line)
    await writeWithBackpressure(this.wasteStream, Buffer.from(line, (encoding === 'buffer' ? 'utf8' : encoding) ?? 'utf8'))
  }

  // Forwards untagged content downstream.
  // NOTE(review): the push() return value is ignored here — unlike
  // writeToWasteStream there is no backpressure wait; confirm this is intended.
  private async pass (line: string, encoding?: BufferEncoding | 'buffer') {
    debug('passing: "%s"', line)
    this.push(Buffer.from(line, (encoding === 'buffer' ? 'utf8' : encoding) ?? 'utf8'))
  }
}

View File

@@ -0,0 +1,101 @@
/**
* Decodes incoming string chunks into complete lines, handling partial lines across chunk boundaries.
*
* This class buffers incoming string data and provides an iterator interface to yield complete
* lines. It handles the case where a line might be split across multiple chunks by maintaining
* an internal buffer. The end() method should be called to flush any remaining buffered content
* when processing is complete.
*/
import Debug from 'debug'
import { END_TAG } from './constants'
const debug = Debug(`cypress:stderr-filtering:LineDecoder:${process.pid}`)
/**
 * Buffers incoming string chunks and yields complete lines.
 *
 * A line is complete when it contains a trailing newline, or — when no
 * newline is present — when the buffered text ends with the override token
 * (by default the package END_TAG), which allows tag-terminated output that
 * never received a newline to be released immediately.
 */
export class LineDecoder {
  // Accumulated text that has not yet formed a complete line.
  private pending: string = ''

  constructor (private overrideToken: string = END_TAG) {}

  /**
   * Adds a chunk of string data to the internal buffer.
   *
   * @param chunk The string chunk to add to the buffer
   */
  public write (chunk: string) {
    debug('writing chunk to line decoder', { chunk })
    this.pending += chunk
  }

  /**
   * Yields every currently-complete line, newline characters preserved.
   * Any trailing partial line stays buffered for the next write/iteration.
   */
  * [Symbol.iterator] (): Generator<string> {
    debug('iterating over lines in line decoder')
    for (let line = this.nextLine(); line !== undefined; line = this.nextLine()) {
      debug('yielding line:', line)
      debug('buffer size:', this.pending.length)
      yield line
    }
  }

  /**
   * Appends an optional final chunk, then yields all remaining complete lines.
   * Call this when the upstream source has ended.
   *
   * @param chunk Optional final chunk to process along with the buffer
   */
  * end (chunk?: string) {
    this.pending += chunk || ''
    for (let line = this.nextLine(); line !== undefined; line = this.nextLine()) {
      yield line
    }
  }

  /**
   * Extracts the next complete line from the buffer, or undefined if the
   * buffered text is still an incomplete line.
   */
  private nextLine (): string | undefined {
    const newlineIndex = this.pending.indexOf('\n')

    // A buffer ending in the override token counts as a complete line even
    // without a newline.
    if (newlineIndex < 0 && this.pending.endsWith(this.overrideToken)) {
      debug('ends with override token')
      const line = this.pending

      this.pending = ''

      return line
    }

    if (newlineIndex >= 0) {
      debug('contains a newline')
      const splitAt = newlineIndex + 1
      const line = this.pending.slice(0, splitAt)

      this.pending = this.pending.slice(splitAt)

      return line
    }

    return undefined
  }
}

View File

@@ -0,0 +1,100 @@
import { Transform } from 'stream'
import { START_TAG, END_TAG } from './constants'
import { StringDecoder } from 'string_decoder'
import Debug from 'debug'
const debug = Debug('cypress:stderr-filtering:TagStream')
/**
* A Transform stream that wraps input data with start and end tags.
*
* This stream processes incoming chunks and wraps them with configurable
* start and end tags before passing them downstream. It handles both
* Buffer and string inputs, using a StringDecoder for proper encoding
* when processing Buffer chunks.
*
* By default, the start and end tags are the constants exported by this package:
* - START_TAG
* - END_TAG
*
* @example
* ```typescript
* const tagStream = new TagStream('[START]', '[END]');
* tagStream.pipe(process.stdout);
* tagStream.write('Hello World'); // Outputs: [START]Hello World[END]
* ```
*/
/**
 * A Transform stream that wraps input data with start and end tags.
 *
 * This stream processes incoming chunks and wraps them with configurable
 * start and end tags before passing them downstream. It handles both
 * Buffer and string inputs, using a StringDecoder for proper encoding
 * when processing Buffer chunks.
 *
 * By default, the start and end tags are the constants exported by this package:
 * - START_TAG
 * - END_TAG
 *
 * @example
 * ```typescript
 * const tagStream = new TagStream('[START]', '[END]');
 * tagStream.pipe(process.stdout);
 * tagStream.write('Hello World'); // Outputs: [START]Hello World[END]
 * ```
 */
export class TagStream extends Transform {
  decoder?: StringDecoder

  // Lazily create the decoder: it is only needed once a Buffer chunk arrives.
  private get initializedDecoder () {
    debug('initializedDecoder', !!this.decoder)
    if (!this.decoder) {
      this.decoder = new StringDecoder()
    }

    return this.decoder
  }

  /**
   * Creates a new TagStream instance.
   *
   * @param startTag - The tag to prepend to each chunk. Defaults to START_TAG.
   * @param endTag - The tag to append to each chunk. Defaults to END_TAG.
   */
  constructor (private startTag: string = START_TAG, private endTag: string = END_TAG) {
    super({
      transform: (...args) => this.transform(...args),
      // Fix: flush was previously never registered here, so the flush()
      // method was dead code and any bytes still buffered in the
      // StringDecoder were silently dropped at stream end.
      flush: (callback) => this.flush(callback),
    })
  }

  /**
   * Transforms incoming chunks by wrapping them with start and end tags.
   *
   * Processes the input chunk, handles both Buffer and string inputs,
   * and wraps the result with the configured start and end tags. Empty
   * decoder output produces no downstream chunk.
   *
   * @param chunk - The input chunk to transform. Can be Buffer, string, or any other type.
   * @param encoding - The encoding of the chunk (used by Transform stream).
   * @param callback - Callback function to signal completion of transformation.
   * @returns Promise that resolves when transformation is complete.
   */
  async transform (chunk: Buffer | string | any, encoding: string, callback: (error?: Error, data?: Buffer) => void) {
    try {
      const out = chunk instanceof Buffer ?
        this.initializedDecoder.write(chunk) :
        chunk

      const transformed = `${this.startTag}${out}${this.endTag}`

      debug(`transformed: "${transformed.replaceAll('\n', '\\n')}"`)

      // Only push when the decoder produced text: pushing an empty chunk is a
      // no-op and would otherwise emit an empty tag wrapper downstream.
      let canWrite = true

      if (out) {
        canWrite = this.push(Buffer.from(transformed))
      }

      if (!canWrite) {
        debug('waiting for drain')
        // NOTE(review): waits for 'drain' after push() returned false; the
        // readable side of a Transform does not emit 'drain' — confirm this
        // cannot stall under sustained backpressure.
        await new Promise((resolve) => this.once('drain', resolve))
      }

      callback()
    } catch (err) {
      debug('error', err)
      callback(err as Error)
    }
  }

  /**
   * Flushes any remaining buffered data and wraps it with tags.
   *
   * Called when the stream is ending to process any remaining data in the
   * StringDecoder buffer. Only emits a final tagged chunk when the decoder
   * actually held buffered bytes — previously an empty `startTag + endTag`
   * pair would have been emitted on every stream end.
   *
   * @param callback - Callback function to signal completion of flush operation.
   */
  flush (callback: (error?: Error, data?: Buffer) => void) {
    debug('flushing')
    const out = this.initializedDecoder.end()

    if (out) {
      callback(undefined, Buffer.from(`${this.startTag}${out}${this.endTag}`))
    } else {
      callback()
    }
  }
}

View File

@@ -0,0 +1,90 @@
import { Writable } from 'stream'
import type { Debugger } from 'debug'
import { StringDecoder } from 'node:string_decoder'
import { LineDecoder } from './LineDecoder'
/**
* A writable stream that routes incoming data to a debug logger.
*
* This class extends Writable to provide a stream interface that processes incoming
* data and forwards it to a debug logger. It handles line-by-line processing and
* automatically manages string decoding and line buffering. The stream is useful
* for debugging purposes where you want to log stream data with proper line handling.
*
* Example usage:
* ```typescript
* const debug = require('debug')('myapp:stream')
* const debugStream = new WriteToDebug(debug)
* someStream.pipe(debugStream)
* ```
*/
/**
 * A writable stream that routes incoming data to a debug logger.
 *
 * Incoming chunks are decoded, buffered into complete lines, and each
 * non-empty line is forwarded to the supplied debug logger. Partial lines
 * are held until the next write (or until the stream finishes).
 *
 * Example usage:
 * ```typescript
 * const debug = require('debug')('myapp:stream')
 * const debugStream = new WriteToDebug(debug)
 * someStream.pipe(debugStream)
 * ```
 */
export class WriteToDebug extends Writable {
  // Created lazily on the first write, since the encoding is only known then.
  private strDecoder?: StringDecoder
  private lineDecoder?: LineDecoder

  /**
   * Creates a new WriteToDebug instance.
   *
   * @param debug The debug logger instance to write output to
   */
  constructor (private debug: Debugger) {
    super()
  }

  /**
   * Writable hook: decodes the chunk, buffers partial lines, and forwards
   * every complete line to the debug logger.
   */
  _write (chunk: any, encoding: BufferEncoding, next: (error?: Error | null) => void) {
    if (!this.strDecoder) {
      // @ts-expect-error type here is not correct, 'buffer' is not a valid encoding but it does get passed in
      this.strDecoder = new StringDecoder(encoding === 'buffer' ? 'utf8' : encoding)
    }

    if (!this.lineDecoder) {
      this.lineDecoder = new LineDecoder()
    }

    const decoded = this.strDecoder.write(chunk)

    this.lineDecoder.write(decoded)

    for (const line of this.lineDecoder) {
      this.debugLine(line)
    }

    next()
  }

  /**
   * Writable hook: flushes any remaining buffered content when the stream
   * finishes, then drops the decoders.
   */
  _final (callback: (error?: Error | null) => void) {
    if (!this.strDecoder) {
      this.strDecoder = new StringDecoder()
    }

    if (!this.lineDecoder) {
      this.lineDecoder = new LineDecoder()
    }

    for (const line of this.lineDecoder.end()) {
      this.debugLine(line)
    }

    this.strDecoder = undefined
    this.lineDecoder = undefined
    callback()
  }

  /**
   * Processes a single line and sends it to the debug logger.
   *
   * Strips a single trailing newline while preserving intentional whitespace,
   * then forwards non-empty lines to the debug logger. Empty lines are
   * dropped to avoid cluttering the debug output.
   *
   * @param line The line to process and log
   */
  private debugLine (line: string) {
    const clean = line.endsWith('\n') ? line.slice(0, -1) : line

    if (clean) {
      this.debug(clean)
    }
  }
}

View File

@@ -0,0 +1,113 @@
import { vi, describe, it, expect, beforeEach, afterEach } from 'vitest'
import { filter } from '../Filter'
import { FilterPrefixedContent } from '../FilterPrefixedContent'
import { FilterTaggedContent } from '../FilterTaggedContent'
import { WriteToDebug } from '../WriteToDebug'
import { START_TAG, END_TAG, DEBUG_PREFIX } from '../constants'
// Mock all dependencies
vi.mock('../FilterPrefixedContent')
vi.mock('../FilterTaggedContent')
vi.mock('../WriteToDebug')
// Mock process.env
const originalEnv = process.env
describe('Filter', () => {
  let mockStderr: any
  let mockDebug: any
  let mockFilterPrefixedContent: any
  let mockFilterTaggedContent: any
  let mockWriteToDebug: any

  beforeEach(() => {
    // Reset environment so each test controls ELECTRON_ENABLE_LOGGING itself
    process.env = { ...originalEnv }

    // Create mock objects
    mockStderr = {
      write: vi.fn(),
    }

    mockDebug = vi.fn()
    // Fix: removed `mockPrefix = /^ERROR:/` — it assigned an undeclared,
    // unused variable (a TS compile error / strict-mode ReferenceError).

    // Mock FilterPrefixedContent
    mockFilterPrefixedContent = {
      pipe: vi.fn().mockImplementation((stream) => stream),
    }

    // Mock FilterTaggedContent
    mockFilterTaggedContent = {
      pipe: vi.fn().mockImplementation((stream) => stream),
    }

    // Mock WriteToDebug
    mockWriteToDebug = {
      pipe: vi.fn().mockImplementation((stream) => stream),
    }

    // Setup mocks
    vi.mocked(FilterPrefixedContent).mockImplementation(() => mockFilterPrefixedContent)
    vi.mocked(FilterTaggedContent).mockImplementation(() => mockFilterTaggedContent)
    vi.mocked(WriteToDebug).mockImplementation(() => mockWriteToDebug)
  })

  afterEach(() => {
    vi.clearAllMocks()
    process.env = originalEnv
  })

  describe('when disableTags is false', () => {
    beforeEach(() => {
      process.env.ELECTRON_ENABLE_LOGGING = '0'
    })

    it('pipes prefixTx -> tagTx -> debugWriter', () => {
      const result = filter(mockStderr, mockDebug, DEBUG_PREFIX, false)

      // Verify FilterPrefixedContent was created with correct args
      expect(FilterPrefixedContent).toHaveBeenCalledWith(DEBUG_PREFIX, mockStderr)

      // Verify FilterTaggedContent was created with correct args
      expect(FilterTaggedContent).toHaveBeenCalledWith(START_TAG, END_TAG, mockStderr)

      // Verify WriteToDebug was created with correct args
      expect(WriteToDebug).toHaveBeenCalledWith(mockDebug)

      // Verify the pipe chain: prefixTx -> tagTx -> debugWriter
      expect(mockFilterPrefixedContent.pipe).toHaveBeenCalledWith(mockFilterTaggedContent)
      expect(mockFilterTaggedContent.pipe).toHaveBeenCalledWith(mockWriteToDebug)

      // Verify the result is the prefixTx
      expect(result).toBe(mockFilterPrefixedContent)
    })
  })

  describe('when disableTags parameter is true', () => {
    beforeEach(() => {
      process.env.ELECTRON_ENABLE_LOGGING = '0'
    })

    it('should pipe prefixTx -> debugWriter (skip tagTx)', () => {
      const result = filter(mockStderr, mockDebug, DEBUG_PREFIX, true)

      // Verify FilterPrefixedContent was created with correct args
      expect(FilterPrefixedContent).toHaveBeenCalledWith(DEBUG_PREFIX, mockStderr)

      // Verify FilterTaggedContent was created with correct args
      expect(FilterTaggedContent).toHaveBeenCalledWith(START_TAG, END_TAG, mockStderr)

      // Verify WriteToDebug was created with correct args
      expect(WriteToDebug).toHaveBeenCalledWith(mockDebug)

      // Verify the pipe chain: prefixTx -> debugWriter (skip tagTx)
      expect(mockFilterPrefixedContent.pipe).toHaveBeenCalledWith(mockWriteToDebug)
      expect(mockFilterTaggedContent.pipe).not.toHaveBeenCalled()

      // Verify the result is the prefixTx
      expect(result).toBe(mockFilterPrefixedContent)
    })
  })
})

View File

@@ -0,0 +1,224 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
import { FilterPrefixedContent } from '../FilterPrefixedContent'
import { LineDecoder } from '../LineDecoder'
import { StringDecoder } from 'string_decoder'
import { writeWithBackpressure } from '../writeWithBackpressure'
import { Writable } from 'stream'
vi.mock('../LineDecoder', () => {
return {
LineDecoder: vi.fn(),
}
})
vi.mock('string_decoder', () => {
return {
StringDecoder: vi.fn(),
}
})
vi.mock('../writeWithBackpressure', () => {
return {
writeWithBackpressure: vi.fn(),
}
})
describe('FilterPrefixedContent', () => {
  // Test constants
  const ERROR_PREFIX = /^ERROR:/
  const ENCODING_UTF8 = 'utf8'
  const ENCODING_BUFFER = 'buffer' as any

  // Test data
  // NOTE(review): several fixtures below (WARN, FATAL, EMPTY, MULTI_LINE_TEXT,
  // PARTIAL_TEXT_*, COMPLEX, PARTIAL_*) are not referenced by any current test.
  const TEST_LINES = {
    ERROR: 'ERROR: test error\n',
    INFO: 'INFO: test info\n',
    WARN: 'WARN: warning message\n',
    FATAL: 'FATAL: critical error\n',
    EMPTY: '\n',
  }

  const TEST_DATA = {
    SINGLE_LINE_TEXT: 'test data\n',
    MULTI_LINE_TEXT: 'ERROR: test error\nINFO: test info\n',
    PARTIAL_TEXT_1: 'ERROR: Partial',
    PARTIAL_TEXT_2: ' error message\n',
    COMPLETE_PARTIAL: 'ERROR: Partial error message\n',
  }

  const TEST_CHUNKS = {
    SINGLE_LINE: Buffer.from('test data\n'),
    MULTI_LINE: Buffer.from('ERROR: test error\nINFO: test info\n'),
    COMPLEX: Buffer.from('ERROR: First error\nINFO: First info\nERROR: Second error\nINFO: Second info\n'),
    PARTIAL_1: Buffer.from('ERROR: Partial'),
    PARTIAL_2: Buffer.from(' error message\n'),
  }

  let filter: FilterPrefixedContent
  let wasteStream: Writable
  let mockLineDecoder: any
  let mockStringDecoder: any

  beforeEach(() => {
    vi.clearAllMocks()

    wasteStream = new Writable()

    // LineDecoder defaults to yielding no lines; individual tests override
    // the iterator/end mocks to inject specific lines.
    mockLineDecoder = {
      write: vi.fn(),
      [Symbol.iterator]: vi.fn().mockReturnValue([][Symbol.iterator]()),
      end: vi.fn(),
    }

    // StringDecoder passthrough: returns the chunk as a string.
    mockStringDecoder = {
      write: vi.fn().mockImplementation((chunk) => {
        return chunk.toString()
      }),
    }

    vi.mocked(LineDecoder).mockImplementation(() => mockLineDecoder)
    vi.mocked(StringDecoder).mockImplementation(() => mockStringDecoder)

    filter = new FilterPrefixedContent(ERROR_PREFIX, wasteStream)
    vi.mocked(writeWithBackpressure).mockResolvedValue()
    vi.spyOn(filter, 'push')
  })

  afterEach(() => {
    vi.restoreAllMocks()
  })

  describe('transform', () => {
    it('initializes StringDecoder and LineDecoder on first call', async () => {
      const chunk = TEST_CHUNKS.SINGLE_LINE
      const next = vi.fn()

      await filter.transform(chunk, ENCODING_UTF8, next)

      // NOTE(review): these stubs are configured *after* transform() has
      // already run, so they have no effect on the call above — presumably
      // they were meant to precede it. The assertions still pass because the
      // default mockImplementation already returns chunk.toString().
      mockStringDecoder.write.mockReturnValue(TEST_DATA.SINGLE_LINE_TEXT)
      mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.SINGLE_LINE_TEXT][Symbol.iterator]())

      expect(StringDecoder).toHaveBeenCalledWith(ENCODING_UTF8)
      expect(LineDecoder).toHaveBeenCalled()
      expect(mockStringDecoder.write, 'string decoder write').toHaveBeenCalledWith(chunk)
      expect(mockLineDecoder.write, 'line decoder write').toHaveBeenCalledWith(TEST_DATA.SINGLE_LINE_TEXT)
      expect(next).toHaveBeenCalledWith()
    })

    it('handles buffer encoding correctly', async () => {
      const chunk = TEST_CHUNKS.SINGLE_LINE
      const next = vi.fn()

      mockLineDecoder[Symbol.iterator].mockReturnValue([][Symbol.iterator]())

      await filter.transform(chunk, ENCODING_BUFFER, next)

      // 'buffer' must be mapped to a real encoding before reaching StringDecoder
      expect(StringDecoder).toHaveBeenCalledWith(ENCODING_UTF8)
      expect(next).toHaveBeenCalledWith()
    })

    it('handles errors during processing', async () => {
      const chunk = TEST_CHUNKS.SINGLE_LINE
      const next = vi.fn()
      const error = new Error('Processing error')

      mockStringDecoder.write.mockImplementation(() => {
        throw error
      })

      await filter.transform(chunk, ENCODING_UTF8, next)

      // Errors are surfaced through the transform callback, not thrown
      expect(next).toHaveBeenCalledWith(error)
    })

    it('reuses existing StringDecoder and LineDecoder instances', async () => {
      const chunk1 = Buffer.from(TEST_DATA.SINGLE_LINE_TEXT)
      const chunk2 = Buffer.from(TEST_DATA.SINGLE_LINE_TEXT)
      const next = vi.fn()

      await filter.transform(chunk1, ENCODING_UTF8, next)
      await filter.transform(chunk2, ENCODING_UTF8, next)

      expect(StringDecoder).toHaveBeenCalledTimes(1)
      expect(LineDecoder).toHaveBeenCalledTimes(1)
    })

    describe('when the prefix is not found', () => {
      it('passes the line to the next stream', async () => {
        const chunk = Buffer.from(TEST_DATA.SINGLE_LINE_TEXT)
        const next = vi.fn()

        mockStringDecoder.write.mockReturnValue(TEST_DATA.SINGLE_LINE_TEXT)
        mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.SINGLE_LINE_TEXT][Symbol.iterator]())

        await filter.transform(chunk, ENCODING_UTF8, next)

        expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_DATA.SINGLE_LINE_TEXT, ENCODING_UTF8))
        expect(next).toHaveBeenCalledTimes(1)
      })
    })

    describe('when the prefix is found', () => {
      it('writes the line to the waste stream', async () => {
        const chunk = Buffer.from(TEST_LINES.ERROR)
        const next = vi.fn()

        mockStringDecoder.write.mockReturnValue(TEST_LINES.ERROR)
        mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_LINES.ERROR][Symbol.iterator]())

        await filter.transform(chunk, ENCODING_UTF8, next)

        expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ERROR, ENCODING_UTF8))
        expect(next).toHaveBeenCalledTimes(1)
      })
    })
  })

  describe('flush', () => {
    beforeEach(() => {
      mockLineDecoder.end.mockReturnValue([][Symbol.iterator]())
    })

    it('processes remaining lines from LineDecoder.end()', async () => {
      const callback = vi.fn()
      const remainingLines = [TEST_LINES.ERROR, TEST_LINES.INFO]

      mockLineDecoder.end.mockReturnValue(remainingLines[Symbol.iterator]())

      await filter.flush(callback)

      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(callback).toHaveBeenCalledWith()
    })

    it('handles empty remaining lines', async () => {
      const callback = vi.fn()

      await filter.flush(callback)

      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(callback).toHaveBeenCalledWith()
    })

    it('handles undefined LineDecoder', async () => {
      const callback = vi.fn()
      // A freshly-constructed filter has no decoders yet; flush must still
      // complete cleanly by creating them on demand.
      const newFilter = new FilterPrefixedContent(ERROR_PREFIX, wasteStream)

      await newFilter.flush(callback)

      expect(callback).toHaveBeenCalledWith()
    })

    it('handles errors during flush', async () => {
      const callback = vi.fn()
      const error = new Error('Flush error')

      mockLineDecoder.end.mockImplementation(() => {
        throw error
      })

      await filter.flush(callback)

      expect(callback).toHaveBeenCalledWith(error)
    })
  })
})

View File

@@ -0,0 +1,411 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
import { FilterTaggedContent } from '../FilterTaggedContent'
import { LineDecoder } from '../LineDecoder'
import { StringDecoder } from 'string_decoder'
import { Writable } from 'stream'
import { writeWithBackpressure } from '../writeWithBackpressure'
// Replace StringDecoder with a bare constructor mock so tests can assert
// how/when it is instantiated.
// FIX: the mock specifier must match the import above ('string_decoder');
// mocking 'node:string_decoder' targets a different module id, so the test's
// imported StringDecoder would not be the mocked one.
// NOTE(review): this assumes FilterTaggedContent itself also imports from
// 'string_decoder' (as this test does) — confirm against the implementation.
vi.mock('string_decoder', () => {
  return {
    StringDecoder: vi.fn(),
  }
})
// Replace LineDecoder so line-splitting behavior can be scripted per test.
vi.mock('../LineDecoder', () => {
  return {
    LineDecoder: vi.fn(),
  }
})
// Replace the backpressure-aware writer so waste-stream writes can be spied.
vi.mock('../writeWithBackpressure', () => {
  return {
    writeWithBackpressure: vi.fn(),
  }
})
// Unit tests for FilterTaggedContent, a Transform stream that routes content
// wrapped in START_TAG/END_TAG markers to a "waste" stream while passing
// everything else through to the main pipeline. StringDecoder, LineDecoder
// and writeWithBackpressure are mocked above, so each test scripts the exact
// lines the filter "sees".
describe('FilterTaggedContent', () => {
  const ENCODING_UTF8 = 'utf8'
  // Node passes the literal string 'buffer' as the encoding for Buffer
  // chunks; it is not part of the BufferEncoding type, hence the cast.
  const ENCODING_BUFFER = 'buffer' as any
  const START_TAG = '<tag>'
  const END_TAG = '</tag>'
  const TEST_LINES = {
    ONE: 'one',
    TWO: `two`,
    THREE: 'three',
    FOUR: `four`,
    FIVE: `five`,
  }
  let filter: FilterTaggedContent
  let wasteStream: Writable
  let mockLineDecoder: any
  let mockStringDecoder: any
  beforeEach(() => {
    vi.clearAllMocks()
    wasteStream = new Writable()
    // Hand-rolled decoder doubles; each test overrides the iterator to
    // control which "lines" the filter processes.
    mockLineDecoder = {
      write: vi.fn(),
      [Symbol.iterator]: vi.fn(),
      end: vi.fn(),
    }
    mockStringDecoder = {
      write: vi.fn(),
    }
    vi.mocked(LineDecoder).mockImplementation(() => mockLineDecoder)
    vi.mocked(StringDecoder).mockImplementation(() => mockStringDecoder)
    filter = new FilterTaggedContent(START_TAG, END_TAG, wasteStream)
    vi.spyOn(filter, 'push')
    vi.mocked(writeWithBackpressure).mockResolvedValue()
  })
  afterEach(() => {
    vi.restoreAllMocks()
  })
  describe('transform', () => {
    beforeEach(() => {
      // Default: the decoder yields no complete lines.
      mockLineDecoder[Symbol.iterator].mockReturnValue([][Symbol.iterator]())
    })
    // NOTE(review): several tests below call filter.transform(...) without
    // awaiting the returned promise. They only assert on work done in the
    // synchronous prefix of the async method (decoder construction/writes),
    // which is why they pass today — but the promises are floating; consider
    // awaiting them. TODO confirm transform's synchronous prefix covers
    // these assertions.
    describe('when the string decoder is not initialized', () => {
      describe('and the encoding is buffer', () => {
        it('initializes the string decoder with utf8', () => {
          filter.transform(Buffer.from(''), ENCODING_BUFFER, vi.fn())
          // 'buffer' encoding is normalized to utf8 for the decoder.
          expect(StringDecoder).toHaveBeenCalledWith(ENCODING_UTF8)
        })
      })
      describe('and the encoding is utf8', () => {
        it('initializes the string decoder', () => {
          filter.transform(Buffer.from(''), ENCODING_UTF8, vi.fn())
          expect(StringDecoder).toHaveBeenCalledWith(ENCODING_UTF8)
        })
      })
    })
    describe('when the line decoder is not initialized', () => {
      it('initializes the line decoder', () => {
        filter.transform(Buffer.from(''), ENCODING_UTF8, vi.fn())
        expect(LineDecoder).toHaveBeenCalled()
      })
    })
    it('writes to the string decoder', () => {
      const buf = Buffer.from(TEST_LINES.ONE)
      filter.transform(buf, ENCODING_UTF8, vi.fn())
      expect(mockStringDecoder.write).toHaveBeenCalledWith(buf)
    })
    it('handles errors during transform', async () => {
      const error = new Error('Transform error')
      const next = vi.fn()
      mockStringDecoder.write.mockImplementation(() => {
        throw error
      })
      await filter.transform(Buffer.from(TEST_LINES.ONE), ENCODING_UTF8, next)
      // Errors surface through the transform callback, not as throws.
      expect(next).toHaveBeenCalledWith(error)
    })
    describe('when the string decoder returns a string', () => {
      beforeEach(() => {
        mockStringDecoder.write.mockReturnValue(TEST_LINES.ONE)
      })
      it('writes to the line decoder and string decoder', () => {
        filter.transform(Buffer.from(TEST_LINES.ONE, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
        expect(mockLineDecoder.write).toHaveBeenCalledWith(TEST_LINES.ONE)
      })
      describe('and the line decoder returns a line with no tags', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_LINES.ONE][Symbol.iterator]())
        })
        it('pushes to the main stream', () => {
          filter.transform(Buffer.from(''), ENCODING_UTF8, vi.fn())
          expect(mockLineDecoder.write).toHaveBeenCalledWith(TEST_LINES.ONE)
        })
      })
      describe('and the line decoder returns a line with a start tag', () => {
        const TEST_STRING = `${START_TAG}${TEST_LINES.ONE}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes to the waste stream', () => {
          filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          // The tag itself is stripped before diverting to the waste stream.
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with an end tag', () => {
        const TEST_STRING = `${TEST_LINES.ONE}${END_TAG}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes to the waste stream', () => {
          filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with both start and end tags', () => {
        const TEST_STRING = `${START_TAG}${TEST_LINES.ONE}${END_TAG}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes to the waste stream', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with content before start tag', () => {
        const TEST_STRING = `${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes content before tag to the main stream and tagged content to the waste stream', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with content after end tag', () => {
        const TEST_STRING = `${START_TAG}${TEST_LINES.ONE}${END_TAG}${TEST_LINES.TWO}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes tagged content to waste stream and content after tag to main pipeline', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          // Second argument is the assertion message, to disambiguate which
          // expectation failed.
          expect(writeWithBackpressure, 'wasteStream.write').toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
          expect(filter.push, 'filter.push').toHaveBeenCalledWith(Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with only end tag and content after', () => {
        const TEST_STRING = `${TEST_LINES.ONE}${END_TAG}${TEST_LINES.TWO}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes content before end tag to waste stream and content after tag to main pipeline', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns a line with content before and after tags', () => {
        const TEST_STRING = `${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}${END_TAG}${TEST_LINES.THREE}`
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes content before tag to main pipeline, tagged content to waste stream, and content after tag to main pipeline', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
          expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.THREE, ENCODING_UTF8))
        })
      })
      describe('and the line decoder returns an empty line', () => {
        const TEST_STRING = ''
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes empty string to main pipeline', async () => {
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_STRING))
        })
      })
      describe('and the line decoder returns a line with only whitespace', () => {
        const TEST_STRING = ' \n'
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_STRING][Symbol.iterator]())
        })
        it('writes whitespace to main pipeline', async () => {
          // Whitespace is not filtered; it is forwarded verbatim.
          await filter.transform(Buffer.from(TEST_STRING, ENCODING_UTF8), ENCODING_UTF8, vi.fn())
          expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_STRING))
        })
      })
    })
  })
  describe('flush', () => {
    beforeEach(() => {
      mockLineDecoder.end.mockReturnValue([][Symbol.iterator]())
    })
    it('processes remaining lines from LineDecoder.end()', async () => {
      const callback = vi.fn()
      const remainingLines = [TEST_LINES.ONE]
      mockLineDecoder.end.mockReturnValue(remainingLines[Symbol.iterator]())
      await filter.flush(callback)
      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE))
      expect(callback).toHaveBeenCalledWith()
    })
    it('handles empty remaining lines', async () => {
      const callback = vi.fn()
      await filter.flush(callback)
      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(callback).toHaveBeenCalledWith()
    })
    it('handles undefined LineDecoder', async () => {
      const callback = vi.fn()
      // A filter that never received a chunk has no decoders yet.
      const newFilter = new FilterTaggedContent(START_TAG, END_TAG, wasteStream)
      await newFilter.flush(callback)
      expect(callback).toHaveBeenCalledWith()
    })
    it('handles errors during flush', async () => {
      const callback = vi.fn()
      const error = new Error('Flush error')
      mockLineDecoder.end.mockImplementation(() => {
        throw error
      })
      await filter.flush(callback)
      expect(callback).toHaveBeenCalledWith(error)
    })
  })
  // End-to-end-ish scenarios (still against mocked decoders) covering
  // multi-line and multi-chunk tag handling.
  describe('integration scenarios', () => {
    it('handles multi-line tagged content', async () => {
      const multiLineContent = `${START_TAG}${TEST_LINES.ONE}\n${TEST_LINES.TWO}\n${TEST_LINES.THREE}${END_TAG}`
      const chunk = Buffer.from(multiLineContent)
      const next = vi.fn()
      const lines = [
        `${START_TAG}${TEST_LINES.ONE}`,
        TEST_LINES.TWO,
        `${TEST_LINES.THREE}${END_TAG}`,
      ]
      mockStringDecoder.write.mockReturnValue(multiLineContent)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      await filter.transform(chunk, ENCODING_UTF8, next)
      // Everything between the tags, across line boundaries, is diverted.
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.THREE, ENCODING_UTF8))
    })
    it('handles multiple tagged sections across lines', async () => {
      const multiSectionContent = `${START_TAG}${TEST_LINES.ONE}${END_TAG}\n${START_TAG}${TEST_LINES.TWO}${END_TAG}`
      const chunk = Buffer.from(multiSectionContent)
      const next = vi.fn()
      const lines = [
        `${START_TAG}${TEST_LINES.ONE}${END_TAG}`,
        `${START_TAG}${TEST_LINES.TWO}${END_TAG}`,
      ]
      mockStringDecoder.write.mockReturnValue(multiSectionContent)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      await filter.transform(chunk, ENCODING_UTF8, next)
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
    })
    it('handles content with tags and surrounding text', async () => {
      const contentWithText = `${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}${END_TAG}${TEST_LINES.THREE}`
      const chunk = Buffer.from(contentWithText)
      const next = vi.fn()
      const lines = [contentWithText]
      mockStringDecoder.write.mockReturnValue(contentWithText)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      await filter.transform(chunk, ENCODING_UTF8, next)
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO))
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.THREE))
    })
    it('handles partial lines across multiple chunks', async () => {
      const chunk1 = Buffer.from(`${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}`)
      const chunk2 = Buffer.from(`${TEST_LINES.THREE}${END_TAG}${TEST_LINES.FOUR}`)
      const next = vi.fn()
      mockStringDecoder.write
      .mockReturnValueOnce(`${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}`)
      .mockReturnValueOnce(`${TEST_LINES.THREE}${END_TAG}${TEST_LINES.FOUR}`)
      mockLineDecoder[Symbol.iterator]
      .mockReturnValueOnce([][Symbol.iterator]()) // First chunk has no complete lines
      .mockReturnValueOnce([`${TEST_LINES.ONE}${START_TAG}${TEST_LINES.TWO}${TEST_LINES.THREE}${END_TAG}${TEST_LINES.FOUR}`][Symbol.iterator]()) // Second chunk completes the line
      await filter.transform(chunk1, ENCODING_UTF8, next)
      await filter.transform(chunk2, ENCODING_UTF8, next)
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(`${TEST_LINES.TWO}${TEST_LINES.THREE}`, ENCODING_UTF8))
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.FOUR, ENCODING_UTF8))
    })
    it('handles mixed tagged and untagged content', async () => {
      const mixedContent = `${TEST_LINES.ONE}\n${START_TAG}${TEST_LINES.TWO}${END_TAG}\n${TEST_LINES.THREE}`
      const chunk = Buffer.from(mixedContent)
      const next = vi.fn()
      const lines = [
        TEST_LINES.ONE,
        `${START_TAG}${TEST_LINES.TWO}${END_TAG}`,
        TEST_LINES.THREE,
      ]
      mockStringDecoder.write.mockReturnValue(mixedContent)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      await filter.transform(chunk, ENCODING_UTF8, next)
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.ONE, ENCODING_UTF8))
      expect(writeWithBackpressure).toHaveBeenCalledWith(wasteStream, Buffer.from(TEST_LINES.TWO, ENCODING_UTF8))
      expect(filter.push).toHaveBeenCalledWith(Buffer.from(TEST_LINES.THREE, ENCODING_UTF8))
    })
  })
})

View File

@@ -0,0 +1,66 @@
// Unit tests for LineDecoder: a stateful line splitter. Chunks written via
// `write()` are buffered; iterating the decoder yields completed lines
// (newline retained) and drains the internal buffer. A trailing END_TAG is
// treated as a line terminator even without a newline.
// FIX: `beforeEach` was used below but missing from the vitest import,
// which throws "beforeEach is not defined" when vitest globals are disabled
// (this suite imports its API explicitly).
import { describe, it, expect, beforeEach } from 'vitest'
import { LineDecoder } from '../LineDecoder'
import { END_TAG } from '../constants'

describe('LineDecoder', () => {
  let decoder: LineDecoder

  beforeEach(() => {
    // Fresh decoder per test so buffered state cannot leak between tests.
    decoder = new LineDecoder()
  })

  describe('when a chunk with a trailing newline is provided', () => {
    const firstChunk = 'one\n'

    beforeEach(() => {
      decoder.write(firstChunk)
    })

    it('should yield the line', () => {
      const lines = Array.from(decoder)

      // Lines are yielded with their trailing newline intact.
      expect(lines).toEqual([firstChunk])
    })

    describe('and then another chunk is provided', () => {
      const secondChunk = 'two\n'

      beforeEach(() => {
        decoder.write(secondChunk)
      })

      it('should yield the lines', () => {
        const lines = Array.from(decoder)

        expect(lines).toEqual([firstChunk, secondChunk])
      })

      describe('and the decoder is iterated through', () => {
        beforeEach(() => {
          // iterate through the decoder to empty the buffer
          Array.from(decoder)
        })

        it('should yield the lines', () => {
          const lines = Array.from(decoder)

          // Iteration drains the buffer, so a second pass yields nothing.
          expect(lines).toEqual([])
        })
      })
    })
  })

  describe('when the only content in the buffer ends with override token and not a newline', () => {
    const str = 'Some Text'

    beforeEach(() => {
      decoder.write(`${str}${END_TAG}`)
    })

    it('yields the line as if the end tag were a newline', () => {
      const lines = Array.from(decoder)

      expect(lines).toEqual([`${str}${END_TAG}`])
    })
  })
})

View File

@@ -0,0 +1,118 @@
// Unit tests for TagStream, a Transform that wraps every chunk in
// START_TAG/END_TAG markers so the parent process can identify Cypress-owned
// stderr. 'stream' and 'string_decoder' are auto-mocked.
// FIX: `beforeEach` was used below but missing from the vitest import,
// which throws "beforeEach is not defined" when vitest globals are disabled
// (this suite imports its API explicitly).
import { describe, it, expect, vi, beforeEach, MockedObject } from 'vitest'
import { TagStream } from '../TagStream'
import { START_TAG, END_TAG } from '../constants'
import { StringDecoder } from 'string_decoder'

vi.mock('stream')
vi.mock('string_decoder')

describe('TagStream', () => {
  let mockStringDecoder: MockedObject<StringDecoder>
  let tagStream: TagStream
  const strInput = 'Hello, world!'

  beforeEach(() => {
    vi.clearAllMocks()
    mockStringDecoder = {
      write: vi.fn(),
      end: vi.fn(),
    }
    vi.mocked(StringDecoder).mockImplementation(() => mockStringDecoder)
    tagStream = new TagStream()
    // push defaults to "downstream ready"; individual tests override.
    vi.spyOn(tagStream, 'push').mockImplementation(() => true)
  })

  describe('when transforming a string', () => {
    it('passes on the string wrapped in the start and end tags', async () => {
      const cb = vi.fn()

      await tagStream.transform(strInput, 'utf-8', cb)
      expect(tagStream.push).toHaveBeenCalledWith(Buffer.from(`${START_TAG}${strInput}${END_TAG}`))
      expect(cb).toHaveBeenCalled()
    })

    it('flushes the stream', async () => {
      const cb = vi.fn()

      // flush wraps whatever the string decoder had buffered.
      mockStringDecoder.end.mockReturnValue(strInput)
      await tagStream.flush(cb)
      expect(cb).toHaveBeenCalledWith(undefined, Buffer.from(`${START_TAG}${strInput}${END_TAG}`))
    })
  })

  describe('when the downstream stream is not ready', () => {
    beforeEach(() => {
      vi.mocked(tagStream.push).mockClear()
      // Immediately emit 'drain' so the backpressure wait resolves.
      vi.spyOn(tagStream, 'once').mockImplementation((ev, cb) => {
        if (ev === 'drain') {
          cb()
        }

        return tagStream
      })
    })

    it('waits for the stream to be ready', async () => {
      const cb = vi.fn()

      vi.mocked(tagStream.push).mockReturnValue(false)
      const promise = tagStream.transform(strInput, 'utf-8', cb)

      await promise
      expect(tagStream.once).toHaveBeenCalledWith('drain', expect.any(Function))
      expect(cb).toHaveBeenCalled()
    })
  })

  describe('when transforming a buffer', () => {
    const bufInput = Buffer.from(strInput)

    describe('and writing to the string decoder returns a string', () => {
      beforeEach(() => {
        mockStringDecoder.write.mockReturnValue(strInput)
      })

      it('passes on a buffer of the tagged output', async () => {
        const cb = vi.fn()

        await tagStream.transform(bufInput, 'buffer', cb)
        expect(tagStream.push).toHaveBeenCalledWith(Buffer.from(`${START_TAG}${strInput}${END_TAG}`))
        expect(cb).toHaveBeenCalled()
      })
    })

    describe('and writing to the string decoder returns nothing', () => {
      beforeEach(() => {
        mockStringDecoder.write.mockReturnValue('')
      })

      it('passes nothing on to the callback', async () => {
        const cb = vi.fn()

        await tagStream.transform(bufInput, 'buffer', cb)
        // An incomplete multi-byte sequence yields '' — nothing is tagged.
        expect(tagStream.push).toHaveBeenCalledWith('')
        expect(cb).toHaveBeenCalled()
      })
    })

    describe('when str decoder throws an error', () => {
      const err = new Error('test')

      it('passes the error on to the callback', async () => {
        expect.assertions(1)
        const cb = vi.fn()

        mockStringDecoder.write.mockImplementation(() => {
          throw err
        })

        await tagStream.transform(bufInput, 'buffer', cb)
        expect(cb).toHaveBeenCalledWith(err)
      })
    })
  })
})

View File

@@ -0,0 +1,304 @@
// Unit tests for WriteToDebug, a Writable sink that decodes incoming chunks,
// splits them into lines, strips trailing newlines, and forwards each
// non-empty line to a `debug`-style logger function.
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'
import { Writable } from 'stream'
import { WriteToDebug } from '../WriteToDebug'
import { StringDecoder } from 'node:string_decoder'
import { LineDecoder } from '../LineDecoder'

// Auto-mock the decoders; each test scripts their return values.
vi.mock('node:string_decoder')
vi.mock('../LineDecoder')

const MockStringDecoder = vi.mocked(StringDecoder)
const MockLineDecoder = vi.mocked(LineDecoder)

describe('WriteToDebug', () => {
  const TEST_CONSTANTS = {
    ENCODING_UTF8: 'utf8' as const,
    // Node passes the literal 'buffer' for Buffer chunks.
    ENCODING_BUFFER: 'buffer' as const,
  }
  const TEST_DATA = {
    SINGLE_LINE: 'test line',
    MULTI_LINE: 'line1\nline2\nline3',
    EMPTY_LINE: '',
    WHITESPACE_ONLY: ' \n',
    LINE_WITH_TRAILING_NEWLINE: 'test line\n',
    LINE_WITHOUT_TRAILING_NEWLINE: 'test line',
  }
  const TEST_CHUNKS = {
    SINGLE_LINE: Buffer.from(TEST_DATA.SINGLE_LINE),
    MULTI_LINE: Buffer.from(TEST_DATA.MULTI_LINE),
    EMPTY: Buffer.from(''),
  }
  let writeToDebug: WriteToDebug
  let mockDebug: any
  let mockStringDecoder: any
  let mockLineDecoder: any

  beforeEach(() => {
    vi.clearAllMocks()
    mockDebug = vi.fn()
    mockStringDecoder = {
      write: vi.fn().mockReturnValue(''),
    }
    mockLineDecoder = {
      write: vi.fn(),
      end: vi.fn().mockReturnValue([][Symbol.iterator]()),
      [Symbol.iterator]: vi.fn().mockReturnValue([][Symbol.iterator]()),
    }
    MockStringDecoder.mockImplementation(() => mockStringDecoder)
    MockLineDecoder.mockImplementation(() => mockLineDecoder)
    writeToDebug = new WriteToDebug(mockDebug)
  })

  afterEach(() => {
    vi.restoreAllMocks()
  })

  describe('constructor', () => {
    it('creates a Writable stream with correct configuration', () => {
      expect(writeToDebug).toBeInstanceOf(Writable)
      expect(writeToDebug).toBeInstanceOf(WriteToDebug)
    })
  })

  describe('write', () => {
    beforeEach(() => {
      mockStringDecoder.write.mockReturnValue('')
      mockLineDecoder[Symbol.iterator].mockReturnValue([][Symbol.iterator]())
    })

    describe('when string decoder is not initialized', () => {
      describe('and encoding is utf8', () => {
        it('initializes string decoder with utf8 encoding', () => {
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(MockStringDecoder).toHaveBeenCalledWith(TEST_CONSTANTS.ENCODING_UTF8)
        })
      })

      describe('and encoding is buffer', () => {
        it('initializes string decoder with utf8 encoding', () => {
          // @ts-expect-error type here is not correct, 'buffer' is not a valid encoding but it does get passed in
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_BUFFER)
          // 'buffer' is normalized to utf8 for decoding.
          expect(MockStringDecoder).toHaveBeenCalledWith(TEST_CONSTANTS.ENCODING_UTF8)
        })
      })
    })

    describe('when line decoder is not initialized', () => {
      it('initializes line decoder', () => {
        writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
        expect(MockLineDecoder).toHaveBeenCalled()
      })
    })

    it('writes to string decoder', () => {
      writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
      expect(mockStringDecoder.write).toHaveBeenCalledWith(TEST_CHUNKS.SINGLE_LINE)
    })

    describe('when string decoder returns a string', () => {
      beforeEach(() => {
        mockStringDecoder.write.mockReturnValue(TEST_DATA.SINGLE_LINE)
      })

      it('writes to line decoder', () => {
        writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
        expect(mockLineDecoder.write).toHaveBeenCalledWith(TEST_DATA.SINGLE_LINE)
      })

      describe('and line decoder returns a single line', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.SINGLE_LINE][Symbol.iterator]())
        })

        it('calls debug with the line', () => {
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledWith(TEST_DATA.SINGLE_LINE)
        })
      })

      describe('and line decoder returns multiple lines', () => {
        const lines = ['line1', 'line2', 'line3']

        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
        })

        it('calls debug with each line', () => {
          writeToDebug.write(TEST_CHUNKS.MULTI_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledTimes(3)
          expect(mockDebug).toHaveBeenNthCalledWith(1, 'line1')
          expect(mockDebug).toHaveBeenNthCalledWith(2, 'line2')
          expect(mockDebug).toHaveBeenNthCalledWith(3, 'line3')
        })
      })

      describe('and line decoder returns an empty line', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.EMPTY_LINE][Symbol.iterator]())
        })

        it('does not call debug', () => {
          // Empty lines are dropped rather than logged.
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).not.toHaveBeenCalled()
        })
      })

      describe('and line decoder returns a line with trailing newline', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.LINE_WITH_TRAILING_NEWLINE][Symbol.iterator]())
        })

        it('calls debug with line without trailing newline', () => {
          // The trailing newline is stripped before logging, since debug()
          // adds its own line break.
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledWith(TEST_DATA.LINE_WITHOUT_TRAILING_NEWLINE)
        })
      })

      describe('and line decoder returns a line without trailing newline', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.LINE_WITHOUT_TRAILING_NEWLINE][Symbol.iterator]())
        })

        it('calls debug with the line as-is', () => {
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledWith(TEST_DATA.LINE_WITHOUT_TRAILING_NEWLINE)
        })
      })

      describe('and line decoder returns whitespace-only line', () => {
        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue([TEST_DATA.WHITESPACE_ONLY][Symbol.iterator]())
        })

        it('calls debug with whitespace-only line', () => {
          // Whitespace (minus the trailing newline) is preserved.
          writeToDebug.write(TEST_CHUNKS.SINGLE_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledWith(' ')
        })
      })

      describe('and line decoder returns mixed content', () => {
        const lines = ['line1', '', 'line2', ' \n', 'line3']

        beforeEach(() => {
          mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
        })

        it('calls debug only for non-empty lines', () => {
          writeToDebug.write(TEST_CHUNKS.MULTI_LINE, TEST_CONSTANTS.ENCODING_UTF8)
          expect(mockDebug).toHaveBeenCalledTimes(4)
          expect(mockDebug).toHaveBeenNthCalledWith(1, 'line1')
          expect(mockDebug).toHaveBeenNthCalledWith(2, 'line2')
          expect(mockDebug).toHaveBeenNthCalledWith(3, ' ')
          expect(mockDebug).toHaveBeenNthCalledWith(4, 'line3')
        })
      })
    })
  })

  // Exercises Writable's _final via end(): flushes any partial line still
  // buffered in the LineDecoder.
  describe('final', () => {
    beforeEach(() => {
      mockStringDecoder.write.mockReturnValue('')
      mockLineDecoder[Symbol.iterator].mockReturnValue([][Symbol.iterator]())
      mockLineDecoder.end.mockReturnValue([][Symbol.iterator]())
    })

    it('processes remaining lines from line decoder', () => {
      const remainingLines = ['final line 1', 'final line 2']

      mockLineDecoder.end.mockReturnValue(remainingLines[Symbol.iterator]())
      writeToDebug.end()
      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(mockDebug).toHaveBeenCalledTimes(2)
      expect(mockDebug).toHaveBeenNthCalledWith(1, 'final line 1')
      expect(mockDebug).toHaveBeenNthCalledWith(2, 'final line 2')
    })

    it('handles empty remaining lines', () => {
      writeToDebug.end()
      expect(mockLineDecoder.end).toHaveBeenCalledWith()
      expect(mockDebug).not.toHaveBeenCalled()
    })

    it('handles undefined decoders', () => {
      // A sink that never received a chunk has no decoders yet.
      const newWriteToDebug = new WriteToDebug(mockDebug)

      newWriteToDebug.end()
      // Should not throw
      expect(true).toBe(true)
    })

    it('cleans up decoders after processing', () => {
      writeToDebug.end()
      // Should not throw
      expect(true).toBe(true)
    })
  })

  describe('integration scenarios', () => {
    it('handles multi-line content with mixed empty lines', () => {
      const multiLineContent = 'line1\n\nline2\n \nline3'
      const lines = ['line1', '', 'line2', ' \n', 'line3']

      mockStringDecoder.write.mockReturnValue(multiLineContent)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      writeToDebug.write(Buffer.from(multiLineContent), TEST_CONSTANTS.ENCODING_UTF8)
      expect(mockDebug).toHaveBeenCalledTimes(4)
      expect(mockDebug).toHaveBeenNthCalledWith(1, 'line1')
      expect(mockDebug).toHaveBeenNthCalledWith(2, 'line2')
      expect(mockDebug).toHaveBeenNthCalledWith(3, ' ')
      expect(mockDebug).toHaveBeenNthCalledWith(4, 'line3')
    })

    it('handles content with trailing newlines', () => {
      const contentWithNewlines = 'line1\nline2\n'
      const lines = ['line1\n', 'line2\n']

      mockStringDecoder.write.mockReturnValue(contentWithNewlines)
      mockLineDecoder[Symbol.iterator].mockReturnValue(lines[Symbol.iterator]())
      writeToDebug.write(Buffer.from(contentWithNewlines), TEST_CONSTANTS.ENCODING_UTF8)
      expect(mockDebug).toHaveBeenCalledTimes(2)
      expect(mockDebug).toHaveBeenNthCalledWith(1, 'line1')
      expect(mockDebug).toHaveBeenNthCalledWith(2, 'line2')
    })

    it('handles final flush with remaining content', () => {
      const remainingLines = ['final line 1', 'final line 2\n', '']

      mockLineDecoder.end.mockReturnValue(remainingLines[Symbol.iterator]())
      writeToDebug.end()
      expect(mockDebug).toHaveBeenCalledTimes(2)
      expect(mockDebug).toHaveBeenNthCalledWith(1, 'final line 1')
      expect(mockDebug).toHaveBeenNthCalledWith(2, 'final line 2')
    })
  })
})

View File

@@ -0,0 +1,134 @@
// Unit tests for logError: verifies every call is forwarded to console.error
// exactly once, with the arguments bracketed by START_TAG/END_TAG so the
// stderr filter can later identify (and divert) Cypress-owned error output.
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'
import { logError } from '../logError'
import { START_TAG, END_TAG } from '../constants'

describe('logError', () => {
  let consoleErrorSpy: ReturnType<typeof vi.spyOn>

  beforeEach(() => {
    // Mock console.error at the module boundary
    consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
  })

  afterEach(() => {
    // Restore the original console.error
    consoleErrorSpy.mockRestore()
  })

  describe('START_TAG and END_TAG constants', () => {
    it('exports unique and identifiable tags', () => {
      // The filter relies on the tags being distinguishable from each other.
      expect(START_TAG).not.toBe(END_TAG)
    })
  })

  describe('logError function', () => {
    it('calls console.error with start tag, arguments, and end tag', () => {
      const errorMessage = 'Something went wrong'
      const errorObject = new Error('Test error')

      logError(errorMessage, errorObject)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, errorMessage, errorObject, END_TAG)
    })

    it('handles single string argument', () => {
      const message = 'Single error message'

      logError(message)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, message, END_TAG)
    })

    it('handles multiple arguments', () => {
      const message = 'Error occurred'
      const code = 500
      const details = { userId: 123, action: 'login' }

      logError(message, code, details)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, message, code, details, END_TAG)
    })

    it('handles no arguments', () => {
      // Even a bare call still emits the tag pair.
      logError()
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, END_TAG)
    })

    it('handles various argument types', () => {
      const stringArg = 'string message'
      const numberArg = 42
      const booleanArg = true
      const nullArg = null
      const undefinedArg = undefined
      const objectArg = { key: 'value' }
      const arrayArg = [1, 2, 3]

      logError(stringArg, numberArg, booleanArg, nullArg, undefinedArg, objectArg, arrayArg)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(
        START_TAG,
        stringArg,
        numberArg,
        booleanArg,
        nullArg,
        undefinedArg,
        objectArg,
        arrayArg,
        END_TAG,
      )
    })

    it('preserves the order of arguments', () => {
      const first = 'first'
      const second = 'second'
      const third = 'third'

      logError(first, second, third)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, first, second, third, END_TAG)
    })

    it('handles Error objects correctly', () => {
      const error = new Error('Test error message')

      error.stack = 'Error: Test error message\n at test.js:1:1'
      logError(error)
      expect(consoleErrorSpy).toHaveBeenCalledTimes(1)
      // The Error instance is passed through untouched (no stringification).
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, error, END_TAG)
    })
  })

  describe('integration with console.error', () => {
    it('calls console.error exactly once per logError call', () => {
      logError('First error')
      logError('Second error')
      logError('Third error')
      expect(consoleErrorSpy).toHaveBeenCalledTimes(3)
    })

    it('passes through all arguments to console.error', () => {
      const complexObject = {
        message: 'Complex error',
        timestamp: new Date(),
        metadata: {
          userId: 123,
          sessionId: 'abc-123',
        },
      }

      logError('Error occurred:', complexObject)
      expect(consoleErrorSpy).toHaveBeenCalledWith(START_TAG, 'Error occurred:', complexObject, END_TAG)
    })
  })
})

View File

@@ -0,0 +1,49 @@
import { describe, it, expect, vi, beforeEach, MockedObject } from 'vitest'
import { writeWithBackpressure } from '../writeWithBackpressure'
import { Writable } from 'stream'
// Unit tests for writeWithBackpressure: writes a chunk to a Writable and,
// when write() reports backpressure (returns false), resolves only after the
// stream emits 'drain'.
describe('writeWithBackpressure', () => {
  let output: MockedObject<Writable>

  beforeEach(() => {
    vi.clearAllMocks()
    output = {
      // @ts-expect-error - mock impl does not match impl
      write: vi.fn<Writable['write']>(),
      // @ts-expect-error - mock impl does not match impl
      once: vi.fn<Writable['once']>().mockImplementation((event, listener) => {
        // Emit 'drain' synchronously so the backpressure wait resolves
        // immediately in tests.
        if (event === 'drain') {
          listener()
        }

        return output
      }),
    }
  })

  describe('when the stream is ready to write', () => {
    beforeEach(() => {
      output.write.mockReturnValue(true)
    })

    it('writes a chunk to a writable stream', async () => {
      await writeWithBackpressure(output, 'test')
      // String input is normalized to a Buffer before writing.
      expect(output.write).toHaveBeenCalledWith(Buffer.from('test'))
    })
  })

  describe('when the stream is not ready to write', () => {
    beforeEach(() => {
      output.write.mockReturnValue(false)
    })

    it('resolves once drain is emitted', async () => {
      const writePromise = writeWithBackpressure(output, 'test')

      expect(output.write).toHaveBeenCalledWith(Buffer.from('test'))
      expect(output.once).toHaveBeenCalledWith('drain', expect.any(Function))
      // FIX: previously this read `expect(writePromise).resolves`, which is a
      // no-op (no matcher invoked, promise never awaited) — a hung promise
      // could not fail the test. Awaiting the resolution makes the assertion
      // real.
      await expect(writePromise).resolves.toBeUndefined()
    })
  })
})

View File

@@ -0,0 +1,18 @@
/**
 * These tags are used to mark the beginning and end of error content that should
 * be filtered from stderr output. The tags are designed to be unique and easily
 * identifiable in log output.
 */
export const START_TAG = '<<<CYPRESS.STDERR.START>>>'

/**
 * Marks the end of error content that should be filtered from stderr output.
 */
export const END_TAG = '<<<CYPRESS.STDERR.END>>>'

/**
 * A regex that will match output from the 'debug' package: optional leading
 * whitespace, optional ANSI color escapes, then one or more `namespace:`
 * segments (e.g. `cypress:server:util`). Leading whitespace must be optional
 * (`\s*`, not `\s+`) because debug entries commonly start at column 0 or with
 * an ANSI color code.
 */
// this regexp needs to match control characters
// eslint-disable-next-line no-control-regex
export const DEBUG_PREFIX = /^\s*(?:\u001b\[[0-9;]*m)*((\S+):)+/u

View File

@@ -0,0 +1,13 @@
// Public surface of the stderr-filtering package: transform streams that
// tag, filter, and redirect stderr content, plus the shared tag constants.
export { FilterTaggedContent } from './FilterTaggedContent'
export { FilterPrefixedContent } from './FilterPrefixedContent'
export { TagStream } from './TagStream'
export { WriteToDebug } from './WriteToDebug'
export { filter } from './Filter'
export { logError } from './logError'
export { START_TAG, END_TAG, DEBUG_PREFIX } from './constants'

View File

@@ -0,0 +1,29 @@
import { START_TAG, END_TAG } from './constants'

// When electron logging is enabled, stderr is intentionally left unfiltered,
// so the wrapping tags would only add noise. Evaluated once at module load.
const DISABLE_TAGS = process.env.ELECTRON_ENABLE_LOGGING === '1'

/**
 * Logs error messages with special tags for stderr filtering.
 *
 * Each console.error call is wrapped in start/end tags that allow
 * FilterTaggedContent to identify and filter error messages in the stderr
 * output while preserving the original error content. With
 * ELECTRON_ENABLE_LOGGING=1 the arguments are logged untagged.
 *
 * @param args The arguments to log as an error message
 */
export const logError = (...args: any[]) => {
  const output = DISABLE_TAGS ? args : [START_TAG, ...args, END_TAG]

  // eslint-disable-next-line no-console
  console.error(...output)
}

View File

@@ -0,0 +1,20 @@
import { Writable } from 'stream'

/**
 * Writes `chunk` to `toStream`, honoring backpressure.
 *
 * Resolves immediately when the stream's internal buffer accepts the write
 * (`write` returned true), otherwise waits for the stream to emit `drain`
 * before resolving.
 *
 * @param toStream destination writable stream
 * @param chunk data to write; non-Buffer values are stringified first
 * @returns promise that settles once the chunk has been accepted
 */
export async function writeWithBackpressure<T> (toStream: Writable, chunk: T): Promise<void> {
  return new Promise((resolve, reject) => {
    try {
      const data = Buffer.isBuffer(chunk) ? chunk : Buffer.from(String(chunk))

      if (toStream.write(data)) {
        // No backpressure: the chunk was accepted outright.
        resolve()

        return
      }

      // Backpressure signalled: resolve only after the buffer flushes.
      toStream.once('drain', resolve)
    } catch (err) {
      reject(err)
    }
  })
}

View File

@@ -0,0 +1,31 @@
{
"name": "@packages/stderr-filtering",
"version": "0.0.0-development",
"private": true,
"main": "dist/index.js",
"scripts": {
"build": "yarn clean && tsc",
"clean": "rm -rf ./{dist,coverage}",
"lint": "eslint",
"test": "vitest",
"watch": "tsc --watch"
},
"dependencies": {
"debug": "^4.3.4"
},
"devDependencies": {
"@packages/eslint-config": "0.0.0-development",
"@types/node": "^22.13.10",
"@vitest/coverage-v8": "^3.0.9",
"eslint": "^9.22.0",
"typescript": "^5.8.2",
"vitest": "^3.0.9"
},
"files": [
"dist"
],
"types": "dist/index.d.ts",
"lint-staged": {
"**/*.{js,jsx,ts,tsx,json,eslintrc,vue}": "eslint --fix"
}
}

View File

@@ -0,0 +1,32 @@
{
"include": [
"lib/**/*.ts",
],
"exclude": [
"dist",
"node_modules",
"**/__spec__/**",
"**/*.spec.ts",
"**/*.test.ts"
],
"compilerOptions": {
"target": "ES2018",
"lib": ["es2021"],
"outDir": "./dist",
"rootDir": "./lib",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"noImplicitAny": true,
"noImplicitThis": true,
"noImplicitReturns": false,
"strict": true,
"moduleResolution": "node",
"module": "commonjs",
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true
}
}

View File

@@ -0,0 +1,17 @@
import { defineConfig } from 'vitest/config'

// Shared ignore patterns for both test discovery and coverage collection.
const EXCLUDED = ['**/node_modules/**', '**/dist/**']

export default defineConfig({
  test: {
    globals: true,
    environment: 'node',
    include: ['**/__spec__/*.spec.ts'],
    exclude: EXCLUDED,
    coverage: {
      provider: 'v8',
      include: ['lib/**/*.ts'],
      exclude: EXCLUDED,
      all: true,
      reporter: ['clover'],
    },
  },
})

View File

@@ -1,6 +1,12 @@
#!/bin/bash
if [ $SKIP_DEPCHECK ]; then exit 0; fi
# Skip integrity check in CI environments to prevent patch reapplication issues
if [ "$CI" = "true" ] || [ "$CIRCLECI" = "true" ]; then
echo 'Skipping dependency integrity check in CI environment'
exit 0
fi
yarn check --integrity
if [ $? -ne 0 ]; then

View File

@@ -32,6 +32,7 @@ export const monorepoPaths = {
pkgScaffoldConfig: path.join(__dirname, '../../packages/scaffold-config'),
pkgServer: path.join(__dirname, '../../packages/server'),
pkgSocket: path.join(__dirname, '../../packages/socket'),
pkgStderrFiltering: path.join(__dirname, '../../packages/stderr-filtering'),
pkgTelemetry: path.join(__dirname, '../../packages/telemetry'),
pkgTs: path.join(__dirname, '../../packages/ts'),
pkgTypes: path.join(__dirname, '../../packages/types'),

View File

@@ -3,7 +3,11 @@
"version": "0.0.0-development",
"private": true,
"scripts": {
"build": "echo 'internal-scripts build: no build necessary'",
"lint": "eslint --ext .js,.ts,.json, ."
},
"dependencies": {
"@packages/electron": "0.0.0-development"
},
"nx": {}
}

View File

@@ -0,0 +1,6 @@
// Exercises an autoplaying <video> element; used by the system tests that
// reproduce Electron's media-related stderr warnings.
describe('page with video', function () {
  it('plays the video', function () {
    cy.visit('/cypress/fixtures/video.html')

    // An autoplaying video reports `paused === false` once playback starts.
    cy.get('video').should('have.prop', 'paused', false)
  })
})

Binary file not shown.

View File

@@ -0,0 +1,5 @@
<!DOCTYPE html>
<html>
<body>
  <video src="./sample.mp4" autoplay loop></video>
</body>
</html>

View File

@@ -0,0 +1,13 @@
import systemTests from '../lib/system-tests'

// Regression test: on macOS, Electron can emit a spammy
// "sysctlbyname for kern.hv_vmm_present failed" warning while playing media.
// That third-party noise should be redirected to the debug sink rather than
// appearing on the user-facing stderr.
// Note: the original describe title misspelled "sysctlbyname" and the it
// title said "stdout" even though the assertion inspects stderr.
describe('Electron sysctlbyname warning', function () {
  it('does not render warnings in the stderr', function () {
    return systemTests.exec(this, {
      project: 'e2e',
      spec: 'embedded_video_with_audio.cy.js',
      browser: 'electron',
    }).then(({ stderr }) => {
      expect(stderr).not.to.include('sysctlbyname for kern.hv_vmm_present failed with status -1')
    })
  })
})

View File

@@ -839,6 +839,11 @@
"./packages/socket/node_modules/socket.io/node_modules/ws/lib/constants.js",
"./packages/socket/node_modules/socket.io/node_modules/ws/lib/receiver.js",
"./packages/socket/node_modules/socket.io/node_modules/ws/lib/websocket-server.js",
"./packages/stderr-filtering/dist/FilterPrefixedContent.js",
"./packages/stderr-filtering/dist/FilterTaggedContent.js",
"./packages/stderr-filtering/dist/LineDecoder.js",
"./packages/stderr-filtering/dist/TagStream.js",
"./packages/stderr-filtering/dist/WriteToDebug.js",
"./packages/ts/register.js",
"./packages/types/index.js",
"./tooling/v8-snapshot/dist/setup/v8-snapshot-entry-cy-in-cy.js"
@@ -4151,6 +4156,11 @@
"./packages/socket/node_modules/socket.io/node_modules/ws/lib/stream.js",
"./packages/socket/node_modules/socket.io/node_modules/ws/lib/validation.js",
"./packages/socket/node_modules/socket.io/package.json",
"./packages/stderr-filtering/dist/Filter.js",
"./packages/stderr-filtering/dist/constants.js",
"./packages/stderr-filtering/dist/index.js",
"./packages/stderr-filtering/dist/logError.js",
"./packages/stderr-filtering/dist/writeWithBackpressure.js",
"./packages/telemetry/dist/detectors/circleCiDetectorSync.js",
"./packages/telemetry/dist/index.js",
"./packages/telemetry/dist/node.js",
@@ -4185,5 +4195,5 @@
"./tooling/v8-snapshot/cache/linux/snapshot-entry.js"
],
"deferredHashFile": "yarn.lock",
"deferredHash": "12f57ba8965d605eed281c5637ba10524d2f949525807eb12e1c6255601a3709"
"deferredHash": "0bd8a28484027edf51c642a44fb317008f8b1aa153d3ee3fe4f3223dcf6488af"
}

163
yarn.lock
View File

@@ -1752,7 +1752,7 @@
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.15.6.tgz#043b9aa3c303c0722e5377fef9197f4cf1796549"
integrity sha512-S/TSCcsRuCkmpUuoWijua0Snt+f3ewU/8spLo+4AXJCZfT0bVCzLD5MuOKdrx0mlAptbKzn5AdgEIIKXxXkz9Q==
"@babel/parser@7.28.0", "@babel/parser@^7.0.0", "@babel/parser@^7.1.0", "@babel/parser@^7.15.4", "@babel/parser@^7.16.4", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5", "@babel/parser@^7.23.0", "@babel/parser@^7.26.9", "@babel/parser@^7.27.2", "@babel/parser@^7.27.5", "@babel/parser@^7.28.0":
"@babel/parser@7.28.0", "@babel/parser@^7.0.0", "@babel/parser@^7.1.0", "@babel/parser@^7.15.4", "@babel/parser@^7.16.4", "@babel/parser@^7.20.7", "@babel/parser@^7.22.5", "@babel/parser@^7.23.0", "@babel/parser@^7.25.4", "@babel/parser@^7.26.9", "@babel/parser@^7.27.2", "@babel/parser@^7.27.5", "@babel/parser@^7.28.0":
version "7.28.0"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.28.0.tgz#979829fbab51a29e13901e5a80713dbcb840825e"
integrity sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==
@@ -2546,7 +2546,7 @@
"@babel/helper-validator-identifier" "^7.14.9"
to-fast-properties "^2.0.0"
"@babel/types@7.28.2", "@babel/types@^7.0.0", "@babel/types@^7.15.4", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.20.7", "@babel/types@^7.21.3", "@babel/types@^7.22.5", "@babel/types@^7.24.7", "@babel/types@^7.26.9", "@babel/types@^7.27.1", "@babel/types@^7.27.3", "@babel/types@^7.27.6", "@babel/types@^7.28.0", "@babel/types@^7.4.4":
"@babel/types@7.28.2", "@babel/types@^7.0.0", "@babel/types@^7.15.4", "@babel/types@^7.18.6", "@babel/types@^7.18.9", "@babel/types@^7.20.7", "@babel/types@^7.21.3", "@babel/types@^7.22.5", "@babel/types@^7.24.7", "@babel/types@^7.25.4", "@babel/types@^7.26.9", "@babel/types@^7.27.1", "@babel/types@^7.27.3", "@babel/types@^7.27.6", "@babel/types@^7.28.0", "@babel/types@^7.4.4":
version "7.28.2"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.28.2.tgz#da9db0856a9a88e0a13b019881d7513588cf712b"
integrity sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==
@@ -2570,6 +2570,11 @@
check-more-types "2.24.0"
lazy-ass "1.6.0"
"@bcoe/v8-coverage@^1.0.2":
version "1.0.2"
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-1.0.2.tgz#bbe12dca5b4ef983a0d0af4b07b9bc90ea0ababa"
integrity sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==
"@colors/colors@1.5.0":
version "1.5.0"
resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9"
@@ -3398,15 +3403,15 @@
debug "^4.3.1"
minimatch "^3.1.2"
"@eslint/config-helpers@^0.3.0":
version "0.3.0"
resolved "https://registry.yarnpkg.com/@eslint/config-helpers/-/config-helpers-0.3.0.tgz#3e09a90dfb87e0005c7694791e58e97077271286"
integrity sha512-ViuymvFmcJi04qdZeDc2whTHryouGcDlaxPqarTD0ZE10ISpxGUVZGZDx4w01upyIynL3iu6IXH2bS1NhclQMw==
"@eslint/config-helpers@^0.3.1":
version "0.3.1"
resolved "https://registry.yarnpkg.com/@eslint/config-helpers/-/config-helpers-0.3.1.tgz#d316e47905bd0a1a931fa50e669b9af4104d1617"
integrity sha512-xR93k9WhrDYpXHORXpxVL5oHj3Era7wo6k/Wd8/IsQNnZUTzkGS29lyn3nAT05v6ltUuTFVCCYDEGfy2Or/sPA==
"@eslint/core@^0.15.0", "@eslint/core@^0.15.1":
version "0.15.1"
resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.15.1.tgz#d530d44209cbfe2f82ef86d6ba08760196dd3b60"
integrity sha512-bkOp+iumZCCbt1K1CmWf0R9pM5yKpDv+ZXtvSyQpudrI9kuFLp+bM2WOPXImuD/ceQuaa8f5pj93Y7zyECIGNA==
"@eslint/core@^0.15.2":
version "0.15.2"
resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.15.2.tgz#59386327d7862cc3603ebc7c78159d2dcc4a868f"
integrity sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==
dependencies:
"@types/json-schema" "^7.0.15"
@@ -3445,10 +3450,10 @@
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-8.57.0.tgz#a5417ae8427873f1dd08b70b3574b453e67b5f7f"
integrity sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==
"@eslint/js@9.31.0":
version "9.31.0"
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.31.0.tgz#adb1f39953d8c475c4384b67b67541b0d7206ed8"
integrity sha512-LOm5OVt7D4qiKCqoiPbA7LWmI+tbw1VbTUowBcUMgQSuM6poJufkFkYDcQpo5KfgD39TnNySV26QjOh7VFpSyw==
"@eslint/js@9.33.0":
version "9.33.0"
resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.33.0.tgz#475c92fdddab59b8b8cab960e3de2564a44bf368"
integrity sha512-5K1/mKhWaMfreBGJTwval43JJmkip0RmM+3+IuqupeSKNC/Th2Kc7ucaq5ovTSra/OOKB9c58CGSz3QMVbWt0A==
"@eslint/js@^10.0.0":
version "10.0.0"
@@ -3460,12 +3465,12 @@
resolved "https://registry.yarnpkg.com/@eslint/object-schema/-/object-schema-2.1.6.tgz#58369ab5b5b3ca117880c0f6c0b0f32f6950f24f"
integrity sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==
"@eslint/plugin-kit@^0.3.1":
version "0.3.3"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.3.3.tgz#32926b59bd407d58d817941e48b2a7049359b1fd"
integrity sha512-1+WqvgNMhmlAambTvT3KPtCl/Ibr68VldY2XY40SL1CE0ZXiakFR/cbTspaF5HsnpDMvcYYoJHfl4980NBjGag==
"@eslint/plugin-kit@^0.3.5":
version "0.3.5"
resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.3.5.tgz#fd8764f0ee79c8ddab4da65460c641cefee017c5"
integrity sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==
dependencies:
"@eslint/core" "^0.15.1"
"@eslint/core" "^0.15.2"
levn "^0.4.1"
"@faker-js/faker@9.6.0":
@@ -4609,6 +4614,11 @@
resolved "https://registry.yarnpkg.com/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz#291c227e93fd407a96ecd59879a35809120e432b"
integrity sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==
"@istanbuljs/schema@^0.1.2":
version "0.1.3"
resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98"
integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==
"@jest/types@^26.3.0":
version "26.6.2"
resolved "https://registry.yarnpkg.com/@jest/types/-/types-26.6.2.tgz#bef5a532030e1d88a2f5a6d933f84e97226ed48e"
@@ -4911,7 +4921,7 @@
"@jridgewell/resolve-uri" "^3.0.3"
"@jridgewell/sourcemap-codec" "^1.4.10"
"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25", "@jridgewell/trace-mapping@^0.3.28", "@jridgewell/trace-mapping@^0.3.9":
"@jridgewell/trace-mapping@^0.3.23", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25", "@jridgewell/trace-mapping@^0.3.28", "@jridgewell/trace-mapping@^0.3.29", "@jridgewell/trace-mapping@^0.3.9":
version "0.3.29"
resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz#a58d31eaadaf92c6695680b2e1d464a9b8fbf7fc"
integrity sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==
@@ -8190,7 +8200,7 @@
dependencies:
"@types/node" "*"
"@types/node@*", "@types/node@>=10.0.0", "@types/node@^22.17.1", "@types/node@^22.7.7":
"@types/node@*", "@types/node@>=10.0.0", "@types/node@^22.13.10", "@types/node@^22.17.1", "@types/node@^22.7.7":
version "22.17.1"
resolved "https://registry.yarnpkg.com/@types/node/-/node-22.17.1.tgz#484a755050497ebc3b37ff5adb7470f2e3ea5f5b"
integrity sha512-y3tBaz+rjspDTylNjAX37jEC3TETEFGNJL6uQDxwF9/8GLLIjW1rvVHlynyuUKMnMr1Roq8jOv3vkopBjC4/VA==
@@ -9059,6 +9069,25 @@
dependencies:
"@rolldown/pluginutils" "1.0.0-beta.19"
"@vitest/coverage-v8@^3.0.9":
version "3.2.4"
resolved "https://registry.yarnpkg.com/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz#a2d8d040288c1956a1c7d0a0e2cdcfc7a3319f13"
integrity sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==
dependencies:
"@ampproject/remapping" "^2.3.0"
"@bcoe/v8-coverage" "^1.0.2"
ast-v8-to-istanbul "^0.3.3"
debug "^4.4.1"
istanbul-lib-coverage "^3.2.2"
istanbul-lib-report "^3.0.1"
istanbul-lib-source-maps "^5.0.6"
istanbul-reports "^3.1.7"
magic-string "^0.30.17"
magicast "^0.3.5"
std-env "^3.9.0"
test-exclude "^7.0.1"
tinyrainbow "^2.0.0"
"@vitest/expect@2.1.9":
version "2.1.9"
resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-2.1.9.tgz#b566ea20d58ea6578d8dc37040d6c1a47ebe5ff8"
@@ -10680,6 +10709,15 @@ ast-types@^0.13.4:
dependencies:
tslib "^2.0.1"
ast-v8-to-istanbul@^0.3.3:
version "0.3.4"
resolved "https://registry.yarnpkg.com/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.4.tgz#1463bb1d35eb4f097c7e6161ae2f64a17e498fef"
integrity sha512-cxrAnZNLBnQwBPByK4CeDaw5sWZtMilJE/Q3iDA0aamgaIVNDF9T6K2/8DfYDZEejZ2jNnDrG9m8MY72HFd0KA==
dependencies:
"@jridgewell/trace-mapping" "^0.3.29"
estree-walker "^3.0.3"
js-tokens "^9.0.1"
astral-regex@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
@@ -16023,19 +16061,19 @@ eslint@^8.56.0:
strip-ansi "^6.0.1"
text-table "^0.2.0"
eslint@^9.31.0:
version "9.31.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.31.0.tgz#9a488e6da75bbe05785cd62e43c5ea99356d21ba"
integrity sha512-QldCVh/ztyKJJZLr4jXNUByx3gR+TDYZCRXEktiZoUR3PGy4qCmSbkxcIle8GEwGpb5JBZazlaJ/CxLidXdEbQ==
eslint@^9.22.0, eslint@^9.31.0:
version "9.33.0"
resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.33.0.tgz#cc186b3d9eb0e914539953d6a178a5b413997b73"
integrity sha512-TS9bTNIryDzStCpJN93aC5VRSW3uTx9sClUn4B87pwiCaJh220otoI0X8mJKr+VcPtniMdN8GKjlwgWGUv5ZKA==
dependencies:
"@eslint-community/eslint-utils" "^4.2.0"
"@eslint-community/regexpp" "^4.12.1"
"@eslint/config-array" "^0.21.0"
"@eslint/config-helpers" "^0.3.0"
"@eslint/core" "^0.15.0"
"@eslint/config-helpers" "^0.3.1"
"@eslint/core" "^0.15.2"
"@eslint/eslintrc" "^3.3.1"
"@eslint/js" "9.31.0"
"@eslint/plugin-kit" "^0.3.1"
"@eslint/js" "9.33.0"
"@eslint/plugin-kit" "^0.3.5"
"@humanfs/node" "^0.16.6"
"@humanwhocodes/module-importer" "^1.0.1"
"@humanwhocodes/retry" "^0.4.2"
@@ -18066,7 +18104,7 @@ glob@7.2.0:
once "^1.3.0"
path-is-absolute "^1.0.0"
glob@^10.2.2, glob@^10.3.10, glob@^10.3.12, glob@^10.3.3, glob@^10.3.7, glob@^10.4.2:
glob@^10.2.2, glob@^10.3.10, glob@^10.3.12, glob@^10.3.3, glob@^10.3.7, glob@^10.4.1, glob@^10.4.2:
version "10.4.5"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956"
integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==
@@ -18941,7 +18979,7 @@ html-entities@^2.4.0:
resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.5.2.tgz#201a3cf95d3a15be7099521620d19dfb4f65359f"
integrity sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA==
html-escaper@^2.0.2:
html-escaper@^2.0.0, html-escaper@^2.0.2:
version "2.0.2"
resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453"
integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==
@@ -20461,6 +20499,37 @@ issue-parser@^6.0.0:
lodash.isstring "^4.0.1"
lodash.uniqby "^4.7.0"
istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.2:
version "3.2.2"
resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756"
integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==
istanbul-lib-report@^3.0.0, istanbul-lib-report@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d"
integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==
dependencies:
istanbul-lib-coverage "^3.0.0"
make-dir "^4.0.0"
supports-color "^7.1.0"
istanbul-lib-source-maps@^5.0.6:
version "5.0.6"
resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-5.0.6.tgz#acaef948df7747c8eb5fbf1265cb980f6353a441"
integrity sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==
dependencies:
"@jridgewell/trace-mapping" "^0.3.23"
debug "^4.1.1"
istanbul-lib-coverage "^3.0.0"
istanbul-reports@^3.1.7:
version "3.1.7"
resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.7.tgz#daed12b9e1dca518e15c056e1e537e741280fa0b"
integrity sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==
dependencies:
html-escaper "^2.0.0"
istanbul-lib-report "^3.0.0"
istextorbinary@6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/istextorbinary/-/istextorbinary-6.0.0.tgz#bc6e7541006bc203feffe16628d0a72893b2ad54"
@@ -22195,7 +22264,16 @@ magic-string@^0.26.7:
dependencies:
sourcemap-codec "^1.4.8"
make-dir@4.0.0:
magicast@^0.3.5:
version "0.3.5"
resolved "https://registry.yarnpkg.com/magicast/-/magicast-0.3.5.tgz#8301c3c7d66704a0771eb1bad74274f0ec036739"
integrity sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==
dependencies:
"@babel/parser" "^7.25.4"
"@babel/types" "^7.25.4"
source-map-js "^1.2.0"
make-dir@4.0.0, make-dir@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e"
integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==
@@ -29189,7 +29267,7 @@ sort-package-json@^1.22.1:
is-plain-obj "2.1.0"
sort-object-keys "^1.1.3"
"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.1, source-map-js@^1.0.2, source-map-js@^1.2.1:
"source-map-js@>=0.6.2 <2.0.0", source-map-js@^1.0.1, source-map-js@^1.0.2, source-map-js@^1.2.0, source-map-js@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==
@@ -30472,6 +30550,15 @@ terser@5.39.0, terser@^5.10.0, terser@^5.31.1:
commander "^2.20.0"
source-map-support "~0.5.20"
test-exclude@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-7.0.1.tgz#20b3ba4906ac20994e275bbcafd68d510264c2a2"
integrity sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==
dependencies:
"@istanbuljs/schema" "^0.1.2"
glob "^10.4.1"
minimatch "^9.0.4"
text-decoder@^1.1.0:
version "1.1.1"
resolved "https://registry.npmjs.org/text-decoder/-/text-decoder-1.1.1.tgz#5df9c224cebac4a7977720b9f083f9efa1aefde8"
@@ -31324,10 +31411,10 @@ typescript@5.4.5, typescript@~5.4.5:
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.4.5.tgz#42ccef2c571fdbd0f6718b1d1f5e6e5ef006f611"
integrity sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==
"typescript@>=3 < 6", typescript@^5.4.3:
version "5.6.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.3.tgz#5f3449e31c9d94febb17de03cc081dd56d81db5b"
integrity sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==
"typescript@>=3 < 6", typescript@^5.4.3, typescript@^5.8.2:
version "5.9.2"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.2.tgz#d93450cddec5154a2d5cabe3b8102b83316fb2a6"
integrity sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==
ua-parser-js@0.7.33, ua-parser-js@^0.7.18:
version "0.7.33"
@@ -32266,7 +32353,7 @@ vitest@2.1.9, vitest@^2.1.9:
vite-node "2.1.9"
why-is-node-running "^2.3.0"
vitest@3.2.4:
vitest@3.2.4, vitest@^3.0.9:
version "3.2.4"
resolved "https://registry.yarnpkg.com/vitest/-/vitest-3.2.4.tgz#0637b903ad79d1539a25bc34c0ed54b5c67702ea"
integrity sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==