Mirror of https://github.com/cypress-io/cypress.git, synced 2026-02-11 09:40:11 -06:00
diff --git a/node_modules/socket.io-parser/dist/binary.js b/node_modules/socket.io-parser/dist/binary.js
index 50ed998..0492ec4 100644
--- a/node_modules/socket.io-parser/dist/binary.js
+++ b/node_modules/socket.io-parser/dist/binary.js
@@ -2,6 +2,9 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.reconstructPacket = exports.deconstructPacket = void 0;
 const is_binary_1 = require("./is-binary");
+/**
+ * This file is patched to enable circular objects to be sent over socket.io.
+ */
 /**
  * Replaces every Buffer | ArrayBuffer | Blob | File in packet with a numbered placeholder.
  *
@@ -13,12 +16,12 @@ function deconstructPacket(packet) {
     const buffers = [];
     const packetData = packet.data;
     const pack = packet;
-    pack.data = _deconstructPacket(packetData, buffers);
+    pack.data = _deconstructPacket(packetData, buffers, [], new WeakMap());
     pack.attachments = buffers.length; // number of binary 'attachments'
     return { packet: pack, buffers: buffers };
 }
 exports.deconstructPacket = deconstructPacket;
-function _deconstructPacket(data, buffers) {
+function _deconstructPacket(data, buffers, known, retvals) {
     if (!data)
         return data;
     if (is_binary_1.isBinary(data)) {
@@ -26,18 +29,27 @@ function _deconstructPacket(data, buffers) {
         buffers.push(data);
         return placeholder;
     }
-    else if (Array.isArray(data)) {
+    else if (retvals.has(data)) {
+        return retvals.get(data)
+    }
+    else if (known.includes(data)) {
+        return data;
+    }
+    known.push(data)
+    if (Array.isArray(data)) {
         const newData = new Array(data.length);
+        retvals.set(data, newData)
         for (let i = 0; i < data.length; i++) {
-            newData[i] = _deconstructPacket(data[i], buffers);
+            newData[i] = _deconstructPacket(data[i], buffers, known, retvals);
         }
         return newData;
     }
     else if (typeof data === "object" && !(data instanceof Date)) {
         const newData = {};
+        retvals.set(data, newData)
         for (const key in data) {
-            if (data.hasOwnProperty(key)) {
-                newData[key] = _deconstructPacket(data[key], buffers);
+            if (Object.prototype.hasOwnProperty.call(data, key)) {
+                newData[key] = _deconstructPacket(data[key], buffers, known, retvals);
             }
         }
         return newData;
@@ -53,12 +65,12 @@ function _deconstructPacket(data, buffers) {
  * @public
  */
 function reconstructPacket(packet, buffers) {
-    packet.data = _reconstructPacket(packet.data, buffers);
+    packet.data = _reconstructPacket(packet.data, buffers, []);
     packet.attachments = undefined; // no longer useful
     return packet;
 }
 exports.reconstructPacket = reconstructPacket;
-function _reconstructPacket(data, buffers) {
+function _reconstructPacket(data, buffers, known) {
     if (!data)
         return data;
     if (data && data._placeholder === true) {
@@ -71,16 +83,19 @@ function _reconstructPacket(data, buffers) {
         else {
             throw new Error("illegal attachments");
         }
+    } else if (known.includes(data)) {
+        return data
     }
-    else if (Array.isArray(data)) {
+    known.push(data)
+    if (Array.isArray(data)) {
         for (let i = 0; i < data.length; i++) {
-            data[i] = _reconstructPacket(data[i], buffers);
+            data[i] = _reconstructPacket(data[i], buffers, known);
         }
     }
     else if (typeof data === "object") {
         for (const key in data) {
             if (data.hasOwnProperty(key)) {
-                data[key] = _reconstructPacket(data[key], buffers);
+                data[key] = _reconstructPacket(data[key], buffers, known);
             }
         }
     }
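
For context, the `known` array and `retvals` WeakMap threaded through `_deconstructPacket` above exist to break cycles during the recursive walk. A minimal sketch of the kind of payload this enables (the object shape is purely illustrative, not taken from the patch):

// A self-referencing payload, like the ones the patched deconstructPacket now tolerates.
const payload = { name: 'parent', children: [] };
payload.children.push(payload); // circular reference back to the root

// Before the patch, the recursive walk over `payload` would keep following the
// cycle until the call stack overflowed; with the patch, already-visited objects
// are looked up in `retvals`, so the rebuilt copy keeps the same circular shape.
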
diff --git a/node_modules/socket.io-parser/dist/index.js b/node_modules/socket.io-parser/dist/index.js
index b0c8727..3b2dcc3 100644
--- a/node_modules/socket.io-parser/dist/index.js
+++ b/node_modules/socket.io-parser/dist/index.js
@@ -5,6 +5,7 @@ const Emitter = require("component-emitter");
 const binary_1 = require("./binary");
 const is_binary_1 = require("./is-binary");
 const debug = require("debug")("socket.io-parser");
+const flatted = require("flatted");
 /**
  * Protocol version.
  *
@@ -66,7 +67,7 @@ class Encoder {
         }
         // json data
         if (null != obj.data) {
-            str += JSON.stringify(obj.data);
+            str += flatted.stringify(obj.data);
         }
         debug("encoded %j as %s", obj, str);
         return str;
@@ -235,7 +236,7 @@ class Decoder extends Emitter {
 exports.Decoder = Decoder;
 function tryParse(str) {
     try {
-        return JSON.parse(str);
+        return flatted.parse(str);
     }
     catch (e) {
         return false;
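
The changes above swap `JSON` for the `flatted` library, which serializes object graphs (including cycles) into an index-based JSON form. A small standalone round trip, assuming `flatted` is installed, illustrates the difference:

const flatted = require('flatted');

const obj = { name: 'parent' };
obj.self = obj; // circular reference

// JSON.stringify(obj) would throw "Converting circular structure to JSON".
const str = flatted.stringify(obj);  // cycle-safe, index-based encoding
const copy = flatted.parse(str);     // rebuilds the graph, cycle included

console.log(copy.self === copy); // true

Note that both the encoding path in Encoder and the tryParse helper used when decoding are patched, so both ends of the connection need this patched parser for the wire format to stay compatible.
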
diff --git a/node_modules/socket.io-parser/dist/is-binary.js b/node_modules/socket.io-parser/dist/is-binary.js
index 4b7c234..73ec260 100644
--- a/node_modules/socket.io-parser/dist/is-binary.js
+++ b/node_modules/socket.io-parser/dist/is-binary.js
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.hasBinary = exports.isBinary = void 0;
+const withNativeBuffer = typeof Buffer === 'function' && typeof Buffer.isBuffer === 'function';
 const withNativeArrayBuffer = typeof ArrayBuffer === "function";
 const isView = (obj) => {
     return typeof ArrayBuffer.isView === "function"
@@ -14,24 +15,38 @@ const withNativeBlob = typeof Blob === "function" ||
 const withNativeFile = typeof File === "function" ||
     (typeof File !== "undefined" &&
         toString.call(File) === "[object FileConstructor]");
+/**
+ * Returns true if obj is an ArrayBuffer.
+ * This extra check is made because the "instanceof ArrayBuffer" check does not work
+ * across different execution contexts.
+ * @private
+ */
+function isArrayBuffer(obj) {
+    return typeof obj === 'object' && obj !== null && toString.call(obj) === '[object ArrayBuffer]';
+}
 /**
  * Returns true if obj is a Buffer, an ArrayBuffer, a Blob or a File.
  *
  * @private
  */
 function isBinary(obj) {
-    return ((withNativeArrayBuffer && (obj instanceof ArrayBuffer || isView(obj))) ||
+    return ((withNativeArrayBuffer && (obj instanceof ArrayBuffer || isArrayBuffer(obj) || isView(obj))) ||
         (withNativeBlob && obj instanceof Blob) ||
-        (withNativeFile && obj instanceof File));
+        (withNativeFile && obj instanceof File) ||
+        (withNativeBuffer && Buffer.isBuffer(obj)));
 }
 exports.isBinary = isBinary;
-function hasBinary(obj, toJSON) {
+function hasBinary(obj, known = [], toJSON = false) {
     if (!obj || typeof obj !== "object") {
         return false;
     }
+    if (known.includes(obj)) {
+        return false
+    }
+    known.push(obj)
     if (Array.isArray(obj)) {
         for (let i = 0, l = obj.length; i < l; i++) {
-            if (hasBinary(obj[i])) {
+            if (hasBinary(obj[i], known)) {
                 return true;
             }
         }
@@ -42,11 +57,11 @@ function hasBinary(obj, toJSON) {
     }
     if (obj.toJSON &&
         typeof obj.toJSON === "function" &&
-        arguments.length === 1) {
-        return hasBinary(obj.toJSON(), true);
+        arguments.length === 2) {
+        return hasBinary(obj.toJSON(), known, true);
     }
     for (const key in obj) {
-        if (Object.prototype.hasOwnProperty.call(obj, key) && hasBinary(obj[key])) {
+        if (Object.prototype.hasOwnProperty.call(obj, key) && hasBinary(obj[key], known)) {
             return true;
         }
     }
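
The `isArrayBuffer` helper added above relies on `Object.prototype.toString` because `instanceof ArrayBuffer` fails for buffers created in a different execution context. A quick Node-only illustration of that failure mode (the `vm` usage is only for demonstration and is not part of the patch):

const vm = require('vm');

// An ArrayBuffer constructed in another realm (vm context, iframe, worker, ...).
const foreign = vm.runInNewContext('new ArrayBuffer(8)');

console.log(foreign instanceof ArrayBuffer);          // false: the other realm has its own ArrayBuffer constructor
console.log(Object.prototype.toString.call(foreign)); // '[object ArrayBuffer]': the tag check still works
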