Mirror of https://github.com/appium/appium.git (synced 2026-01-07 02:40:01 -06:00)
chore: reformat everything
This reformats all `.js`, `.ts`, and `.d.ts` files in the repository with Prettier.
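For context, the change is purely mechanical: it applies the Prettier settings that this commit adds to the root `package.json` (`bracketSpacing: false`, `singleQuote: true`, `printWidth: 100`, visible in the diff below). The exact command used to apply the reformat is not recorded in the commit; something like `prettier --write .` against that configuration would produce the same result. A minimal sketch of the resulting style, with the pre-reformat form shown in comments:

```js
// Illustrative sketch only — not copied verbatim from any single file in the diff.
// With {"bracketSpacing": false, "singleQuote": true, "printWidth": 100},
// destructured requires lose the spaces inside the braces and named functions
// lose the space before their parameter lists.

// Before the reformat, a typical script line looked like:
//   const { fs, logger } = require('appium-support');
//   async function setPackageJsonVersion (version) { /* ... */ }

// After the reformat, Prettier emits:
const {fs, logger} = require('appium-support');
async function setPackageJsonVersion(version) {
  // placeholder body for illustration
  return version;
}
```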
@@ -11,11 +11,7 @@
"rules": {"func-names": "off"}
},
{
"files": [
"./packages/*/index.js",
"./packages/*/scripts/**/*.js",
"./test/*.js"
],
"files": ["./packages/*/index.js", "./packages/*/scripts/**/*.js", "./test/*.js"],
"parserOptions": {
"sourceType": "script"
}

@@ -3,10 +3,8 @@
'use strict';

module.exports = {
require: [
require.resolve('./test/setup.js')
],
// forbids use of .only() in CI
require: [require.resolve('./test/setup.js')],
// forbids use of .only() in CI
forbidOnly: Boolean(process.env.CI),
color: true
color: true,
};

.prettierignore (new file, 12 lines)
@@ -0,0 +1,12 @@
**/node_modules/**
**/build/**
**/fixtures/**
**/*.min.*
**/*.md
**/*.yml
**/*.json
**/.vscode/**
**/*.html
**/generated/**
# generated
packages/types/lib/appium-config.ts

@@ -1,3 +1,3 @@
const semver = require('semver');
const { version } = require('../../package.json');
console.log(semver(version).prerelease[0]); //eslint-disable-line no-console
const {version} = require('../../package.json');
console.log(semver(version).prerelease[0]); //eslint-disable-line no-console

@@ -1,17 +1,17 @@
const path = require('path');
const { fs, logger } = require('appium-support');
const { asyncify } = require('asyncbox');
const {fs, logger} = require('appium-support');
const {asyncify} = require('asyncbox');
const packageJson = require('../../package.json');

const log = new logger.getLogger('Create Release Branch:');

async function setPackageJsonVersion (version = `${process.env.MINOR_BRANCH_NAME}.0-rc.0`) {
async function setPackageJsonVersion(version = `${process.env.MINOR_BRANCH_NAME}.0-rc.0`) {
packageJson.version = version;
log.info(`Setting version to: ${version}`);
await fs.writeFile(
path.resolve(__dirname, '..', '..', 'package.json'),
JSON.stringify(packageJson, null, 2),
'utf8',
'utf8'
);
}

@@ -19,4 +19,4 @@ if (require.main === module) {
asyncify(setPackageJsonVersion);
}

module.exports = setPackageJsonVersion;
module.exports = setPackageJsonVersion;

docs/toc.js (1021 lines changed; diff suppressed because the file is too large)

@@ -77,7 +77,8 @@
},
"prettier": {
"bracketSpacing": false,
"singleQuote": true
"singleQuote": true,
"printWidth": 100
},
"dependencies": {
"@appium/base-driver": "file:packages/base-driver",

@@ -1,6 +1,6 @@
.md-source__fact--version {
display: none;
}
.md-source__fact:nth-child(1n+2):before {
.md-source__fact:nth-child(1n + 2):before {
margin-left: 0 !important;
}

@@ -2,13 +2,20 @@
|
||||
/* eslint-disable promise/prefer-await-to-callbacks */
|
||||
/* eslint-disable promise/prefer-await-to-then */
|
||||
|
||||
const { Mike } = require('@appium/docutils');
|
||||
const { log, LANGS, DOCS_DIR, DOCS_BRANCH, DOCS_PREFIX,
|
||||
DOCS_REMOTE, LATEST_ALIAS } = require('./utils');
|
||||
const {Mike} = require('@appium/docutils');
|
||||
const {
|
||||
log,
|
||||
LANGS,
|
||||
DOCS_DIR,
|
||||
DOCS_BRANCH,
|
||||
DOCS_PREFIX,
|
||||
DOCS_REMOTE,
|
||||
LATEST_ALIAS,
|
||||
} = require('./utils');
|
||||
const copyAssets = require('./copy-assets');
|
||||
const path = require('path');
|
||||
const semver = require('semver');
|
||||
const { version } = require('../../package.json');
|
||||
const {version} = require('../../package.json');
|
||||
|
||||
const branch = process.env.APPIUM_DOCS_BRANCH || DOCS_BRANCH;
|
||||
const prefix = process.env.APPIUM_DOCS_PREFIX || DOCS_PREFIX;
|
||||
@@ -16,7 +23,7 @@ const remote = process.env.APPIUM_DOCS_PREFIX || DOCS_REMOTE;
|
||||
|
||||
const shouldPush = !!process.env.APPIUM_DOCS_PUBLISH;
|
||||
|
||||
async function main () {
|
||||
async function main() {
|
||||
log.info(`Building Appium docs and committing to ${DOCS_BRANCH}`);
|
||||
|
||||
await copyAssets();
|
||||
@@ -27,7 +34,12 @@ async function main () {
|
||||
for (const lang of LANGS) {
|
||||
log.info(`Building docs for language '${lang}' and version ${majMinVer}`);
|
||||
const configFile = path.join(DOCS_DIR, `mkdocs-${lang}.yml`);
|
||||
const m = new Mike({branch, prefix: path.join(prefix, lang), remote, configFile});
|
||||
const m = new Mike({
|
||||
branch,
|
||||
prefix: path.join(prefix, lang),
|
||||
remote,
|
||||
configFile,
|
||||
});
|
||||
|
||||
const docsAlreadyExisted = (await m.list()).length >= 1;
|
||||
|
||||
@@ -36,7 +48,7 @@ async function main () {
|
||||
alias: LATEST_ALIAS,
|
||||
shouldRebase: shouldPush,
|
||||
shouldPush,
|
||||
commit: `docs(appium): auto-build docs for appium@${majMinVer}, language ${lang}`
|
||||
commit: `docs(appium): auto-build docs for appium@${majMinVer}, language ${lang}`,
|
||||
};
|
||||
await m.deploy(deployOpts);
|
||||
|
||||
|
||||
@@ -2,11 +2,11 @@
|
||||
/* eslint-disable promise/prefer-await-to-callbacks */
|
||||
/* eslint-disable promise/prefer-await-to-then */
|
||||
|
||||
const { fs } = require('@appium/support');
|
||||
const { log, LANGS, DOCS_DIR, ASSETS_DIR } = require('./utils');
|
||||
const {fs} = require('@appium/support');
|
||||
const {log, LANGS, DOCS_DIR, ASSETS_DIR} = require('./utils');
|
||||
const path = require('path');
|
||||
|
||||
async function main () {
|
||||
async function main() {
|
||||
log.info('Copying generic assets to docs language dirs');
|
||||
|
||||
for (const lang of LANGS) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// for simplicity this file is not transpiled and is run directly via an npm script
|
||||
//
|
||||
const { logger } = require('@appium/support');
|
||||
const {logger} = require('@appium/support');
|
||||
const path = require('path');
|
||||
|
||||
const log = logger.getLogger('Docs');
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const { baseConfig } = require('@appium/docutils');
|
||||
const {baseConfig} = require('@appium/docutils');
|
||||
|
||||
const lang = process.env.APPIUM_DOCS_LANG || 'en';
|
||||
|
||||
|
||||
@@ -115,9 +115,7 @@ class AppiumDriver extends DriverCore {
|
||||
*/
|
||||
get log() {
|
||||
if (!this._log) {
|
||||
const instanceName = `${this.constructor.name}@${node
|
||||
.getObjectId(this)
|
||||
.substring(0, 4)}`;
|
||||
const instanceName = `${this.constructor.name}@${node.getObjectId(this).substring(0, 4)}`;
|
||||
this._log = logger.getLogger(instanceName);
|
||||
}
|
||||
return this._log;
|
||||
@@ -191,9 +189,7 @@ class AppiumDriver extends DriverCore {
|
||||
const defaults = getDefaultsForExtension(extType, extName);
|
||||
const cliArgs = _.isEmpty(defaults)
|
||||
? allCliArgsForExt
|
||||
: _.omitBy(allCliArgsForExt, (value, key) =>
|
||||
_.isEqual(defaults[key], value)
|
||||
);
|
||||
: _.omitBy(allCliArgsForExt, (value, key) => _.isEqual(defaults[key], value));
|
||||
if (!_.isEmpty(cliArgs)) {
|
||||
extInstance.cliArgs = cliArgs;
|
||||
}
|
||||
@@ -236,14 +232,10 @@ class AppiumDriver extends DriverCore {
|
||||
defaultCapabilities
|
||||
);
|
||||
|
||||
const {
|
||||
desiredCaps,
|
||||
processedJsonwpCapabilities,
|
||||
processedW3CCapabilities,
|
||||
} = /** @type {import('./utils').ParsedDriverCaps} */ (parsedCaps);
|
||||
const {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities} =
|
||||
/** @type {import('./utils').ParsedDriverCaps} */ (parsedCaps);
|
||||
protocol = parsedCaps.protocol;
|
||||
const error = /** @type {import('./utils').InvalidCaps} */ (parsedCaps)
|
||||
.error;
|
||||
const error = /** @type {import('./utils').InvalidCaps} */ (parsedCaps).error;
|
||||
// If the parsing of the caps produced an error, throw it in here
|
||||
if (error) {
|
||||
throw error;
|
||||
@@ -254,11 +246,7 @@ class AppiumDriver extends DriverCore {
|
||||
version: driverVersion,
|
||||
driverName,
|
||||
} = this.driverConfig.findMatchingDriver(desiredCaps);
|
||||
this.printNewSessionAnnouncement(
|
||||
InnerDriver.name,
|
||||
driverVersion,
|
||||
InnerDriver.baseVersion
|
||||
);
|
||||
this.printNewSessionAnnouncement(InnerDriver.name, driverVersion, InnerDriver.baseVersion);
|
||||
|
||||
if (this.args.sessionOverride) {
|
||||
await this.deleteAllSessions();
|
||||
@@ -314,14 +302,12 @@ class AppiumDriver extends DriverCore {
|
||||
driverInstance.serverPath = this.args.basePath;
|
||||
|
||||
try {
|
||||
runningDriversData =
|
||||
(await this.curSessionDataForDriver(InnerDriver)) ?? [];
|
||||
runningDriversData = (await this.curSessionDataForDriver(InnerDriver)) ?? [];
|
||||
} catch (e) {
|
||||
throw new errors.SessionNotCreatedError(e.message);
|
||||
}
|
||||
await pendingDriversGuard.acquire(AppiumDriver.name, () => {
|
||||
this.pendingDrivers[InnerDriver.name] =
|
||||
this.pendingDrivers[InnerDriver.name] || [];
|
||||
this.pendingDrivers[InnerDriver.name] = this.pendingDrivers[InnerDriver.name] || [];
|
||||
otherPendingDriversData = _.compact(
|
||||
this.pendingDrivers[InnerDriver.name].map((drv) => drv.driverData)
|
||||
);
|
||||
@@ -360,10 +346,7 @@ class AppiumDriver extends DriverCore {
|
||||
JSON.stringify(w3cSettings)
|
||||
);
|
||||
await driverInstance.updateSettings(w3cSettings);
|
||||
} else if (
|
||||
driverInstance.isMjsonwpProtocol() &&
|
||||
!_.isEmpty(jwpSettings)
|
||||
) {
|
||||
} else if (driverInstance.isMjsonwpProtocol() && !_.isEmpty(jwpSettings)) {
|
||||
this.log.info(
|
||||
`Applying the initial values to Appium settings parsed from MJSONWP caps: ` +
|
||||
JSON.stringify(jwpSettings)
|
||||
@@ -401,16 +384,12 @@ class AppiumDriver extends DriverCore {
|
||||
);
|
||||
}
|
||||
} else {
|
||||
this.log.debug(
|
||||
`Plugin ${plugin.name} does not define an unexpected shutdown handler`
|
||||
);
|
||||
this.log.debug(`Plugin ${plugin.name} does not define an unexpected shutdown handler`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.log.info(
|
||||
`Removing session '${innerSessionId}' from our master session list`
|
||||
);
|
||||
this.log.info(`Removing session '${innerSessionId}' from our master session list`);
|
||||
delete this.sessions[innerSessionId];
|
||||
delete this.sessionPlugins[innerSessionId];
|
||||
};
|
||||
@@ -455,33 +434,26 @@ class AppiumDriver extends DriverCore {
|
||||
let protocol;
|
||||
try {
|
||||
let otherSessionsData;
|
||||
const dstSession = await sessionsListGuard.acquire(
|
||||
AppiumDriver.name,
|
||||
() => {
|
||||
if (!this.sessions[sessionId]) {
|
||||
return;
|
||||
}
|
||||
const curConstructorName = this.sessions[sessionId].constructor.name;
|
||||
otherSessionsData = _.toPairs(this.sessions)
|
||||
.filter(
|
||||
([key, value]) =>
|
||||
value.constructor.name === curConstructorName &&
|
||||
key !== sessionId
|
||||
)
|
||||
.map(([, value]) => value.driverData);
|
||||
const dstSession = this.sessions[sessionId];
|
||||
protocol = dstSession.protocol;
|
||||
this.log.info(
|
||||
`Removing session ${sessionId} from our master session list`
|
||||
);
|
||||
// regardless of whether the deleteSession completes successfully or not
|
||||
// make the session unavailable, because who knows what state it might
|
||||
// be in otherwise
|
||||
delete this.sessions[sessionId];
|
||||
delete this.sessionPlugins[sessionId];
|
||||
return dstSession;
|
||||
const dstSession = await sessionsListGuard.acquire(AppiumDriver.name, () => {
|
||||
if (!this.sessions[sessionId]) {
|
||||
return;
|
||||
}
|
||||
);
|
||||
const curConstructorName = this.sessions[sessionId].constructor.name;
|
||||
otherSessionsData = _.toPairs(this.sessions)
|
||||
.filter(
|
||||
([key, value]) => value.constructor.name === curConstructorName && key !== sessionId
|
||||
)
|
||||
.map(([, value]) => value.driverData);
|
||||
const dstSession = this.sessions[sessionId];
|
||||
protocol = dstSession.protocol;
|
||||
this.log.info(`Removing session ${sessionId} from our master session list`);
|
||||
// regardless of whether the deleteSession completes successfully or not
|
||||
// make the session unavailable, because who knows what state it might
|
||||
// be in otherwise
|
||||
delete this.sessions[sessionId];
|
||||
delete this.sessionPlugins[sessionId];
|
||||
return dstSession;
|
||||
});
|
||||
// this may not be correct, but if `dstSession` was falsy, the call to `deleteSession()` would
|
||||
// throw anyway.
|
||||
if (!dstSession) {
|
||||
@@ -508,9 +480,7 @@ class AppiumDriver extends DriverCore {
|
||||
}
|
||||
|
||||
const {force = false, reason} = opts;
|
||||
this.log.debug(
|
||||
`Cleaning up ${util.pluralize('active session', sessionsCount, true)}`
|
||||
);
|
||||
this.log.debug(`Cleaning up ${util.pluralize('active session', sessionsCount, true)}`);
|
||||
const cleanupPromises = force
|
||||
? _.values(this.sessions).map((drv) =>
|
||||
drv.startUnexpectedShutdown(reason && new Error(reason))
|
||||
@@ -639,10 +609,7 @@ class AppiumDriver extends DriverCore {
|
||||
// if we're running with plugins, make sure we log that the default behavior is actually
|
||||
// happening so we can tell when the plugin call chain is unwrapping to the default behavior
|
||||
// if that's what happens
|
||||
plugins.length &&
|
||||
this.log.info(
|
||||
`Executing default handling behavior for command '${cmd}'`
|
||||
);
|
||||
plugins.length && this.log.info(`Executing default handling behavior for command '${cmd}'`);
|
||||
|
||||
// if we make it here, we know that the default behavior is handled
|
||||
cmdHandledBy.default = true;
|
||||
@@ -669,11 +636,7 @@ class AppiumDriver extends DriverCore {
|
||||
if (isUmbrellaCmd) {
|
||||
// some commands, like deleteSession, we want to make sure to handle on *this* driver,
|
||||
// not the platform driver
|
||||
return await BaseDriver.prototype.executeCommand.call(
|
||||
this,
|
||||
cmd,
|
||||
...args
|
||||
);
|
||||
return await BaseDriver.prototype.executeCommand.call(this, cmd, ...args);
|
||||
}
|
||||
|
||||
// here we know that we are executing a session command, and have a valid session driver
|
||||
@@ -698,11 +661,7 @@ class AppiumDriver extends DriverCore {
|
||||
// And finally, if the command was createSession, we want to migrate any plugins which were
|
||||
// previously sessionless to use the new sessionId, so that plugins can share state between
|
||||
// their createSession method and other instance methods
|
||||
if (
|
||||
cmd === CREATE_SESSION_COMMAND &&
|
||||
this.sessionlessPlugins.length &&
|
||||
!res.error
|
||||
) {
|
||||
if (cmd === CREATE_SESSION_COMMAND && this.sessionlessPlugins.length && !res.error) {
|
||||
const sessionId = _.first(res.value);
|
||||
this.log.info(
|
||||
`Promoting ${this.sessionlessPlugins.length} sessionless plugins to be attached ` +
|
||||
@@ -717,9 +676,7 @@ class AppiumDriver extends DriverCore {
|
||||
|
||||
wrapCommandWithPlugins({driver, cmd, args, next, cmdHandledBy, plugins}) {
|
||||
plugins.length &&
|
||||
this.log.info(
|
||||
`Plugins which can handle cmd '${cmd}': ${plugins.map((p) => p.name)}`
|
||||
);
|
||||
this.log.info(`Plugins which can handle cmd '${cmd}': ${plugins.map((p) => p.name)}`);
|
||||
|
||||
// now we can go through each plugin and wrap `next` around its own handler, passing the *old*
|
||||
// next in so that it can call it if it wants to
|
||||
@@ -755,15 +712,11 @@ class AppiumDriver extends DriverCore {
|
||||
// interact well together, and it would be hard to debug otherwise without this kind of
|
||||
// message).
|
||||
const didHandle = Object.keys(cmdHandledBy).filter((k) => cmdHandledBy[k]);
|
||||
const didntHandle = Object.keys(cmdHandledBy).filter(
|
||||
(k) => !cmdHandledBy[k]
|
||||
);
|
||||
const didntHandle = Object.keys(cmdHandledBy).filter((k) => !cmdHandledBy[k]);
|
||||
if (didntHandle.length > 0) {
|
||||
this.log.info(
|
||||
`Command '${cmd}' was *not* handled by the following behaviours or plugins, even ` +
|
||||
`though they were registered to handle it: ${JSON.stringify(
|
||||
didntHandle
|
||||
)}. The ` +
|
||||
`though they were registered to handle it: ${JSON.stringify(didntHandle)}. The ` +
|
||||
`command *was* handled by these: ${JSON.stringify(didHandle)}.`
|
||||
);
|
||||
}
|
||||
@@ -797,11 +750,7 @@ class AppiumDriver extends DriverCore {
|
||||
|
||||
proxyActive(sessionId) {
|
||||
const dstSession = this.sessions[sessionId];
|
||||
return (
|
||||
dstSession &&
|
||||
_.isFunction(dstSession.proxyActive) &&
|
||||
dstSession.proxyActive(sessionId)
|
||||
);
|
||||
return dstSession && _.isFunction(dstSession.proxyActive) && dstSession.proxyActive(sessionId);
|
||||
}
|
||||
|
||||
getProxyAvoidList(sessionId) {
|
||||
|
||||
@@ -147,8 +147,7 @@ function makeUninstallArgs(type) {
|
||||
{
|
||||
type: 'str',
|
||||
help:
|
||||
'Name of the driver to uninstall, for example: ' + type ===
|
||||
DRIVER_TYPE
|
||||
'Name of the driver to uninstall, for example: ' + type === DRIVER_TYPE
|
||||
? DRIVER_EXAMPLE
|
||||
: PLUGIN_EXAMPLE,
|
||||
},
|
||||
@@ -205,8 +204,7 @@ function makeRunArgs(type) {
|
||||
{
|
||||
type: 'str',
|
||||
help:
|
||||
`Name of the ${type} to run a script from, for example: ` + type ===
|
||||
DRIVER_TYPE
|
||||
`Name of the ${type} to run a script from, for example: ` + type === DRIVER_TYPE
|
||||
? DRIVER_EXAMPLE
|
||||
: PLUGIN_EXAMPLE,
|
||||
},
|
||||
|
||||
@@ -3,12 +3,7 @@ import ExtensionCommand from './extension-command';
|
||||
import {KNOWN_DRIVERS} from '../constants';
|
||||
import '@colors/colors';
|
||||
|
||||
const REQ_DRIVER_FIELDS = [
|
||||
'driverName',
|
||||
'automationName',
|
||||
'platformNames',
|
||||
'mainClass',
|
||||
];
|
||||
const REQ_DRIVER_FIELDS = ['driverName', 'automationName', 'platformNames', 'mainClass'];
|
||||
|
||||
/**
|
||||
* @extends {ExtensionCommand<DriverType>}
|
||||
|
||||
@@ -84,9 +84,7 @@ class ExtensionCommand {
|
||||
* @return {Promise<ExtensionListData>} map of extension names to extension data
|
||||
*/
|
||||
async list({showInstalled, showUpdates}) {
|
||||
const lsMsg = `Listing ${showInstalled ? 'installed' : 'available'} ${
|
||||
this.type
|
||||
}s`;
|
||||
const lsMsg = `Listing ${showInstalled ? 'installed' : 'available'} ${this.type}s`;
|
||||
const installedNames = Object.keys(this.config.installedExtensions);
|
||||
const knownNames = Object.keys(this.knownExtensions);
|
||||
const exts = [...installedNames, ...knownNames].reduce(
|
||||
@@ -124,8 +122,7 @@ class ExtensionCommand {
|
||||
const updates = await this.checkForExtensionUpdate(ext);
|
||||
data.updateVersion = updates.safeUpdate;
|
||||
data.unsafeUpdateVersion = updates.unsafeUpdate;
|
||||
data.upToDate =
|
||||
updates.safeUpdate === null && updates.unsafeUpdate === null;
|
||||
data.upToDate = updates.safeUpdate === null && updates.unsafeUpdate === null;
|
||||
}
|
||||
});
|
||||
|
||||
@@ -143,14 +140,8 @@ class ExtensionCommand {
|
||||
let upToDateTxt = '';
|
||||
let unsafeUpdateTxt = '';
|
||||
if (data.installed) {
|
||||
const {
|
||||
installType,
|
||||
installSpec,
|
||||
updateVersion,
|
||||
unsafeUpdateVersion,
|
||||
version,
|
||||
upToDate,
|
||||
} = data;
|
||||
const {installType, installSpec, updateVersion, unsafeUpdateVersion, version, upToDate} =
|
||||
data;
|
||||
let typeTxt;
|
||||
switch (installType) {
|
||||
case INSTALL_TYPE_GIT:
|
||||
@@ -163,9 +154,7 @@ class ExtensionCommand {
|
||||
default:
|
||||
typeTxt = '(NPM)';
|
||||
}
|
||||
installTxt = `@${version.yellow} ${
|
||||
('[installed ' + typeTxt + ']').green
|
||||
}`;
|
||||
installTxt = `@${version.yellow} ${('[installed ' + typeTxt + ']').green}`;
|
||||
|
||||
if (showUpdates) {
|
||||
if (updateVersion) {
|
||||
@@ -175,15 +164,12 @@ class ExtensionCommand {
|
||||
upToDateTxt = ` [Up to date]`.green;
|
||||
}
|
||||
if (unsafeUpdateVersion) {
|
||||
unsafeUpdateTxt =
|
||||
` [${unsafeUpdateVersion} available (potentially unsafe)]`.cyan;
|
||||
unsafeUpdateTxt = ` [${unsafeUpdateVersion} available (potentially unsafe)]`.cyan;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`- ${name.yellow}${installTxt}${updateTxt}${upToDateTxt}${unsafeUpdateTxt}`
|
||||
);
|
||||
console.log(`- ${name.yellow}${installTxt}${updateTxt}${upToDateTxt}${unsafeUpdateTxt}`);
|
||||
}
|
||||
|
||||
return listData;
|
||||
@@ -199,22 +185,12 @@ class ExtensionCommand {
|
||||
/** @type {ExtensionFields<typeof this.type>} */
|
||||
let extData;
|
||||
|
||||
if (
|
||||
packageName &&
|
||||
[INSTALL_TYPE_LOCAL, INSTALL_TYPE_NPM].includes(installType)
|
||||
) {
|
||||
throw new Error(
|
||||
`When using --source=${installType}, cannot also use --package`
|
||||
);
|
||||
if (packageName && [INSTALL_TYPE_LOCAL, INSTALL_TYPE_NPM].includes(installType)) {
|
||||
throw new Error(`When using --source=${installType}, cannot also use --package`);
|
||||
}
|
||||
|
||||
if (
|
||||
!packageName &&
|
||||
[INSTALL_TYPE_GIT, INSTALL_TYPE_GITHUB].includes(installType)
|
||||
) {
|
||||
throw new Error(
|
||||
`When using --source=${installType}, must also use --package`
|
||||
);
|
||||
if (!packageName && [INSTALL_TYPE_GIT, INSTALL_TYPE_GITHUB].includes(installType)) {
|
||||
throw new Error(`When using --source=${installType}, must also use --package`);
|
||||
}
|
||||
|
||||
if (installType === INSTALL_TYPE_GITHUB) {
|
||||
@@ -239,9 +215,7 @@ class ExtensionCommand {
|
||||
} else {
|
||||
let pkgName, pkgVer;
|
||||
if (installType === INSTALL_TYPE_LOCAL) {
|
||||
pkgName = path.isAbsolute(installSpec)
|
||||
? installSpec
|
||||
: path.resolve(installSpec);
|
||||
pkgName = path.isAbsolute(installSpec) ? installSpec : path.resolve(installSpec);
|
||||
} else {
|
||||
// at this point we have either an npm package or an appium verified extension
|
||||
// name or a local path. both of which will be installed via npm.
|
||||
@@ -315,8 +289,7 @@ class ExtensionCommand {
|
||||
*/
|
||||
async installViaNpm({installSpec, pkgName, pkgVer}) {
|
||||
const npmSpec = `${pkgName}${pkgVer ? '@' + pkgVer : ''}`;
|
||||
const specMsg =
|
||||
npmSpec === installSpec ? '' : ` using NPM install spec '${npmSpec}'`;
|
||||
const specMsg = npmSpec === installSpec ? '' : ` using NPM install spec '${npmSpec}'`;
|
||||
const msg = `Installing '${installSpec}'${specMsg}`;
|
||||
try {
|
||||
const pkgJsonData = await spinWith(
|
||||
@@ -329,9 +302,7 @@ class ExtensionCommand {
|
||||
);
|
||||
return this.getExtensionFields(pkgJsonData, installSpec);
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Encountered an error when installing package: ${err.message}`
|
||||
);
|
||||
throw new Error(`Encountered an error when installing package: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -396,9 +367,7 @@ class ExtensionCommand {
|
||||
*/
|
||||
async _uninstall({installSpec}) {
|
||||
if (!this.config.isInstalled(installSpec)) {
|
||||
throw new Error(
|
||||
`Can't uninstall ${this.type} '${installSpec}'; it is not installed`
|
||||
);
|
||||
throw new Error(`Can't uninstall ${this.type} '${installSpec}'; it is not installed`);
|
||||
}
|
||||
const installPath = this.config.getInstallPath(installSpec);
|
||||
try {
|
||||
@@ -406,10 +375,7 @@ class ExtensionCommand {
|
||||
} finally {
|
||||
await this.config.removeExtension(installSpec);
|
||||
}
|
||||
log(
|
||||
this.isJsonOutput,
|
||||
`Successfully uninstalled ${this.type} '${installSpec}'`.green
|
||||
);
|
||||
log(this.isJsonOutput, `Successfully uninstalled ${this.type} '${installSpec}'`.green);
|
||||
return this.config.installedExtensions;
|
||||
}
|
||||
|
||||
@@ -423,9 +389,7 @@ class ExtensionCommand {
|
||||
const shouldUpdateAll = installSpec === UPDATE_ALL;
|
||||
// if we're specifically requesting an update for an extension, make sure it's installed
|
||||
if (!shouldUpdateAll && !this.config.isInstalled(installSpec)) {
|
||||
throw new Error(
|
||||
`The ${this.type} '${installSpec}' was not installed, so can't be updated`
|
||||
);
|
||||
throw new Error(`The ${this.type} '${installSpec}' was not installed, so can't be updated`);
|
||||
}
|
||||
const extsToUpdate = shouldUpdateAll
|
||||
? Object.keys(this.config.installedExtensions)
|
||||
@@ -442,18 +406,11 @@ class ExtensionCommand {
|
||||
|
||||
for (const e of extsToUpdate) {
|
||||
try {
|
||||
await spinWith(
|
||||
this.isJsonOutput,
|
||||
`Checking if ${this.type} '${e}' is updatable`,
|
||||
() => {
|
||||
if (
|
||||
this.config.installedExtensions[e].installType !==
|
||||
INSTALL_TYPE_NPM
|
||||
) {
|
||||
throw new NotUpdatableError();
|
||||
}
|
||||
await spinWith(this.isJsonOutput, `Checking if ${this.type} '${e}' is updatable`, () => {
|
||||
if (this.config.installedExtensions[e].installType !== INSTALL_TYPE_NPM) {
|
||||
throw new NotUpdatableError();
|
||||
}
|
||||
);
|
||||
});
|
||||
const update = await spinWith(
|
||||
this.isJsonOutput,
|
||||
`Checking if ${this.type} '${e}' needs an update`,
|
||||
@@ -472,10 +429,7 @@ class ExtensionCommand {
|
||||
`breaking changes. If you want to apply this update, re-run with --unsafe`
|
||||
);
|
||||
}
|
||||
const updateVer =
|
||||
unsafe && update.unsafeUpdate
|
||||
? update.unsafeUpdate
|
||||
: update.safeUpdate;
|
||||
const updateVer = unsafe && update.unsafeUpdate ? update.unsafeUpdate : update.safeUpdate;
|
||||
await spinWith(
|
||||
this.isJsonOutput,
|
||||
`Updating driver '${e}' from ${update.current} to ${updateVer}`,
|
||||
@@ -489,17 +443,13 @@ class ExtensionCommand {
|
||||
|
||||
log(this.isJsonOutput, 'Update report:');
|
||||
for (const [e, update] of _.toPairs(updates)) {
|
||||
log(
|
||||
this.isJsonOutput,
|
||||
`- ${this.type} ${e} updated: ${update.from} => ${update.to}`.green
|
||||
);
|
||||
log(this.isJsonOutput, `- ${this.type} ${e} updated: ${update.from} => ${update.to}`.green);
|
||||
}
|
||||
for (const [e, err] of _.toPairs(errors)) {
|
||||
if (err instanceof NotUpdatableError) {
|
||||
log(
|
||||
this.isJsonOutput,
|
||||
`- '${e}' was not installed via npm, so we could not check ` +
|
||||
`for updates`.yellow
|
||||
`- '${e}' was not installed via npm, so we could not check ` + `for updates`.yellow
|
||||
);
|
||||
} else if (err instanceof NoUpdatesAvailableError) {
|
||||
log(this.isJsonOutput, `- '${e}' had no updates available`.yellow);
|
||||
@@ -524,10 +474,7 @@ class ExtensionCommand {
|
||||
// this is a helper method, 'ext' is assumed to already be installed here, and of the npm
|
||||
// install type
|
||||
const {version, pkgName} = this.config.installedExtensions[ext];
|
||||
let unsafeUpdate = await npm.getLatestVersion(
|
||||
this.config.appiumHome,
|
||||
pkgName
|
||||
);
|
||||
let unsafeUpdate = await npm.getLatestVersion(this.config.appiumHome, pkgName);
|
||||
let safeUpdate = await npm.getLatestSafeUpgradeVersion(
|
||||
this.config.appiumHome,
|
||||
pkgName,
|
||||
|
||||
@@ -28,9 +28,7 @@ async function runExtensionCommand(args, configObject) {
|
||||
const {extensionType: type} = configObject;
|
||||
const extCmd = args[`${type}Command`];
|
||||
if (!extCmd) {
|
||||
throw new TypeError(
|
||||
`Cannot call ${type} command without a subcommand like 'install'`
|
||||
);
|
||||
throw new TypeError(`Cannot call ${type} command without a subcommand like 'install'`);
|
||||
}
|
||||
let {json, suppressOutput} = args;
|
||||
if (suppressOutput) {
|
||||
@@ -39,9 +37,7 @@ async function runExtensionCommand(args, configObject) {
|
||||
const logFn = (msg) => log(json, msg);
|
||||
let config = configObject;
|
||||
config.log = logFn;
|
||||
const CommandClass = /** @type {ExtCommand<ExtType>} */ (
|
||||
commandClasses[type]
|
||||
);
|
||||
const CommandClass = /** @type {ExtCommand<ExtType>} */ (commandClasses[type]);
|
||||
const cmd = new CommandClass({config, json});
|
||||
try {
|
||||
jsonResult = await cmd.execute(args);
|
||||
|
||||
@@ -12,15 +12,7 @@ import {getExtensionArgs, getServerArgs} from './args';
|
||||
* will automatially inject the `server` subcommand.
|
||||
*/
|
||||
const NON_SERVER_ARGS = Object.freeze(
|
||||
new Set([
|
||||
DRIVER_TYPE,
|
||||
PLUGIN_TYPE,
|
||||
SERVER_SUBCOMMAND,
|
||||
'-h',
|
||||
'--help',
|
||||
'-v',
|
||||
'--version',
|
||||
])
|
||||
new Set([DRIVER_TYPE, PLUGIN_TYPE, SERVER_SUBCOMMAND, '-h', '--help', '-v', '--version'])
|
||||
);
|
||||
|
||||
const version = fs.readPackageJsonFrom(rootDir).version;
|
||||
@@ -138,9 +130,7 @@ class ArgParser {
|
||||
args,
|
||||
(unpacked, value, key) => {
|
||||
if (!_.isUndefined(value) && hasArgSpec(key)) {
|
||||
const {dest} = /** @type {import('../schema/arg-spec').ArgSpec} */ (
|
||||
getArgSpec(key)
|
||||
);
|
||||
const {dest} = /** @type {import('../schema/arg-spec').ArgSpec} */ (getArgSpec(key));
|
||||
_.set(unpacked, dest, value);
|
||||
} else {
|
||||
// this could be anything that _isn't_ a server arg
|
||||
|
||||
@@ -12,12 +12,12 @@ export default class PluginCommand extends ExtensionCommand {
|
||||
*
|
||||
* @param {import('./extension-command').ExtensionCommandOptions<PluginType>} opts
|
||||
*/
|
||||
constructor ({config, json}) {
|
||||
constructor({config, json}) {
|
||||
super({config, json});
|
||||
this.knownExtensions = KNOWN_PLUGINS;
|
||||
}
|
||||
|
||||
async install ({plugin, installType, packageName}) {
|
||||
async install({plugin, installType, packageName}) {
|
||||
return await super._install({
|
||||
installSpec: plugin,
|
||||
installType,
|
||||
@@ -25,19 +25,19 @@ export default class PluginCommand extends ExtensionCommand {
|
||||
});
|
||||
}
|
||||
|
||||
async uninstall ({plugin}) {
|
||||
async uninstall({plugin}) {
|
||||
return await super._uninstall({installSpec: plugin});
|
||||
}
|
||||
|
||||
async update ({plugin, unsafe}) {
|
||||
async update({plugin, unsafe}) {
|
||||
return await super._update({installSpec: plugin, unsafe});
|
||||
}
|
||||
|
||||
async run ({plugin, scriptName}) {
|
||||
async run({plugin, scriptName}) {
|
||||
return await super._run({installSpec: plugin, scriptName});
|
||||
}
|
||||
|
||||
getPostInstallText ({extName, extData}) {
|
||||
getPostInstallText({extName, extData}) {
|
||||
return `Plugin ${extName}@${extData.version} successfully installed`.green;
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ export default class PluginCommand extends ExtensionCommand {
|
||||
* @param {string} installSpec
|
||||
* @returns {void}
|
||||
*/
|
||||
validateExtensionFields (pluginMetadata, installSpec) {
|
||||
validateExtensionFields(pluginMetadata, installSpec) {
|
||||
const missingFields = REQ_PLUGIN_FIELDS.reduce(
|
||||
(acc, field) => (pluginMetadata[field] ? acc : [...acc, field]),
|
||||
[]
|
||||
|
||||
@@ -9,7 +9,7 @@ const JSON_SPACES = 4;
|
||||
* @param {boolean} json - whether we should log json or text
|
||||
* @param {any} msg - error message, object, Error instance, etc.
|
||||
*/
|
||||
function errAndQuit (json, msg) {
|
||||
function errAndQuit(json, msg) {
|
||||
if (json) {
|
||||
console.log(JSON.stringify({error: `${msg}`}, null, JSON_SPACES));
|
||||
} else {
|
||||
@@ -26,7 +26,7 @@ function errAndQuit (json, msg) {
|
||||
* @param {boolean} json - whether we are in json mode (and should therefore not log)
|
||||
* @param {string} msg - string to log
|
||||
*/
|
||||
function log (json, msg) {
|
||||
function log(json, msg) {
|
||||
!json && console.log(msg);
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ function log (json, msg) {
|
||||
* @param {string} msg - string to log
|
||||
* @param {function} fn - function to wrap with spinning
|
||||
*/
|
||||
async function spinWith (json, msg, fn) {
|
||||
async function spinWith(json, msg, fn) {
|
||||
if (json) {
|
||||
return await fn();
|
||||
}
|
||||
@@ -53,17 +53,17 @@ async function spinWith (json, msg, fn) {
|
||||
}
|
||||
|
||||
class RingBuffer {
|
||||
constructor (size = 50) {
|
||||
constructor(size = 50) {
|
||||
this.size = size;
|
||||
this.buffer = [];
|
||||
}
|
||||
getBuff () {
|
||||
getBuff() {
|
||||
return this.buffer;
|
||||
}
|
||||
dequeue () {
|
||||
dequeue() {
|
||||
this.buffer.shift();
|
||||
}
|
||||
enqueue (item) {
|
||||
enqueue(item) {
|
||||
if (this.buffer.length >= this.size) {
|
||||
this.dequeue();
|
||||
}
|
||||
@@ -71,10 +71,4 @@ class RingBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
errAndQuit,
|
||||
log,
|
||||
spinWith,
|
||||
JSON_SPACES,
|
||||
RingBuffer
|
||||
};
|
||||
export {errAndQuit, log, spinWith, JSON_SPACES, RingBuffer};
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
|
||||
import betterAjvErrors from '@sidvind/better-ajv-errors';
|
||||
import { lilconfig } from 'lilconfig';
|
||||
import {lilconfig} from 'lilconfig';
|
||||
import _ from 'lodash';
|
||||
import yaml from 'yaml';
|
||||
import { getSchema, validate } from './schema/schema';
|
||||
import {getSchema, validate} from './schema/schema';
|
||||
|
||||
/**
|
||||
* lilconfig loader to handle `.yaml` files
|
||||
* @type {import('lilconfig').LoaderSync}
|
||||
*/
|
||||
function yamlLoader (filepath, content) {
|
||||
function yamlLoader(filepath, content) {
|
||||
return yaml.parse(content);
|
||||
}
|
||||
|
||||
@@ -26,7 +25,7 @@ const rawConfig = new Map();
|
||||
* If it weren't for this cache, this would be unnecessary.
|
||||
* @type {import('lilconfig').LoaderSync}
|
||||
*/
|
||||
function jsonLoader (filepath, content) {
|
||||
function jsonLoader(filepath, content) {
|
||||
rawConfig.set(filepath, content);
|
||||
return JSON.parse(content);
|
||||
}
|
||||
@@ -37,13 +36,15 @@ function jsonLoader (filepath, content) {
|
||||
* @param {string} filepath - Path to config file
|
||||
* @returns {Promise<import('lilconfig').LilconfigResult>}
|
||||
*/
|
||||
async function loadConfigFile (lc, filepath) {
|
||||
async function loadConfigFile(lc, filepath) {
|
||||
try {
|
||||
// removing "await" will cause any rejection to _not_ be caught in this block!
|
||||
return await lc.load(filepath);
|
||||
} catch (/** @type {unknown} */err) {
|
||||
if (/** @type {NodeJS.ErrnoException} */(err).code === 'ENOENT') {
|
||||
/** @type {NodeJS.ErrnoException} */(err).message = `Config file not found at user-provided path: ${filepath}`;
|
||||
} catch (/** @type {unknown} */ err) {
|
||||
if (/** @type {NodeJS.ErrnoException} */ (err).code === 'ENOENT') {
|
||||
/** @type {NodeJS.ErrnoException} */ (
|
||||
err
|
||||
).message = `Config file not found at user-provided path: ${filepath}`;
|
||||
throw err;
|
||||
} else if (err instanceof SyntaxError) {
|
||||
// generally invalid JSON
|
||||
@@ -59,7 +60,7 @@ async function loadConfigFile (lc, filepath) {
|
||||
* @param {LilconfigAsyncSearcher} lc - lilconfig instance
|
||||
* @returns {Promise<import('lilconfig').LilconfigResult>}
|
||||
*/
|
||||
async function searchConfigFile (lc) {
|
||||
async function searchConfigFile(lc) {
|
||||
return await lc.search();
|
||||
}
|
||||
|
||||
@@ -78,7 +79,7 @@ async function searchConfigFile (lc) {
|
||||
* @throws {TypeError} If `errors` is empty
|
||||
* @returns {string}
|
||||
*/
|
||||
export function formatErrors (errors = [], config = {}, opts = {}) {
|
||||
export function formatErrors(errors = [], config = {}, opts = {}) {
|
||||
if (errors && !errors.length) {
|
||||
throw new TypeError('Array of errors must be non-empty');
|
||||
}
|
||||
@@ -97,7 +98,7 @@ export function formatErrors (errors = [], config = {}, opts = {}) {
|
||||
* @public
|
||||
* @returns {Promise<ReadConfigFileResult>} Contains config and filepath, if found, and any errors
|
||||
*/
|
||||
export async function readConfigFile (filepath, opts = {}) {
|
||||
export async function readConfigFile(filepath, opts = {}) {
|
||||
const lc = lilconfig('appium', {
|
||||
loaders: {
|
||||
'.yaml': yamlLoader,
|
||||
@@ -105,12 +106,10 @@ export async function readConfigFile (filepath, opts = {}) {
|
||||
'.json': jsonLoader,
|
||||
noExt: jsonLoader,
|
||||
},
|
||||
packageProp: 'appiumConfig'
|
||||
packageProp: 'appiumConfig',
|
||||
});
|
||||
|
||||
const result = filepath
|
||||
? await loadConfigFile(lc, filepath)
|
||||
: await searchConfigFile(lc);
|
||||
const result = filepath ? await loadConfigFile(lc, filepath) : await searchConfigFile(lc);
|
||||
|
||||
if (result?.filepath && !result?.isEmpty) {
|
||||
const {pretty = true} = opts;
|
||||
@@ -124,15 +123,11 @@ export async function readConfigFile (filepath, opts = {}) {
|
||||
json: rawConfig.get(result.filepath),
|
||||
pretty,
|
||||
});
|
||||
configResult = reason
|
||||
? {...result, errors, reason}
|
||||
: {...result, errors};
|
||||
configResult = reason ? {...result, errors, reason} : {...result, errors};
|
||||
}
|
||||
|
||||
// normalize (to camel case) all top-level property names of the config file
|
||||
configResult.config = normalizeConfig(
|
||||
/** @type {AppiumConfig} */ (configResult.config),
|
||||
);
|
||||
configResult.config = normalizeConfig(/** @type {AppiumConfig} */ (configResult.config));
|
||||
|
||||
return configResult;
|
||||
} finally {
|
||||
@@ -148,7 +143,7 @@ export async function readConfigFile (filepath, opts = {}) {
|
||||
* @param {AppiumConfig} config - Configuration object
|
||||
* @returns {NormalizedAppiumConfig} New object with camel-cased keys (or `dest` keys).
|
||||
*/
|
||||
export function normalizeConfig (config) {
|
||||
export function normalizeConfig(config) {
|
||||
const schema = getSchema();
|
||||
/**
|
||||
* @param {AppiumConfig} config
|
||||
@@ -159,8 +154,9 @@ export function normalizeConfig (config) {
|
||||
const normalize = (config, section) => {
|
||||
const obj = _.isUndefined(section) ? config : _.get(config, section, config);
|
||||
|
||||
const mappedObj = _.mapKeys(obj, (__, prop) =>
|
||||
schema.properties[prop]?.appiumCliDest ?? _.camelCase(prop),
|
||||
const mappedObj = _.mapKeys(
|
||||
obj,
|
||||
(__, prop) => schema.properties[prop]?.appiumCliDest ?? _.camelCase(prop)
|
||||
);
|
||||
|
||||
return _.mapValues(mappedObj, (value, property) => {
|
||||
|
||||
@@ -25,9 +25,7 @@ const BUILD_INFO = {
|
||||
};
|
||||
|
||||
function getNodeVersion() {
|
||||
return /** @type {import('semver').SemVer} */ (
|
||||
semver.coerce(process.version)
|
||||
);
|
||||
return /** @type {import('semver').SemVer} */ (semver.coerce(process.version));
|
||||
}
|
||||
|
||||
async function updateBuildInfo(useGithubApiFallback = false) {
|
||||
@@ -96,13 +94,9 @@ async function getGitTimestamp(commitSha, useGithubApiFallback = false) {
|
||||
const gitRoot = await findGitRoot();
|
||||
if (gitRoot) {
|
||||
try {
|
||||
const {stdout} = await exec(
|
||||
GIT_BINARY,
|
||||
['show', '-s', '--format=%ci', commitSha],
|
||||
{
|
||||
cwd: gitRoot,
|
||||
}
|
||||
);
|
||||
const {stdout} = await exec(GIT_BINARY, ['show', '-s', '--format=%ci', commitSha], {
|
||||
cwd: gitRoot,
|
||||
});
|
||||
return stdout.trim();
|
||||
} catch (ign) {}
|
||||
}
|
||||
@@ -145,9 +139,7 @@ function getBuildInfo() {
|
||||
function checkNodeOk() {
|
||||
const version = getNodeVersion();
|
||||
if (!semver.satisfies(version, MIN_NODE_VERSION)) {
|
||||
logger.errorAndThrow(
|
||||
`Node version must be ${MIN_NODE_VERSION}. Currently ${version.version}`
|
||||
);
|
||||
logger.errorAndThrow(`Node version must be ${MIN_NODE_VERSION}. Currently ${version.version}`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -206,8 +198,7 @@ function getNonDefaultServerArgs(parsedArgs) {
|
||||
const defaultValueIsArray = /** @param {string} dest */ (dest) =>
|
||||
_.isArray(defaultsFromSchema[dest]);
|
||||
|
||||
const argsValueIsArray = /** @param {string} dest */ (dest) =>
|
||||
_.isArray(args[dest].value);
|
||||
const argsValueIsArray = /** @param {string} dest */ (dest) => _.isArray(args[dest].value);
|
||||
|
||||
const arraysDiffer = /** @param {string} dest */ (dest) =>
|
||||
_.gt(_.size(_.difference(args[dest].value, defaultsFromSchema[dest])), 0);
|
||||
@@ -220,10 +211,7 @@ function getNonDefaultServerArgs(parsedArgs) {
|
||||
|
||||
// note that `_.overEvery` is like an "AND", and `_.overSome` is like an "OR"
|
||||
|
||||
const argValueNotArrayOrArraysDiffer = _.overSome([
|
||||
_.negate(argsValueIsArray),
|
||||
arraysDiffer,
|
||||
]);
|
||||
const argValueNotArrayOrArraysDiffer = _.overSome([_.negate(argsValueIsArray), arraysDiffer]);
|
||||
|
||||
const defaultValueNotArrayAndValuesDiffer = _.overEvery([
|
||||
_.negate(defaultValueIsArray),
|
||||
@@ -272,9 +260,7 @@ const compactConfig = _.partial(
|
||||
_.omitBy,
|
||||
_,
|
||||
(value, key) =>
|
||||
key === 'subcommand' ||
|
||||
_.isUndefined(value) ||
|
||||
(_.isObject(value) && _.isEmpty(value))
|
||||
key === 'subcommand' || _.isUndefined(value) || (_.isObject(value) && _.isEmpty(value))
|
||||
);
|
||||
|
||||
/**
|
||||
@@ -288,12 +274,7 @@ const compactConfig = _.partial(
|
||||
* @param {Partial<ParsedArgs>} defaults - Configuration defaults from schemas
|
||||
* @param {ParsedArgs} parsedArgs - Entire parsed args object
|
||||
*/
|
||||
function showConfig(
|
||||
nonDefaultPreConfigParsedArgs,
|
||||
configResult,
|
||||
defaults,
|
||||
parsedArgs
|
||||
) {
|
||||
function showConfig(nonDefaultPreConfigParsedArgs, configResult, defaults, parsedArgs) {
|
||||
console.log('Appium Configuration\n');
|
||||
console.log('from defaults:\n');
|
||||
console.dir(compactConfig(defaults));
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import path from 'path';
|
||||
|
||||
/**
|
||||
@@ -30,7 +29,7 @@ export const KNOWN_PLUGINS = Object.freeze(
|
||||
images: '@appium/images-plugin',
|
||||
'execute-driver': '@appium/execute-driver-plugin',
|
||||
'relaxed-caps': '@appium/relaxed-caps-plugin',
|
||||
}),
|
||||
})
|
||||
);
|
||||
|
||||
// This is a map of driver names to npm packages representing those drivers.
|
||||
@@ -50,26 +49,18 @@ export const KNOWN_DRIVERS = Object.freeze(
|
||||
flutter: 'appium-flutter-driver',
|
||||
safari: 'appium-safari-driver',
|
||||
gecko: 'appium-geckodriver',
|
||||
}),
|
||||
})
|
||||
);
|
||||
|
||||
/**
|
||||
* Relative path to directory containing any Appium internal files
|
||||
*/
|
||||
export const CACHE_DIR_RELATIVE_PATH = path.join(
|
||||
'node_modules',
|
||||
'.cache',
|
||||
'appium',
|
||||
);
|
||||
export const CACHE_DIR_RELATIVE_PATH = path.join('node_modules', '.cache', 'appium');
|
||||
|
||||
/**
|
||||
* Relative path to hashfile (from `APPIUM_HOME`) of consuming project's `package.json` (if it exists)
|
||||
*/
|
||||
export const PKG_HASHFILE_RELATIVE_PATH = path.join(
|
||||
CACHE_DIR_RELATIVE_PATH,
|
||||
'package.hash',
|
||||
);
|
||||
|
||||
export const PKG_HASHFILE_RELATIVE_PATH = path.join(CACHE_DIR_RELATIVE_PATH, 'package.hash');
|
||||
|
||||
export const EXT_SUBCOMMAND_LIST = 'list';
|
||||
export const EXT_SUBCOMMAND_INSTALL = 'install';
|
||||
|
||||
@@ -192,8 +192,7 @@ export class DriverConfig extends ExtensionConfig {
|
||||
const drivers = this.installedExtensions;
|
||||
for (const [driverName, driverData] of _.toPairs(drivers)) {
|
||||
const {automationName, platformNames} = driverData;
|
||||
const aNameMatches =
|
||||
automationName.toLowerCase() === matchAutomationName.toLowerCase();
|
||||
const aNameMatches = automationName.toLowerCase() === matchAutomationName.toLowerCase();
|
||||
const pNameMatches = _.includes(
|
||||
platformNames.map(_.toLower),
|
||||
matchPlatformName.toLowerCase()
|
||||
|
||||
@@ -71,12 +71,8 @@ export class ExtensionConfig {
|
||||
* @param {ExtRecord<ExtType>} exts - Extension data
|
||||
*/
|
||||
validate(exts) {
|
||||
const foundProblems =
|
||||
/** @type {Record<ExtName<ExtType>,Problem[]>} */ ({});
|
||||
for (const [
|
||||
extName,
|
||||
extData,
|
||||
] of /** @type {[ExtName<ExtType>, ExtManifest<ExtType>][]} */ (
|
||||
const foundProblems = /** @type {Record<ExtName<ExtType>,Problem[]>} */ ({});
|
||||
for (const [extName, extData] of /** @type {[ExtName<ExtType>, ExtManifest<ExtType>][]} */ (
|
||||
_.toPairs(exts)
|
||||
)) {
|
||||
foundProblems[extName] = [
|
||||
@@ -94,13 +90,11 @@ export class ExtensionConfig {
|
||||
// remove this extension from the list since it's not valid
|
||||
delete exts[extName];
|
||||
problemSummaries.push(
|
||||
`${this.extensionType} ${extName} had errors and will not ` +
|
||||
`be available. Errors:`
|
||||
`${this.extensionType} ${extName} had errors and will not ` + `be available. Errors:`
|
||||
);
|
||||
for (const problem of problems) {
|
||||
problemSummaries.push(
|
||||
` - ${problem.err} (Actual value: ` +
|
||||
`${JSON.stringify(problem.val)})`
|
||||
` - ${problem.err} (Actual value: ` + `${JSON.stringify(problem.val)})`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -276,10 +270,7 @@ export class ExtensionConfig {
|
||||
}
|
||||
|
||||
log.info(`Available ${this.configKey}:`);
|
||||
for (const [
|
||||
extName,
|
||||
extData,
|
||||
] of /** @type {[string, ExtManifest<ExtType>][]} */ (
|
||||
for (const [extName, extData] of /** @type {[string, ExtManifest<ExtType>][]} */ (
|
||||
_.toPairs(this.installedExtensions)
|
||||
)) {
|
||||
log.info(` - ${this.extensionDesc(extName, extData)}`);
|
||||
@@ -303,11 +294,7 @@ export class ExtensionConfig {
|
||||
* @returns {string}
|
||||
*/
|
||||
getInstallPath(extName) {
|
||||
return path.join(
|
||||
this.appiumHome,
|
||||
'node_modules',
|
||||
this.installedExtensions[extName].pkgName
|
||||
);
|
||||
return path.join(this.appiumHome, 'node_modules', this.installedExtensions[extName].pkgName);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -355,18 +342,13 @@ export class ExtensionConfig {
|
||||
}
|
||||
let moduleObject;
|
||||
if (_.isString(argSchemaPath)) {
|
||||
const schemaPath = resolveFrom(
|
||||
appiumHome,
|
||||
path.join(pkgName, argSchemaPath)
|
||||
);
|
||||
const schemaPath = resolveFrom(appiumHome, path.join(pkgName, argSchemaPath));
|
||||
moduleObject = require(schemaPath);
|
||||
} else {
|
||||
moduleObject = argSchemaPath;
|
||||
}
|
||||
// this sucks. default exports should be destroyed
|
||||
const schema = moduleObject.__esModule
|
||||
? moduleObject.default
|
||||
: moduleObject;
|
||||
const schema = moduleObject.__esModule ? moduleObject.default : moduleObject;
|
||||
registerSchema(extType, extName, schema);
|
||||
return schema;
|
||||
}
|
||||
@@ -399,13 +381,7 @@ export class ExtensionConfig {
|
||||
}
|
||||
}
|
||||
|
||||
export {
|
||||
INSTALL_TYPE_NPM,
|
||||
INSTALL_TYPE_GIT,
|
||||
INSTALL_TYPE_LOCAL,
|
||||
INSTALL_TYPE_GITHUB,
|
||||
INSTALL_TYPES,
|
||||
};
|
||||
export {INSTALL_TYPE_NPM, INSTALL_TYPE_GIT, INSTALL_TYPE_LOCAL, INSTALL_TYPE_GITHUB, INSTALL_TYPES};
|
||||
|
||||
/**
|
||||
* Config problem
|
||||
|
||||
@@ -20,11 +20,9 @@ export async function loadExtensions(appiumHome) {
|
||||
const manifest = Manifest.getInstance(appiumHome);
|
||||
const {drivers, plugins} = await manifest.read();
|
||||
const driverConfig =
|
||||
DriverConfig.getInstance(manifest) ??
|
||||
DriverConfig.create(manifest, {extData: drivers});
|
||||
DriverConfig.getInstance(manifest) ?? DriverConfig.create(manifest, {extData: drivers});
|
||||
const pluginConfig =
|
||||
PluginConfig.getInstance(manifest) ??
|
||||
PluginConfig.create(manifest, {extData: plugins});
|
||||
PluginConfig.getInstance(manifest) ?? PluginConfig.create(manifest, {extData: plugins});
|
||||
return {driverConfig, pluginConfig};
|
||||
}
|
||||
|
||||
@@ -75,10 +73,7 @@ export function getActivePlugins(pluginConfig, usePlugins = []) {
|
||||
export function getActiveDrivers(driverConfig, useDrivers = []) {
|
||||
return _.compact(
|
||||
Object.keys(driverConfig.installedExtensions)
|
||||
.filter(
|
||||
(driverName) =>
|
||||
_.includes(useDrivers, driverName) || useDrivers.length === 0
|
||||
)
|
||||
.filter((driverName) => _.includes(useDrivers, driverName) || useDrivers.length === 0)
|
||||
.map((driverName) => {
|
||||
try {
|
||||
log.info(`Attempting to load driver ${driverName}...`);
|
||||
|
||||
@@ -182,20 +182,14 @@ export class Manifest {
|
||||
const walkOpts = _.defaults({depthLimit}, DEFAULT_FIND_EXTENSIONS_OPTS);
|
||||
// this could be parallelized, but we can't use fs.walk as an async iterator
|
||||
let didChange = false;
|
||||
for await (const {stats, path: filepath} of fs.walk(
|
||||
this._appiumHome,
|
||||
walkOpts
|
||||
)) {
|
||||
for await (const {stats, path: filepath} of fs.walk(this._appiumHome, walkOpts)) {
|
||||
if (filepath !== this._appiumHome && stats.isDirectory()) {
|
||||
try {
|
||||
const pkg = await env.readPackageInDir(filepath);
|
||||
if (pkg && isExtension(pkg)) {
|
||||
// it's possible that this extension already exists in the manifest,
|
||||
// so only update `didChange` if it's new.
|
||||
const added = this.addExtensionFromPackage(
|
||||
pkg,
|
||||
path.join(filepath, 'package.json')
|
||||
);
|
||||
const added = this.addExtensionFromPackage(pkg, path.join(filepath, 'package.json'));
|
||||
didChange = didChange || added;
|
||||
}
|
||||
} catch {}
|
||||
@@ -262,9 +256,7 @@ export class Manifest {
|
||||
return false;
|
||||
} else {
|
||||
throw new TypeError(
|
||||
`The extension in ${path.dirname(
|
||||
pkgPath
|
||||
)} is neither a valid driver nor a valid plugin.`
|
||||
`The extension in ${path.dirname(pkgPath)} is neither a valid driver nor a valid plugin.`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -419,11 +411,7 @@ export class Manifest {
|
||||
);
|
||||
}
|
||||
try {
|
||||
await fs.writeFile(
|
||||
this._manifestPath,
|
||||
YAML.stringify(this._data),
|
||||
'utf8'
|
||||
);
|
||||
await fs.writeFile(this._manifestPath, YAML.stringify(this._data), 'utf8');
|
||||
return true;
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
|
||||
import { fs } from '@appium/support';
|
||||
import { isPackageChanged } from 'package-changed';
|
||||
import {fs} from '@appium/support';
|
||||
import {isPackageChanged} from 'package-changed';
|
||||
import path from 'path';
|
||||
import { PKG_HASHFILE_RELATIVE_PATH } from '../constants';
|
||||
import {PKG_HASHFILE_RELATIVE_PATH} from '../constants';
|
||||
import log from '../logger';
|
||||
|
||||
/**
|
||||
@@ -14,7 +13,7 @@ import log from '../logger';
|
||||
* @param {string} appiumHome
|
||||
* @returns {Promise<boolean>} `true` if `package.json` `appiumHome` changed
|
||||
*/
|
||||
export async function packageDidChange (appiumHome) {
|
||||
export async function packageDidChange(appiumHome) {
|
||||
const hashFilename = path.join(appiumHome, PKG_HASHFILE_RELATIVE_PATH);
|
||||
|
||||
// XXX: the types in `package-changed` seem to be wrong.
|
||||
@@ -35,7 +34,7 @@ export async function packageDidChange (appiumHome) {
|
||||
await fs.mkdirp(hashFilenameDir);
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Appium could not create the directory for hash file: ${hashFilenameDir}. Original error: ${err.message}`,
|
||||
`Appium could not create the directory for hash file: ${hashFilenameDir}. Original error: ${err.message}`
|
||||
);
|
||||
}
|
||||
|
||||
@@ -51,10 +50,12 @@ export async function packageDidChange (appiumHome) {
|
||||
if (isChanged) {
|
||||
try {
|
||||
writeHash();
|
||||
log.debug(`Updated hash of ${appiumHome}/package.json from: ${oldHash ?? '(none)'} to: ${hash}`);
|
||||
log.debug(
|
||||
`Updated hash of ${appiumHome}/package.json from: ${oldHash ?? '(none)'} to: ${hash}`
|
||||
);
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Appium could not write hash file: ${hashFilenameDir}. Original error: ${err.message}`,
|
||||
`Appium could not write hash file: ${hashFilenameDir}. Original error: ${err.message}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -89,9 +89,7 @@ export class PluginConfig extends ExtensionConfig {
|
||||
}
|
||||
|
||||
log.info(`Available plugins:`);
|
||||
for (const [pluginName, pluginData] of _.toPairs(
|
||||
this.installedExtensions
|
||||
)) {
|
||||
for (const [pluginName, pluginData] of _.toPairs(this.installedExtensions)) {
|
||||
const activeTxt = _.includes(activeNames, pluginName) ? ' (ACTIVE)' : '';
|
||||
log.info(` - ${this.extensionDesc(pluginName, pluginData)}${activeTxt}`);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import axios from 'axios';
|
||||
import { fs } from '@appium/support';
|
||||
import {fs} from '@appium/support';
|
||||
import logger from './logger';
|
||||
import _ from 'lodash';
|
||||
|
||||
|
||||
const hubUri = (config) => {
|
||||
const protocol = config.hubProtocol || 'http';
|
||||
return `${protocol}://${config.hubHost}:${config.hubPort}`;
|
||||
@@ -16,20 +15,24 @@ const hubUri = (config) => {
|
||||
* @param {number} [port] - Bind to this port
|
||||
* @param {string} [basePath] - Base path for the grid
|
||||
*/
|
||||
async function registerNode (data, addr, port, basePath) {
|
||||
async function registerNode(data, addr, port, basePath) {
|
||||
let configFilePath;
|
||||
if (_.isString(data)) {
|
||||
configFilePath = data;
|
||||
try {
|
||||
data = await fs.readFile(data, 'utf-8');
|
||||
} catch (err) {
|
||||
logger.error(`Unable to load node configuration file ${configFilePath} to register with grid: ${err.message}`);
|
||||
logger.error(
|
||||
`Unable to load node configuration file ${configFilePath} to register with grid: ${err.message}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
data = JSON.parse(data);
|
||||
} catch (err) {
|
||||
logger.errorAndThrow(`Syntax error in node configuration file ${configFilePath}: ${err.message}`);
|
||||
logger.errorAndThrow(
|
||||
`Syntax error in node configuration file ${configFilePath}: ${err.message}`
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -37,20 +40,21 @@ async function registerNode (data, addr, port, basePath) {
|
||||
postRequest(data, addr, port, basePath);
|
||||
}
|
||||
|
||||
async function registerToGrid (postOptions, configHolder) {
|
||||
async function registerToGrid(postOptions, configHolder) {
|
||||
try {
|
||||
const {status} = await axios(postOptions);
|
||||
if (status !== 200) {
|
||||
throw new Error(`Request failed with code ${status}`);
|
||||
}
|
||||
logger.debug(`Appium successfully registered with the the grid on ` +
|
||||
hubUri(configHolder.configuration));
|
||||
logger.debug(
|
||||
`Appium successfully registered with the the grid on ` + hubUri(configHolder.configuration)
|
||||
);
|
||||
} catch (err) {
|
||||
logger.error(`An attempt to register with the grid was unsuccessful: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
function postRequest (configHolder, addr, port, basePath) {
|
||||
function postRequest(configHolder, addr, port, basePath) {
|
||||
// Move Selenium 3 configuration properties to configuration object
|
||||
if (!_.has(configHolder, 'configuration')) {
|
||||
let configuration = {};
|
||||
@@ -68,7 +72,11 @@ function postRequest (configHolder, addr, port, basePath) {
// otherwise, we will take whatever the user setup
// because we will always set localhost/127.0.0.1. this won't work if your
// node and grid aren't in the same place
if (!configHolder.configuration.url || !configHolder.configuration.host || !configHolder.configuration.port) {
if (
!configHolder.configuration.url ||
!configHolder.configuration.host ||
!configHolder.configuration.port
) {
configHolder.configuration.url = `http://${addr}:${port}${basePath}`;
configHolder.configuration.host = addr;
configHolder.configuration.port = port;
@@ -92,26 +100,30 @@ function postRequest (configHolder, addr, port, basePath) {

const registerCycleInterval = configHolder.configuration.registerCycle;
if (isNaN(registerCycleInterval) || registerCycleInterval <= 0) {
logger.warn(`'registerCycle' is not a valid positive number. ` +
`No registration request will be sent to the grid.`);
logger.warn(
`'registerCycle' is not a valid positive number. ` +
`No registration request will be sent to the grid.`
);
return;
}
// initiate a new Thread
let first = true;
logger.debug(`Starting auto register thread for the grid. ` +
`Will try to register every ${registerCycleInterval} ms.`);
setInterval(async function registerRetry () {
logger.debug(
`Starting auto register thread for the grid. ` +
`Will try to register every ${registerCycleInterval} ms.`
);
setInterval(async function registerRetry() {
if (first) {
first = false;
await registerToGrid(regRequest, configHolder);
} else if (!await isAlreadyRegistered(configHolder)) {
} else if (!(await isAlreadyRegistered(configHolder))) {
// make the http POST to the grid for registration
await registerToGrid(regRequest, configHolder);
}
}, registerCycleInterval);
}

async function isAlreadyRegistered (configHolder) {
async function isAlreadyRegistered(configHolder) {
//check if node is already registered
const id = configHolder.configuration.id;
try {
@@ -132,5 +144,4 @@ async function isAlreadyRegistered (configHolder) {
}
}


export default registerNode;

@@ -1,5 +1,4 @@
import { logger } from '@appium/support';

import {logger} from '@appium/support';

let log = logger.getLogger('Appium');


@@ -1,9 +1,8 @@
import npmlog from 'npmlog';
import { createLogger, format, transports } from 'winston';
import { fs, logger } from '@appium/support';
import {createLogger, format, transports} from 'winston';
import {fs, logger} from '@appium/support';
import _ from 'lodash';


// set up distributed logging before everything else
logger.patchLogger(npmlog);
global._global_npmlog = npmlog;
@@ -39,16 +38,13 @@ let useLocalTimeZone = false;

// add the timestamp in the correct format to the log info object
const timestampFormat = format.timestamp({
format () {
format() {
let date = new Date();
if (useLocalTimeZone) {
date = new Date(date.valueOf() - date.getTimezoneOffset() * 60000);
}
// '2012-11-04T14:51:06.157Z' -> '2012-11-04 14:51:06:157'
return date.toISOString()
.replace(/[TZ]/g, ' ')
.replace(/\./g, ':')
.trim();
return date.toISOString().replace(/[TZ]/g, ' ').replace(/\./g, ':').trim();
},
});

@@ -58,14 +54,14 @@ const colorizeFormat = format.colorize({
});

// Strip the color marking within messages
const stripColorFormat = format(function stripColor (info) {
const stripColorFormat = format(function stripColor(info) {
const code = /\u001b\[(\d+(;\d+)*)?m/g; // eslint-disable-line no-control-regex
info.message = info.message.replace(code, '');
return info;
})();

function createConsoleTransport (args, logLvl) {
return new (transports.Console)({
function createConsoleTransport(args, logLvl) {
return new transports.Console({
// `name` is unsupported per winston's type declarations
// @ts-expect-error
name: 'console',
@@ -75,7 +71,7 @@ function createConsoleTransport (args, logLvl) {
level: logLvl,
stderrLevels: ['error'],
format: format.combine(
format(function adjustDebug (info) {
format(function adjustDebug(info) {
// prepend debug marker, and shift to `info` log level
if (info.level === 'debug') {
info.level = 'info';
@@ -85,15 +81,15 @@ function createConsoleTransport (args, logLvl) {
})(),
timestampFormat,
args.logNoColors ? stripColorFormat : colorizeFormat,
format.printf(function printInfo (info) {
format.printf(function printInfo(info) {
return `${args.logTimestamp ? `${info.timestamp} - ` : ''}${info.message}`;
})
),
});
}

function createFileTransport (args, logLvl) {
return new (transports.File)({
function createFileTransport(args, logLvl) {
return new transports.File({
// @ts-expect-error
name: 'file',
filename: args.logFile,
@@ -105,14 +101,14 @@ function createFileTransport (args, logLvl) {
format: format.combine(
stripColorFormat,
timestampFormat,
format.printf(function printInfo (info) {
format.printf(function printInfo(info) {
return `${info.timestamp} ${info.message}`;
})
)
),
});
}

function createHttpTransport (args, logLvl) {
function createHttpTransport(args, logLvl) {
let host = '127.0.0.1';
let port = 9003;

@@ -122,7 +118,7 @@ function createHttpTransport (args, logLvl) {
port = parseInt(hostAndPort[1], 10);
}

return new (transports.Http)({
return new transports.Http({
// @ts-expect-error
name: 'http',
host,
@@ -134,14 +130,14 @@ function createHttpTransport (args, logLvl) {
level: logLvl,
format: format.combine(
stripColorFormat,
format.printf(function printInfo (info) {
format.printf(function printInfo(info) {
return `${info.timestamp} ${info.message}`;
})
),
});
}

async function createTransports (args) {
async function createTransports(args) {
let transports = [];
let consoleLogLevel = null;
let fileLogLevel = null;
@@ -169,8 +165,9 @@ async function createTransports (args) {
transports.push(createFileTransport(args, fileLogLevel));
} catch (e) {
// eslint-disable-next-line no-console
console.log(`Tried to attach logging to file '${args.logFile}' but an error ` +
`occurred: ${e.message}`);
console.log(
`Tried to attach logging to file '${args.logFile}' but an error ` + `occurred: ${e.message}`
);
}
}

@@ -179,15 +176,17 @@ async function createTransports (args) {
transports.push(createHttpTransport(args, fileLogLevel));
} catch (e) {
// eslint-disable-next-line no-console
console.log(`Tried to attach logging to Http at ${args.webhook} but ` +
`an error occurred: ${e.message}`);
console.log(
`Tried to attach logging to Http at ${args.webhook} but ` +
`an error occurred: ${e.message}`
);
}
}

return transports;
}

async function init (args) {
async function init(args) {
// set de facto param passed to timestamp function
useLocalTimeZone = args.localTimezone;

@@ -211,11 +210,10 @@ async function init (args) {
if (args.logHandler && _.isFunction(args.logHandler)) {
args.logHandler(logObj.level, msg);
}

});
}

function clear () {
function clear() {
if (log) {
for (let transport of _.keys(log.transports)) {
log.remove(transport);
@@ -224,6 +222,5 @@ function clear () {
npmlog.removeAllListeners('log');
}


export { init, clear };
export {init, clear};
export default init;

@@ -3,10 +3,7 @@
import {init as logsinkInit} from './logsink'; // this import needs to come first since it sets up global npmlog
import logger from './logger'; // logger needs to remain second
// @ts-ignore
import {
routeConfiguringFunction as makeRouter,
server as baseServer,
} from '@appium/base-driver';
import {routeConfiguringFunction as makeRouter, server as baseServer} from '@appium/base-driver';
import {logger as logFactory, util, env} from '@appium/support';
import {asyncify} from 'asyncbox';
import _ from 'lodash';
@@ -115,8 +112,7 @@ async function logStartupInfo(args) {
* @returns {void}
*/
function logServerPort(address, port) {
let logMessage =
`Appium REST http interface listener started on ` + `${address}:${port}`;
let logMessage = `Appium REST http interface listener started on ` + `${address}:${port}`;
logger.info(logMessage);
}

@@ -140,7 +136,7 @@ function getExtraMethodMap(driverClasses, pluginClasses) {
return [...driverClasses, ...pluginClasses].reduce(
(map, klass) => ({
...map,
.../** @type {DriverClass} */ ((klass).newMethodMap ?? {}),
.../** @type {DriverClass} */ (klass.newMethodMap ?? {}),
}),
{}
);
@@ -228,29 +224,19 @@ async function init(args) {
const defaults = getDefaultsForSchema(false);

/** @type {ParsedArgs} */
const serverArgs = _.defaultsDeep(
preConfigArgs,
configResult.config?.server,
defaults
);
const serverArgs = _.defaultsDeep(preConfigArgs, configResult.config?.server, defaults);

if (preConfigArgs.showConfig) {
showConfig(
getNonDefaultServerArgs(preConfigArgs),
configResult,
defaults,
serverArgs
);
showConfig(getNonDefaultServerArgs(preConfigArgs), configResult, defaults, serverArgs);
return {};
}

await logsinkInit(serverArgs);

if (serverArgs.logFilters) {
const {issues, rules} =
await logFactory.loadSecureValuesPreprocessingRules(
serverArgs.logFilters
);
const {issues, rules} = await logFactory.loadSecureValuesPreprocessingRules(
serverArgs.logFilters
);
if (!_.isEmpty(issues)) {
throw new Error(
`The log filtering rules config '${serverArgs.logFilters}' has issues: ` +
@@ -263,11 +249,9 @@ async function init(args) {
);
} else {
logger.info(
`Loaded ${util.pluralize(
'filtering rule',
rules.length,
true
)} from '${serverArgs.logFilters}'`
`Loaded ${util.pluralize('filtering rule', rules.length, true)} from '${
serverArgs.logFilters
}'`
);
}
}
@@ -294,8 +278,9 @@ async function init(args) {
* @returns {Promise<import('@appium/types').AppiumServer|undefined>}
*/
async function main(args) {
const {appiumDriver, parsedArgs, pluginConfig, driverConfig} =
/** @type {ServerInitResult} */ (await init(args));
const {appiumDriver, parsedArgs, pluginConfig, driverConfig} = /** @type {ServerInitResult} */ (
await init(args)
);

if (!appiumDriver || !parsedArgs || !pluginConfig || !driverConfig) {
// if this branch is taken, we've run a different subcommand, so there's nothing

@@ -96,7 +96,7 @@ export class ArgSpec {
|
||||
* @param {string} name
|
||||
* @param {ArgSpecOptions<D>} [opts]
|
||||
*/
|
||||
constructor (name, {extType, extName, dest, defaultValue} = {}) {
|
||||
constructor(name, {extType, extName, dest, defaultValue} = {}) {
|
||||
// we must normalize the extension name to fit into our convention for CLI
|
||||
// args.
|
||||
const arg = ArgSpec.toArg(name, extType, extName);
|
||||
@@ -107,8 +107,7 @@ export class ArgSpec {
|
||||
// to use bracket syntax when accessing props on the parsed args object.
|
||||
const rawDest = _.camelCase(dest ?? name);
|
||||
|
||||
const destKeypath =
|
||||
extType && extName ? [extType, extName, rawDest].join('.') : rawDest;
|
||||
const destKeypath = extType && extName ? [extType, extName, rawDest].join('.') : rawDest;
|
||||
|
||||
this.defaultValue = defaultValue;
|
||||
this.name = name;
|
||||
@@ -129,7 +128,7 @@ export class ArgSpec {
|
||||
* @param {string} [extName] - Extension name
|
||||
* @returns {string} Schema ID
|
||||
*/
|
||||
static toSchemaRef (name, extType, extName) {
|
||||
static toSchemaRef(name, extType, extName) {
|
||||
const baseRef = ArgSpec.toSchemaBaseRef(extType, extName);
|
||||
if (extType && extName) {
|
||||
return [`${baseRef}#`, PROPERTIES, name].join('/');
|
||||
@@ -142,7 +141,7 @@ export class ArgSpec {
|
||||
* @param {ExtensionType} [extType] - Extension type
|
||||
* @param {string} [extName] - Extension name
|
||||
*/
|
||||
static toSchemaBaseRef (extType, extName) {
|
||||
static toSchemaBaseRef(extType, extName) {
|
||||
if (extType && extName) {
|
||||
return `${extType}-${ArgSpec.toNormalizedExtName(extName)}.json`;
|
||||
}
|
||||
@@ -156,7 +155,7 @@ export class ArgSpec {
|
||||
* @param {string} [extName] - Extension name
|
||||
* @returns {string} Unique ID
|
||||
*/
|
||||
static toArg (name, extType, extName) {
|
||||
static toArg(name, extType, extName) {
|
||||
const properName = _.kebabCase(name.replace(/^--?/, ''));
|
||||
if (extType && extName) {
|
||||
return [extType, _.kebabCase(extName), properName].join('-');
|
||||
@@ -169,7 +168,7 @@ export class ArgSpec {
|
||||
* @param {string} extName - Extension name
|
||||
* @returns {string} Normalized extension name
|
||||
*/
|
||||
static toNormalizedExtName (extName) {
|
||||
static toNormalizedExtName(extName) {
|
||||
return _.kebabCase(extName);
|
||||
}
|
||||
|
||||
@@ -178,13 +177,11 @@ export class ArgSpec {
|
||||
* @param {string} schemaId - Root schema ID
|
||||
* @returns { {extType?: ExtensionType, normalizedExtName?: string} }
|
||||
*/
|
||||
static extensionInfoFromRootSchemaId (schemaId) {
|
||||
static extensionInfoFromRootSchemaId(schemaId) {
|
||||
const matches = schemaId.match(SCHEMA_ID_REGEXP);
|
||||
if (matches?.groups) {
|
||||
const {extType, normalizedExtName} =
|
||||
/** @type { {extType: ExtensionType, normalizedExtName: string} } */ (
|
||||
matches.groups
|
||||
);
|
||||
/** @type { {extType: ExtensionType, normalizedExtName: string} } */ (matches.groups);
|
||||
return {extType, normalizedExtName};
|
||||
}
|
||||
return {};
|
||||
@@ -199,7 +196,7 @@ export class ArgSpec {
|
||||
* @param {ArgSpecOptions<D>} [opts] - Options
|
||||
* @returns {Readonly<ArgSpec>}
|
||||
*/
|
||||
static create (name, opts) {
|
||||
static create(name, opts) {
|
||||
return Object.freeze(new ArgSpec(name, opts));
|
||||
}
|
||||
|
||||
@@ -208,7 +205,7 @@ export class ArgSpec {
|
||||
* @returns {string}
|
||||
*/
|
||||
/* istanbul ignore next */
|
||||
toString () {
|
||||
toString() {
|
||||
let str = `[ArgSpec] ${this.name} (${this.ref})`;
|
||||
if (this.extType && this.extName) {
|
||||
str += ` (ext: ${this.extType}/${this.extName})`;
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
import {ArgumentTypeError} from 'argparse';
|
||||
import _ from 'lodash';
|
||||
import {formatErrors as formatErrors} from '../config-file';
|
||||
@@ -36,7 +35,7 @@ const SHORT_ARG_CUTOFF = 3;
|
||||
* @param {string} [alias] - the alias to convert to a flag
|
||||
* @returns {string} the flag
|
||||
*/
|
||||
function aliasToFlag (argSpec, alias) {
|
||||
function aliasToFlag(argSpec, alias) {
|
||||
const {extType, extName, name} = argSpec;
|
||||
const arg = alias ?? name;
|
||||
const isShort = arg.length < SHORT_ARG_CUTOFF;
|
||||
@@ -64,7 +63,7 @@ const screamingSnakeCase = _.flow(_.snakeCase, _.toUpper);
|
||||
* constructor options
|
||||
* @returns
|
||||
*/
|
||||
function getSchemaValidator ({ref: schemaId}, coerce = _.identity) {
|
||||
function getSchemaValidator({ref: schemaId}, coerce = _.identity) {
|
||||
/** @param {string} value */
|
||||
return (value) => {
|
||||
const coerced = coerce(value);
|
||||
@@ -72,9 +71,7 @@ function getSchemaValidator ({ref: schemaId}, coerce = _.identity) {
|
||||
if (_.isEmpty(errors)) {
|
||||
return coerced;
|
||||
}
|
||||
throw new ArgumentTypeError(
|
||||
'\n\n' + formatErrors(errors, value, {schemaId}),
|
||||
);
|
||||
throw new ArgumentTypeError('\n\n' + formatErrors(errors, value, {schemaId}));
|
||||
};
|
||||
}
|
||||
|
||||
@@ -83,7 +80,7 @@ function getSchemaValidator ({ref: schemaId}, coerce = _.identity) {
|
||||
* @param {AppiumJSONSchema} schema
|
||||
* @returns {string}
|
||||
*/
|
||||
function makeDescription (schema) {
|
||||
function makeDescription(schema) {
|
||||
const {appiumCliDescription, description = '', appiumDeprecated} = schema;
|
||||
let desc = appiumCliDescription ?? description;
|
||||
if (appiumDeprecated) {
|
||||
@@ -99,27 +96,20 @@ function makeDescription (schema) {
|
||||
* @param {ArgSpec} argSpec - Argument spec tuple
|
||||
* @returns {[string[], import('argparse').ArgumentOptions]} Tuple of flag and options
|
||||
*/
|
||||
function subSchemaToArgDef (subSchema, argSpec) {
|
||||
let {
|
||||
type,
|
||||
appiumCliAliases,
|
||||
appiumCliTransformer,
|
||||
enum: enumValues,
|
||||
} = subSchema;
|
||||
function subSchemaToArgDef(subSchema, argSpec) {
|
||||
let {type, appiumCliAliases, appiumCliTransformer, enum: enumValues} = subSchema;
|
||||
|
||||
const {name, arg} = argSpec;
|
||||
|
||||
const aliases = [
|
||||
aliasToFlag(argSpec),
|
||||
.../** @type {string[]} */ (appiumCliAliases ?? []).map((alias) =>
|
||||
aliasToFlag(argSpec, alias),
|
||||
),
|
||||
.../** @type {string[]} */ (appiumCliAliases ?? []).map((alias) => aliasToFlag(argSpec, alias)),
|
||||
];
|
||||
|
||||
/** @type {import('argparse').ArgumentOptions} */
|
||||
let argOpts = {
|
||||
required: false,
|
||||
help: makeDescription(subSchema)
|
||||
help: makeDescription(subSchema),
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -182,9 +172,7 @@ function subSchemaToArgDef (subSchema, argSpec) {
|
||||
case TYPENAMES.NULL:
|
||||
// falls through
|
||||
default: {
|
||||
throw new TypeError(
|
||||
`Schema property "${arg}": \`${type}\` type unknown or disallowed`,
|
||||
);
|
||||
throw new TypeError(`Schema property "${arg}": \`${type}\` type unknown or disallowed`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -198,15 +186,8 @@ function subSchemaToArgDef (subSchema, argSpec) {
|
||||
// by ajv during schema validation in `finalizeSchema()`. the `array` &
|
||||
// `object` types have already added a formatter (see above, so we don't do it
|
||||
// twice).
|
||||
if (
|
||||
type !== TYPENAMES.ARRAY &&
|
||||
type !== TYPENAMES.OBJECT &&
|
||||
appiumCliTransformer
|
||||
) {
|
||||
argTypeFunction = _.flow(
|
||||
argTypeFunction ?? _.identity,
|
||||
transformers[appiumCliTransformer],
|
||||
);
|
||||
if (type !== TYPENAMES.ARRAY && type !== TYPENAMES.OBJECT && appiumCliTransformer) {
|
||||
argTypeFunction = _.flow(argTypeFunction ?? _.identity, transformers[appiumCliTransformer]);
|
||||
}
|
||||
|
||||
if (argTypeFunction) {
|
||||
@@ -221,7 +202,7 @@ function subSchemaToArgDef (subSchema, argSpec) {
|
||||
argOpts.choices = enumValues.map(String);
|
||||
} else {
|
||||
throw new TypeError(
|
||||
`Problem with schema for ${arg}; \`enum\` is only supported for \`type: 'string'\``,
|
||||
`Problem with schema for ${arg}; \`enum\` is only supported for \`type: 'string'\``
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -237,13 +218,9 @@ function subSchemaToArgDef (subSchema, argSpec) {
|
||||
* @returns {import('../cli/args').ArgumentDefinitions} A map of arryas of
|
||||
* aliases to `argparse` arguments; empty if no schema found
|
||||
*/
|
||||
export function toParserArgs () {
|
||||
export function toParserArgs() {
|
||||
const flattened = flattenSchema().filter(({schema}) => !schema.appiumCliIgnored);
|
||||
return new Map(
|
||||
_.map(flattened, ({schema, argSpec}) =>
|
||||
subSchemaToArgDef(schema, argSpec),
|
||||
),
|
||||
);
|
||||
return new Map(_.map(flattened, ({schema, argSpec}) => subSchemaToArgDef(schema, argSpec)));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
|
||||
import { ArgumentTypeError } from 'argparse';
|
||||
import { readFileSync } from 'fs';
|
||||
import {ArgumentTypeError} from 'argparse';
|
||||
import {readFileSync} from 'fs';
|
||||
import _ from 'lodash';
|
||||
|
||||
/**
|
||||
@@ -17,7 +16,7 @@ import _ from 'lodash';
|
||||
* @param {string} value
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function parseCsvLine (value) {
|
||||
function parseCsvLine(value) {
|
||||
return value
|
||||
.split(',')
|
||||
.map((v) => v.trim())
|
||||
@@ -29,7 +28,7 @@ function parseCsvLine (value) {
|
||||
* @param {string} value
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function parseCsvFile (value) {
|
||||
function parseCsvFile(value) {
|
||||
return value
|
||||
.split(/\r?\n/)
|
||||
.map((v) => v.trim())
|
||||
@@ -67,18 +66,14 @@ export const transformers = {
|
||||
body = readFileSync(value, 'utf8');
|
||||
} catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
throw new ArgumentTypeError(
|
||||
`Could not read file ${body}: ${err.message}`,
|
||||
);
|
||||
throw new ArgumentTypeError(`Could not read file ${body}: ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return body ? parseCsvFile(body) : parseCsvLine(value);
|
||||
} catch (err) {
|
||||
throw new ArgumentTypeError(
|
||||
'Must be a comma-delimited string, e.g., "foo,bar,baz"',
|
||||
);
|
||||
throw new ArgumentTypeError('Must be a comma-delimited string, e.g., "foo,bar,baz"');
|
||||
}
|
||||
},
|
||||
|
||||
@@ -107,9 +102,7 @@ export const transformers = {
|
||||
try {
|
||||
const result = JSON.parse(json);
|
||||
if (!_.isPlainObject(result)) {
|
||||
throw new Error(
|
||||
`'${_.truncate(result, {length: 100})}' is not an object`,
|
||||
);
|
||||
throw new Error(`'${_.truncate(result, {length: 100})}' is not an object`);
|
||||
}
|
||||
return result;
|
||||
} catch (e) {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
|
||||
import { transformers } from './cli-transformers';
|
||||
import {transformers} from './cli-transformers';
|
||||
|
||||
/**
|
||||
* Collection of keyword definitions to add to the singleton `Ajv` instance.
|
||||
@@ -27,7 +26,8 @@ export const keywords = {
|
||||
},
|
||||
minItems: 1,
|
||||
uniqueItems: true,
|
||||
description: 'List of aliases for the argument. Aliases shorter than three (3) characters will be prefixed with a single dash; otherwise two (2).'
|
||||
description:
|
||||
'List of aliases for the argument. Aliases shorter than three (3) characters will be prefixed with a single dash; otherwise two (2).',
|
||||
},
|
||||
},
|
||||
/**
|
||||
@@ -45,7 +45,7 @@ export const keywords = {
|
||||
metaSchema: {
|
||||
type: 'string',
|
||||
minLength: 1,
|
||||
description: 'Name of the associated property in the parsed CLI arguments object'
|
||||
description: 'Name of the associated property in the parsed CLI arguments object',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -63,7 +63,7 @@ export const keywords = {
|
||||
metaSchema: {
|
||||
type: 'string',
|
||||
minLength: 1,
|
||||
description: 'Description to provide in the --help text of the CLI. Overrides `description`'
|
||||
description: 'Description to provide in the --help text of the CLI. Overrides `description`',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -77,7 +77,8 @@ export const keywords = {
|
||||
metaSchema: {
|
||||
type: 'string',
|
||||
enum: Object.keys(transformers),
|
||||
description: 'The name of a custom transformer to run against the value as provided via the CLI.'
|
||||
description:
|
||||
'The name of a custom transformer to run against the value as provided via the CLI.',
|
||||
},
|
||||
},
|
||||
|
||||
@@ -89,9 +90,10 @@ export const keywords = {
|
||||
keyword: 'appiumCliIgnored',
|
||||
metaSchema: {
|
||||
type: 'boolean',
|
||||
description: 'If `true`, Appium will not provide this property as a CLI argument. This is NOT the same as a "hidden" argument.',
|
||||
enum: [true]
|
||||
}
|
||||
description:
|
||||
'If `true`, Appium will not provide this property as a CLI argument. This is NOT the same as a "hidden" argument.',
|
||||
enum: [true],
|
||||
},
|
||||
},
|
||||
|
||||
/**
|
||||
@@ -104,9 +106,10 @@ export const keywords = {
|
||||
type: 'boolean',
|
||||
description: 'If `true`, this property will be displayed as "deprecated" to the user',
|
||||
enum: [true],
|
||||
$comment: 'JSON schema draft-2019-09 keyword `deprecated` serves the same purpose. This keyword should itself be deprecated if we move to draft-2019-09!'
|
||||
}
|
||||
}
|
||||
$comment:
|
||||
'JSON schema draft-2019-09 keyword `deprecated` serves the same purpose. This keyword should itself be deprecated if we move to draft-2019-09!',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -128,7 +131,6 @@ export const keywords = {
|
||||
* @property {boolean} [appiumDeprecated]
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* @typedef {import('ajv').KeywordDefinition} KeywordDefinition
|
||||
*/
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
|
||||
import Ajv from 'ajv';
|
||||
import addFormats from 'ajv-formats';
|
||||
import _ from 'lodash';
|
||||
import path from 'path';
|
||||
import { DRIVER_TYPE, PLUGIN_TYPE } from '../constants';
|
||||
import { AppiumConfigJsonSchema } from '@appium/schema';
|
||||
import { APPIUM_CONFIG_SCHEMA_ID, ArgSpec, SERVER_PROP_NAME } from './arg-spec';
|
||||
import { keywords } from './keywords';
|
||||
import {DRIVER_TYPE, PLUGIN_TYPE} from '../constants';
|
||||
import {AppiumConfigJsonSchema} from '@appium/schema';
|
||||
import {APPIUM_CONFIG_SCHEMA_ID, ArgSpec, SERVER_PROP_NAME} from './arg-spec';
|
||||
import {keywords} from './keywords';
|
||||
|
||||
/**
|
||||
* Key/value pairs go in... but they don't come out.
|
||||
@@ -19,7 +18,7 @@ export class RoachHotelMap extends Map {
|
||||
* @param {K} key
|
||||
* @param {V} value
|
||||
*/
|
||||
set (key, value) {
|
||||
set(key, value) {
|
||||
if (this.has(key)) {
|
||||
throw new Error(`${key} is already set`);
|
||||
}
|
||||
@@ -30,11 +29,11 @@ export class RoachHotelMap extends Map {
|
||||
* @param {K} key
|
||||
*/
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
delete (key) {
|
||||
delete(key) {
|
||||
return false;
|
||||
}
|
||||
|
||||
clear () {
|
||||
clear() {
|
||||
throw new Error(`Cannot clear RoachHotelMap`);
|
||||
}
|
||||
}
|
||||
@@ -100,7 +99,7 @@ class AppiumSchema {
|
||||
* @see https://npm.im/ajv-formats
|
||||
* @private
|
||||
*/
|
||||
constructor () {
|
||||
constructor() {
|
||||
this._ajv = AppiumSchema._instantiateAjv();
|
||||
}
|
||||
|
||||
@@ -111,7 +110,7 @@ class AppiumSchema {
|
||||
* Binds public methods to the instance.
|
||||
* @returns {AppiumSchema}
|
||||
*/
|
||||
static create () {
|
||||
static create() {
|
||||
if (!AppiumSchema._instance) {
|
||||
const instance = new AppiumSchema();
|
||||
AppiumSchema._instance = instance;
|
||||
@@ -143,7 +142,7 @@ class AppiumSchema {
|
||||
* @param {string} extName - Name
|
||||
* @returns {boolean} If registered
|
||||
*/
|
||||
hasRegisteredSchema (extType, extName) {
|
||||
hasRegisteredSchema(extType, extName) {
|
||||
return this._registeredSchemas[extType].has(extName);
|
||||
}
|
||||
|
||||
@@ -152,11 +151,11 @@ class AppiumSchema {
|
||||
* successfully and {@link AppiumSchema.reset reset} has not been called since.
|
||||
* @returns {boolean} If finalized
|
||||
*/
|
||||
isFinalized () {
|
||||
isFinalized() {
|
||||
return Boolean(this._finalizedSchemas);
|
||||
}
|
||||
|
||||
getAllArgSpecs () {
|
||||
getAllArgSpecs() {
|
||||
return this._argSpecs;
|
||||
}
|
||||
|
||||
@@ -179,11 +178,9 @@ class AppiumSchema {
|
||||
* @throws {Error} If the schema is not valid
|
||||
* @returns {Readonly<Record<string,StrictSchemaObject>>} Record of schema IDs to full schema objects
|
||||
*/
|
||||
finalize () {
|
||||
finalize() {
|
||||
if (this.isFinalized()) {
|
||||
return /** @type {NonNullable<typeof this._finalizedSchemas>} */ (
|
||||
this._finalizedSchemas
|
||||
);
|
||||
return /** @type {NonNullable<typeof this._finalizedSchemas>} */ (this._finalizedSchemas);
|
||||
}
|
||||
|
||||
const ajv = this._ajv;
|
||||
@@ -210,12 +207,7 @@ class AppiumSchema {
|
||||
}
|
||||
};
|
||||
|
||||
addArgSpecs(
|
||||
_.omit(baseSchema.properties.server.properties, [
|
||||
DRIVER_TYPE,
|
||||
PLUGIN_TYPE,
|
||||
]),
|
||||
);
|
||||
addArgSpecs(_.omit(baseSchema.properties.server.properties, [DRIVER_TYPE, PLUGIN_TYPE]));
|
||||
|
||||
/**
|
||||
* @type {Record<string,StrictSchemaObject>}
|
||||
@@ -234,8 +226,10 @@ class AppiumSchema {
|
||||
const $ref = ArgSpec.toSchemaBaseRef(extType, extName);
|
||||
schema.$id = $ref;
|
||||
schema.additionalProperties = false; // this makes `schema` become a `StrictSchemaObject`
|
||||
baseSchema.properties.server.properties[extType].properties[extName] =
|
||||
{$ref, $comment: extName};
|
||||
baseSchema.properties.server.properties[extType].properties[extName] = {
|
||||
$ref,
|
||||
$comment: extName,
|
||||
};
|
||||
ajv.validateSchema(schema, true);
|
||||
addArgSpecs(schema.properties, extType, extName);
|
||||
ajv.addSchema(schema, $ref);
|
||||
@@ -243,7 +237,7 @@ class AppiumSchema {
|
||||
});
|
||||
return baseSchema;
|
||||
},
|
||||
baseSchema,
|
||||
baseSchema
|
||||
);
|
||||
|
||||
ajv.addSchema(finalSchema, APPIUM_CONFIG_SCHEMA_ID);
|
||||
@@ -259,12 +253,12 @@ class AppiumSchema {
|
||||
* @private
|
||||
* @returns {Ajv}
|
||||
*/
|
||||
static _instantiateAjv () {
|
||||
static _instantiateAjv() {
|
||||
const ajv = addFormats(
|
||||
new Ajv({
|
||||
// without this not much validation actually happens
|
||||
allErrors: true,
|
||||
}),
|
||||
})
|
||||
);
|
||||
|
||||
// add custom keywords to ajv. see schema-keywords.js
|
||||
@@ -286,7 +280,7 @@ class AppiumSchema {
|
||||
* If you need to call {@link AppiumSchema.finalize} again, you'll want to call this first.
|
||||
* @returns {void}
|
||||
*/
|
||||
reset () {
|
||||
reset() {
|
||||
for (const schemaId of Object.keys(this._finalizedSchemas ?? {})) {
|
||||
this._ajv.removeSchema(schemaId);
|
||||
}
|
||||
@@ -313,11 +307,9 @@ class AppiumSchema {
|
||||
* @throws {SchemaNameConflictError} If the schema is an invalid
|
||||
* @returns {void}
|
||||
*/
|
||||
registerSchema (extType, extName, schema) {
|
||||
registerSchema(extType, extName, schema) {
|
||||
if (!(extType && extName) || _.isUndefined(schema)) {
|
||||
throw new TypeError(
|
||||
'Expected extension type, extension name, and a defined schema',
|
||||
);
|
||||
throw new TypeError('Expected extension type, extension name, and a defined schema');
|
||||
}
|
||||
if (!AppiumSchema.isSupportedSchemaType(schema)) {
|
||||
throw new SchemaUnsupportedSchemaError(schema, extType, extName);
|
||||
@@ -341,7 +333,7 @@ class AppiumSchema {
|
||||
* @param {string} [extName] - Extension name
|
||||
* @returns {ArgSpec|undefined} ArgSpec or `undefined` if not found
|
||||
*/
|
||||
getArgSpec (name, extType, extName) {
|
||||
getArgSpec(name, extType, extName) {
|
||||
return this._argSpecs.get(ArgSpec.toArg(name, extType, extName));
|
||||
}
|
||||
|
||||
@@ -352,7 +344,7 @@ class AppiumSchema {
|
||||
* @param {string} [extName] - Extension name
|
||||
* @returns {boolean} `true` if such an {@link ArgSpec} exists
|
||||
*/
|
||||
hasArgSpec (name, extType, extName) {
|
||||
hasArgSpec(name, extType, extName) {
|
||||
return this._argSpecs.has(ArgSpec.toArg(name, extType, extName));
|
||||
}
|
||||
|
||||
@@ -368,7 +360,7 @@ class AppiumSchema {
|
||||
* properties. Base arguments (server arguments) are always at the top level.
|
||||
* @returns {DefaultValues<Flattened>}
|
||||
*/
|
||||
getDefaults (flatten = /** @type {Flattened} */ (true)) {
|
||||
getDefaults(flatten = /** @type {Flattened} */ (true)) {
|
||||
if (!this.isFinalized()) {
|
||||
throw new SchemaFinalizationError();
|
||||
}
|
||||
@@ -383,17 +375,17 @@ class AppiumSchema {
|
||||
/** @type {DefaultReducer} */
|
||||
const reducer = flatten
|
||||
? (defaults, {defaultValue, dest}) => {
|
||||
if (!_.isUndefined(defaultValue)) {
|
||||
defaults[dest] = defaultValue;
|
||||
if (!_.isUndefined(defaultValue)) {
|
||||
defaults[dest] = defaultValue;
|
||||
}
|
||||
return defaults;
|
||||
}
|
||||
return defaults;
|
||||
}
|
||||
: (defaults, {defaultValue, dest}) => {
|
||||
if (!_.isUndefined(defaultValue)) {
|
||||
_.set(defaults, dest, defaultValue);
|
||||
}
|
||||
return defaults;
|
||||
};
|
||||
if (!_.isUndefined(defaultValue)) {
|
||||
_.set(defaults, dest, defaultValue);
|
||||
}
|
||||
return defaults;
|
||||
};
|
||||
|
||||
/** @type {DefaultValues<Flattened>} */
|
||||
const retval = {};
|
||||
@@ -407,12 +399,12 @@ class AppiumSchema {
|
||||
* @param {string} extName - Extension name
|
||||
* @returns {Record<string,ArgSpecDefaultValue>}
|
||||
*/
|
||||
getDefaultsForExtension (extType, extName) {
|
||||
getDefaultsForExtension(extType, extName) {
|
||||
if (!this.isFinalized()) {
|
||||
throw new SchemaFinalizationError();
|
||||
}
|
||||
const specs = [...this._argSpecs.values()].filter(
|
||||
(spec) => spec.extType === extType && spec.extName === extName,
|
||||
(spec) => spec.extType === extType && spec.extName === extName
|
||||
);
|
||||
return specs.reduce((defaults, {defaultValue, rawDest}) => {
|
||||
if (!_.isUndefined(defaultValue)) {
|
||||
@@ -436,7 +428,7 @@ class AppiumSchema {
|
||||
* @throws If {@link AppiumSchema.finalize} has not been called yet.
|
||||
* @returns {FlattenedSchema}
|
||||
*/
|
||||
flatten () {
|
||||
flatten() {
|
||||
const schema = this.getSchema();
|
||||
|
||||
/** @type { {properties: SchemaObject, prefix: string[]}[] } */
|
||||
@@ -463,12 +455,11 @@ class AppiumSchema {
|
||||
// this can happen if an extension schema supplies a $ref to a non-existent schema
|
||||
throw new SchemaUnknownSchemaError($ref);
|
||||
}
|
||||
const {normalizedExtName} =
|
||||
ArgSpec.extensionInfoFromRootSchemaId($ref);
|
||||
const {normalizedExtName} = ArgSpec.extensionInfoFromRootSchemaId($ref);
|
||||
if (!normalizedExtName) {
|
||||
/* istanbul ignore next */
|
||||
throw new ReferenceError(
|
||||
`Could not determine extension name from schema ID ${$ref}. This is a bug.`,
|
||||
`Could not determine extension name from schema ID ${$ref}. This is a bug.`
|
||||
);
|
||||
}
|
||||
stack.push({
|
||||
@@ -477,15 +468,11 @@ class AppiumSchema {
|
||||
});
|
||||
} else if (key !== DRIVER_TYPE && key !== PLUGIN_TYPE) {
|
||||
const [extType, extName] = prefix;
|
||||
const argSpec = this.getArgSpec(
|
||||
key,
|
||||
/** @type {ExtensionType} */ (extType),
|
||||
extName,
|
||||
);
|
||||
const argSpec = this.getArgSpec(key, /** @type {ExtensionType} */ (extType), extName);
|
||||
if (!argSpec) {
|
||||
/* istanbul ignore next */
|
||||
throw new ReferenceError(
|
||||
`Unknown argument with key ${key}, extType ${extType} and extName ${extName}. This is a bug.`,
|
||||
`Unknown argument with key ${key}, extType ${extType} and extName ${extName}. This is a bug.`
|
||||
);
|
||||
}
|
||||
flattened.push({schema: _.cloneDeep(value), argSpec});
|
||||
@@ -503,7 +490,7 @@ class AppiumSchema {
|
||||
* @throws If the schema has not yet been finalized
|
||||
* @returns {SchemaObject}
|
||||
*/
|
||||
getSchema (ref = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
getSchema(ref = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
return /** @type {SchemaObject} */ (this._getValidator(ref).schema);
|
||||
}
|
||||
|
||||
@@ -513,7 +500,7 @@ class AppiumSchema {
|
||||
* @private
|
||||
* @returns {import('ajv').ValidateFunction}
|
||||
*/
|
||||
_getValidator (id = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
_getValidator(id = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
const validator = this._ajv.getSchema(id);
|
||||
if (!validator) {
|
||||
if (id === APPIUM_CONFIG_SCHEMA_ID) {
|
||||
@@ -533,11 +520,9 @@ class AppiumSchema {
|
||||
* @public
|
||||
* @returns {import('ajv').ErrorObject[]} Array of errors, if any.
|
||||
*/
|
||||
validate (value, ref = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
validate(value, ref = APPIUM_CONFIG_SCHEMA_ID) {
|
||||
const validator = this._getValidator(ref);
|
||||
return !validator(value) && _.isArray(validator.errors)
|
||||
? [...validator.errors]
|
||||
: [];
|
||||
return !validator(value) && _.isArray(validator.errors) ? [...validator.errors] : [];
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -545,7 +530,7 @@ class AppiumSchema {
|
||||
* @param {string} filename
|
||||
* @returns {boolean}
|
||||
*/
|
||||
static isAllowedSchemaFileExtension (filename) {
|
||||
static isAllowedSchemaFileExtension(filename) {
|
||||
return ALLOWED_SCHEMA_EXTENSIONS.has(path.extname(filename));
|
||||
}
|
||||
|
||||
@@ -554,7 +539,7 @@ class AppiumSchema {
|
||||
* @param {any} schema - Schema to check
|
||||
* @returns {schema is SchemaObject}
|
||||
*/
|
||||
static isSupportedSchemaType (schema) {
|
||||
static isSupportedSchemaType(schema) {
|
||||
return _.isPlainObject(schema) && schema.$async !== true;
|
||||
}
|
||||
}
|
||||
@@ -569,7 +554,7 @@ export class SchemaFinalizationError extends Error {
|
||||
*/
|
||||
code = 'APPIUMERR_SCHEMA_FINALIZATION';
|
||||
|
||||
constructor () {
|
||||
constructor() {
|
||||
super('Schema not yet finalized; `finalize()` must be called first.');
|
||||
}
|
||||
}
|
||||
@@ -594,10 +579,8 @@ export class SchemaNameConflictError extends Error {
|
||||
* @param {ExtensionType} extType
|
||||
* @param {string} extName
|
||||
*/
|
||||
constructor (extType, extName) {
|
||||
super(
|
||||
`Name for ${extType} schema "${extName}" conflicts with an existing schema`,
|
||||
);
|
||||
constructor(extType, extName) {
|
||||
super(`Name for ${extType} schema "${extName}" conflicts with an existing schema`);
|
||||
this.data = {extType, extName};
|
||||
}
|
||||
}
|
||||
@@ -619,7 +602,7 @@ export class SchemaUnknownSchemaError extends ReferenceError {
|
||||
/**
|
||||
* @param {string} schemaId
|
||||
*/
|
||||
constructor (schemaId) {
|
||||
constructor(schemaId) {
|
||||
super(`Unknown schema: "${schemaId}"`);
|
||||
this.data = {schemaId};
|
||||
}
|
||||
@@ -647,7 +630,7 @@ export class SchemaUnsupportedSchemaError extends TypeError {
|
||||
* @param {ExtensionType} extType
|
||||
* @param {string} extName
|
||||
*/
|
||||
constructor (schema, extType, extName) {
|
||||
constructor(schema, extType, extName) {
|
||||
// https://github.com/Microsoft/TypeScript/issues/8277
|
||||
super(
|
||||
(() => {
|
||||
@@ -662,12 +645,12 @@ export class SchemaUnsupportedSchemaError extends TypeError {
|
||||
/* istanbul ignore next */
|
||||
throw new TypeError(
|
||||
`schema IS supported; this error should not be thrown (this is a bug). value of schema: ${JSON.stringify(
|
||||
schema,
|
||||
)}`,
|
||||
schema
|
||||
)}`
|
||||
);
|
||||
}
|
||||
return `${msg} schema must be a plain object without a true "$async" property`;
|
||||
})(),
|
||||
})()
|
||||
);
|
||||
this.data = {schema, extType, extName};
|
||||
}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
|
||||
import _ from 'lodash';
|
||||
import logger from './logger';
|
||||
import { processCapabilities, PROTOCOLS } from '@appium/base-driver';
|
||||
import { inspect as dump } from 'util';
|
||||
import {processCapabilities, PROTOCOLS} from '@appium/base-driver';
|
||||
import {inspect as dump} from 'util';
|
||||
|
||||
const W3C_APPIUM_PREFIX = 'appium';
|
||||
|
||||
@@ -22,12 +21,13 @@ const isStdoutTTY = process.stdout.isTTY;
|
||||
*/
|
||||
const inspect = _.flow(
|
||||
_.partialRight(
|
||||
/** @type {(object: any, options: import('util').InspectOptions) => string} */(dump),
|
||||
/** @type {(object: any, options: import('util').InspectOptions) => string} */ (dump),
|
||||
{colors: true, depth: null, compact: !isStdoutTTY}
|
||||
),
|
||||
(...args) => {
|
||||
logger.info(...args);
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
/**
|
||||
* Takes the caps that were provided in the request and translates them
|
||||
@@ -39,19 +39,25 @@ const inspect = _.flow(
|
||||
* @param {import('@appium/types').DefaultCapabilitiesConfig} [defaultCapabilities]
|
||||
* @returns {ParsedDriverCaps|InvalidCaps}
|
||||
*/
|
||||
function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constraints = {}, defaultCapabilities = {}) {
|
||||
function parseCapsForInnerDriver(
|
||||
jsonwpCapabilities,
|
||||
w3cCapabilities,
|
||||
constraints = {},
|
||||
defaultCapabilities = {}
|
||||
) {
|
||||
// Check if the caller sent JSONWP caps, W3C caps, or both
|
||||
const hasW3CCaps = _.isPlainObject(w3cCapabilities) &&
|
||||
const hasW3CCaps =
|
||||
_.isPlainObject(w3cCapabilities) &&
|
||||
(_.has(w3cCapabilities, 'alwaysMatch') || _.has(w3cCapabilities, 'firstMatch'));
|
||||
const hasJSONWPCaps = _.isPlainObject(jsonwpCapabilities);
|
||||
let desiredCaps = /** @type {ParsedDriverCaps['desiredCaps']} */({});
|
||||
let desiredCaps = /** @type {ParsedDriverCaps['desiredCaps']} */ ({});
|
||||
/** @type {ParsedDriverCaps['processedW3CCapabilities']} */
|
||||
let processedW3CCapabilities;
|
||||
/** @type {ParsedDriverCaps['processedJsonwpCapabilities']} */
|
||||
let processedJsonwpCapabilities;
|
||||
|
||||
if (!hasW3CCaps) {
|
||||
return /** @type {InvalidCaps} */({
|
||||
return /** @type {InvalidCaps} */ ({
|
||||
protocol: PROTOCOLS.W3C,
|
||||
error: new Error('W3C capabilities should be provided'),
|
||||
});
|
||||
@@ -70,16 +76,23 @@ function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constrain
|
||||
for (const [defaultCapKey, defaultCapValue] of _.toPairs(defaultCapabilities)) {
|
||||
let isCapAlreadySet = false;
|
||||
// Check if the key is already present in firstMatch entries
|
||||
for (const firstMatchEntry of (w3cCapabilities.firstMatch || [])) {
|
||||
if (_.isPlainObject(firstMatchEntry)
|
||||
&& _.has(removeAppiumPrefixes(firstMatchEntry), removeAppiumPrefix(defaultCapKey))) {
|
||||
for (const firstMatchEntry of w3cCapabilities.firstMatch || []) {
|
||||
if (
|
||||
_.isPlainObject(firstMatchEntry) &&
|
||||
_.has(removeAppiumPrefixes(firstMatchEntry), removeAppiumPrefix(defaultCapKey))
|
||||
) {
|
||||
isCapAlreadySet = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// Check if the key is already present in alwaysMatch entries
|
||||
isCapAlreadySet = isCapAlreadySet || (_.isPlainObject(w3cCapabilities.alwaysMatch)
|
||||
&& _.has(removeAppiumPrefixes(w3cCapabilities.alwaysMatch), removeAppiumPrefix(defaultCapKey)));
|
||||
isCapAlreadySet =
|
||||
isCapAlreadySet ||
|
||||
(_.isPlainObject(w3cCapabilities.alwaysMatch) &&
|
||||
_.has(
|
||||
removeAppiumPrefixes(w3cCapabilities.alwaysMatch),
|
||||
removeAppiumPrefix(defaultCapKey)
|
||||
));
|
||||
if (isCapAlreadySet) {
|
||||
// Skip if the key is already present in the provided caps
|
||||
continue;
|
||||
@@ -94,7 +107,10 @@ function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constrain
|
||||
}
|
||||
}
|
||||
if (hasJSONWPCaps) {
|
||||
jsonwpCapabilities = {...removeAppiumPrefixes(defaultCapabilities), ...jsonwpCapabilities};
|
||||
jsonwpCapabilities = {
|
||||
...removeAppiumPrefixes(defaultCapabilities),
|
||||
...jsonwpCapabilities,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -111,7 +127,7 @@ function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constrain
|
||||
desiredCaps = processCapabilities(w3cCapabilities, constraints, true);
|
||||
} catch (error) {
|
||||
logger.info(`Could not parse W3C capabilities: ${error.message}`);
|
||||
return /** @type {InvalidCaps} */({
|
||||
return /** @type {InvalidCaps} */ ({
|
||||
desiredCaps,
|
||||
processedJsonwpCapabilities,
|
||||
processedW3CCapabilities,
|
||||
@@ -127,7 +143,12 @@ function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constrain
|
||||
};
|
||||
}
|
||||
|
||||
return /** @type {ParsedDriverCaps} */({desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities, protocol});
|
||||
return /** @type {ParsedDriverCaps} */ ({
|
||||
desiredCaps,
|
||||
processedJsonwpCapabilities,
|
||||
processedW3CCapabilities,
|
||||
protocol,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -135,7 +156,7 @@ function parseCapsForInnerDriver (jsonwpCapabilities, w3cCapabilities, constrain
|
||||
* @param {Capabilities} caps Desired capabilities object
|
||||
* @returns {AppiumW3CCapabilities}
|
||||
*/
|
||||
function insertAppiumPrefixes (caps) {
|
||||
function insertAppiumPrefixes(caps) {
|
||||
// Standard, non-prefixed capabilities (see https://www.w3.org/TR/webdriver/#dfn-table-of-standard-capabilities)
|
||||
const STANDARD_CAPS = [
|
||||
'browserName',
|
||||
@@ -146,7 +167,7 @@ function insertAppiumPrefixes (caps) {
|
||||
'proxy',
|
||||
'setWindowRect',
|
||||
'timeouts',
|
||||
'unhandledPromptBehavior'
|
||||
'unhandledPromptBehavior',
|
||||
];
|
||||
|
||||
let prefixedCaps = {};
|
||||
@@ -165,7 +186,7 @@ function insertAppiumPrefixes (caps) {
|
||||
* @param {AppiumW3CCapabilities} caps
|
||||
* @returns {Capabilities}
|
||||
*/
|
||||
function removeAppiumPrefixes (caps) {
|
||||
function removeAppiumPrefixes(caps) {
|
||||
if (!_.isPlainObject(caps)) {
|
||||
return caps;
|
||||
}
|
||||
@@ -178,12 +199,12 @@ function removeAppiumPrefixes (caps) {
|
||||
return fixedCaps;
|
||||
}
|
||||
|
||||
function removeAppiumPrefix (key) {
|
||||
function removeAppiumPrefix(key) {
|
||||
const prefix = `${W3C_APPIUM_PREFIX}:`;
|
||||
return _.startsWith(key, prefix) ? key.substring(prefix.length) : key;
|
||||
}
|
||||
|
||||
function getPackageVersion (pkgName) {
|
||||
function getPackageVersion(pkgName) {
|
||||
const pkgInfo = require(`${pkgName}/package.json`) || {};
|
||||
return pkgInfo.version;
|
||||
}
|
||||
@@ -204,7 +225,7 @@ function getPackageVersion (pkgName) {
|
||||
* setting items or a dictionary containing parsed Appium setting names along with
|
||||
* their values.
|
||||
*/
|
||||
function pullSettings (caps) {
|
||||
function pullSettings(caps) {
|
||||
if (!_.isPlainObject(caps) || _.isEmpty(caps)) {
|
||||
return {};
|
||||
}
|
||||
@@ -223,8 +244,12 @@ function pullSettings (caps) {
|
||||
}
|
||||
|
||||
export {
|
||||
inspect, parseCapsForInnerDriver, insertAppiumPrefixes,
|
||||
getPackageVersion, pullSettings, removeAppiumPrefixes
|
||||
inspect,
|
||||
parseCapsForInnerDriver,
|
||||
insertAppiumPrefixes,
|
||||
getPackageVersion,
|
||||
pullSettings,
|
||||
removeAppiumPrefixes,
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -1,29 +1,29 @@
const { remote } = require('webdriverio')
const {remote} = require('webdriverio');

const capabilities = {
'platformName': 'Android',
platformName: 'Android',
'appium:automationName': 'UiAutomator2',
'appium:deviceName': 'Android',
'appium:appPackage': 'com.android.settings',
'appium:appActivity': '.Settings',
}
};

const wdOpts = {
host: process.env.APPIUM_HOST || 'localhost',
port: parseInt(process.env.APPIUM_PORT, 10) || 4723,
logLevel: 'info',
capabilities,
}
};

async function runTest () {
const driver = await remote(wdOpts)
async function runTest() {
const driver = await remote(wdOpts);
try {
const batteryItem = await driver.$('//*[@text="Battery"]')
await batteryItem.click()
const batteryItem = await driver.$('//*[@text="Battery"]');
await batteryItem.click();
} finally {
await driver.pause(1000)
await driver.deleteSession()
await driver.pause(1000);
await driver.deleteSession();
}
}

runTest().catch(console.error)
runTest().catch(console.error);

@@ -4,7 +4,12 @@ const path = require('path');
|
||||
const childProcess = require('child_process');
|
||||
const _ = require('lodash');
|
||||
|
||||
const res = JSON.parse(childProcess.execSync('npm pack --dry-run --json --ignore-scripts', {cwd: path.join(__dirname, '..'), encoding: 'utf8'}))[0];
|
||||
const res = JSON.parse(
|
||||
childProcess.execSync('npm pack --dry-run --json --ignore-scripts', {
|
||||
cwd: path.join(__dirname, '..'),
|
||||
encoding: 'utf8',
|
||||
})
|
||||
)[0];
|
||||
|
||||
// List of files we are testing to make sure they are included in package
|
||||
const testFiles = [
|
||||
@@ -16,8 +21,10 @@ const testFiles = [
|
||||
const missingFiles = _.without(testFiles, ..._.map(res.files, 'path'));
|
||||
|
||||
if (!_.isEmpty(missingFiles)) {
|
||||
throw new Error(`Files [${missingFiles.join(', ')}] are not included in package.json "files". ` +
|
||||
`Please make sure these files are included before publishing.`);
|
||||
throw new Error(
|
||||
`Files [${missingFiles.join(', ')}] are not included in package.json "files". ` +
|
||||
`Please make sure these files are included before publishing.`
|
||||
);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
|
||||
@@ -4,21 +4,21 @@
|
||||
|
||||
const path = require('path');
|
||||
const yaml = require('yaml');
|
||||
const { fs, util } = require('@appium/support');
|
||||
const {fs, util} = require('@appium/support');
|
||||
const validate = require('validate.js');
|
||||
const Handlebars = require('handlebars');
|
||||
const _ = require('lodash');
|
||||
const { asyncify } = require('asyncbox');
|
||||
const {asyncify} = require('asyncbox');
|
||||
const url = require('url');
|
||||
const log = require('fancy-log');
|
||||
|
||||
validate.validators.array = function array (value, options, key, attributes) {
|
||||
validate.validators.array = function array(value, options, key, attributes) {
|
||||
if (attributes[key] && !validate.isArray(attributes[key])) {
|
||||
return `must be an array`;
|
||||
}
|
||||
};
|
||||
|
||||
validate.validators.hasAttributes = function hasAttributes (value, options) {
|
||||
validate.validators.hasAttributes = function hasAttributes(value, options) {
|
||||
if (!value) {
|
||||
return;
|
||||
}
|
||||
@@ -36,7 +36,7 @@ validate.validators.hasAttributes = function hasAttributes (value, options) {
|
||||
}
|
||||
};
|
||||
|
||||
validate.validators.hasPossibleAttributes = function hasPossibleAttributes (value, options) {
|
||||
validate.validators.hasPossibleAttributes = function hasPossibleAttributes(value, options) {
|
||||
if (!value) {
|
||||
return;
|
||||
}
|
||||
@@ -62,30 +62,40 @@ const CLIENT_URL_TYPES = {
|
||||
};
|
||||
|
||||
const validator = {
|
||||
'name': {presence: true},
|
||||
'short_description': {presence: true},
|
||||
'example_usage': {},
|
||||
name: {presence: true},
|
||||
short_description: {presence: true},
|
||||
example_usage: {},
|
||||
'example_usage.java': {},
|
||||
'example_usage.javascript_wdio': {},
|
||||
'example_usage.javascript_wd': {},
|
||||
'example_usage.ruby': {},
|
||||
'example_usage.ruby_core': {},
|
||||
'example_usage.csharp': {},
|
||||
'description': {},
|
||||
description: {},
|
||||
'client_docs.java': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'client_docs.javascript_wdio': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'client_docs.javascript_wd': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'client_docs.javascript_wdio': {
|
||||
hasPossibleAttributes: _.keys(CLIENT_URL_TYPES),
|
||||
},
|
||||
'client_docs.javascript_wd': {
|
||||
hasPossibleAttributes: _.keys(CLIENT_URL_TYPES),
|
||||
},
|
||||
'client_docs.ruby': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'client_docs.ruby_core': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'client_docs.csharp': {hasPossibleAttributes: _.keys(CLIENT_URL_TYPES)},
|
||||
'endpoint': {presence: true},
|
||||
'driver_support': {presence: true},
|
||||
endpoint: {presence: true},
|
||||
driver_support: {presence: true},
|
||||
'endpoint.url': {presence: true},
|
||||
'endpoint.url_parameters': {array: true, hasAttributes: ['name', 'description']},
|
||||
'endpoint.json_parameters': {array: true, hasAttributes: ['name', 'description']},
|
||||
'endpoint.response': {hasAttributes: ['type', 'description'] },
|
||||
'specifications': {presence: true},
|
||||
'links': {array: true, hasAttributes: ['name', 'url']},
|
||||
'endpoint.url_parameters': {
|
||||
array: true,
|
||||
hasAttributes: ['name', 'description'],
|
||||
},
|
||||
'endpoint.json_parameters': {
|
||||
array: true,
|
||||
hasAttributes: ['name', 'description'],
|
||||
},
|
||||
'endpoint.response': {hasAttributes: ['type', 'description']},
|
||||
specifications: {presence: true},
|
||||
links: {array: true, hasAttributes: ['name', 'url']},
|
||||
};
|
||||
|
||||
// What range of platforms do the driver's support
|
||||
@@ -111,7 +121,7 @@ const appiumRanges = {
|
||||
const rootFolder = path.join(__dirname, '..', '..', '..');
|
||||
|
||||
// Create Handlebars helper that shows a version range
|
||||
Handlebars.registerHelper('versions', function versionHelper (object, name, driverName) {
|
||||
Handlebars.registerHelper('versions', function versionHelper(object, name, driverName) {
|
||||
if (!object) {
|
||||
return 'None';
|
||||
}
|
||||
@@ -153,7 +163,7 @@ Handlebars.registerHelper('versions', function versionHelper (object, name, driv
|
||||
Handlebars.registerHelper('hyphenate', (str) => str.replace('_', '-'));
|
||||
Handlebars.registerHelper('uppercase', (str) => str.toUpperCase());
|
||||
|
||||
Handlebars.registerHelper('capitalize', function capitalizeDriver (driverName) {
|
||||
Handlebars.registerHelper('capitalize', function capitalizeDriver(driverName) {
|
||||
switch (driverName.toLowerCase()) {
|
||||
case 'xcuitest':
|
||||
return 'XCUITest';
|
||||
@@ -166,11 +176,13 @@ Handlebars.registerHelper('capitalize', function capitalizeDriver (driverName) {
|
||||
case 'espresso':
|
||||
return 'Espresso';
|
||||
default:
|
||||
return driverName.length === 0 ? driverName : driverName[0].toUpperCase() + driverName.substr(1);
|
||||
return driverName.length === 0
|
||||
? driverName
|
||||
: driverName[0].toUpperCase() + driverName.substr(1);
|
||||
}
|
||||
});
|
||||
|
||||
Handlebars.registerHelper('if_eq', function ifEq (a, b, opts) {
|
||||
Handlebars.registerHelper('if_eq', function ifEq(a, b, opts) {
|
||||
if (a === b) {
|
||||
return opts.fn(this);
|
||||
} else {
|
||||
@@ -178,21 +190,21 @@ Handlebars.registerHelper('if_eq', function ifEq (a, b, opts) {
|
||||
}
|
||||
});
|
||||
|
||||
function getBaseHostname (fullUrl) {
|
||||
function getBaseHostname(fullUrl) {
|
||||
const baseUrl = url.parse(fullUrl);
|
||||
return baseUrl.hostname;
|
||||
}
|
||||
|
||||
Handlebars.registerHelper('base_url', function baseUrl (fullUrl) {
|
||||
Handlebars.registerHelper('base_url', function baseUrl(fullUrl) {
|
||||
return getBaseHostname(fullUrl);
|
||||
});
|
||||
|
||||
Handlebars.registerHelper('client_url', function clientUrl (clientUrl) {
|
||||
Handlebars.registerHelper('client_url', function clientUrl(clientUrl) {
|
||||
if (!clientUrl) {
|
||||
return;
|
||||
}
|
||||
|
||||
const createUrlString = function createUrlString (clientUrl, name = getBaseHostname(clientUrl)) {
|
||||
const createUrlString = function createUrlString(clientUrl, name = getBaseHostname(clientUrl)) {
|
||||
return `[${name}](${clientUrl})`;
|
||||
};
|
||||
|
||||
@@ -204,20 +216,24 @@ Handlebars.registerHelper('client_url', function clientUrl (clientUrl) {
|
||||
for (const item of clientUrl) {
|
||||
for (let [key, value] of _.toPairs(item)) {
|
||||
key = key.toLowerCase();
|
||||
const urlStr = CLIENT_URL_TYPES[key] === 'hostname'
|
||||
? createUrlString(value)
|
||||
: createUrlString(value, CLIENT_URL_TYPES[key]);
|
||||
const urlStr =
|
||||
CLIENT_URL_TYPES[key] === 'hostname'
|
||||
? createUrlString(value)
|
||||
: createUrlString(value, CLIENT_URL_TYPES[key]);
|
||||
urlStrings.push(urlStr);
|
||||
}
|
||||
}
|
||||
return urlStrings.join(' ');
|
||||
});
|
||||
|
||||
async function registerSpecUrlHelper () {
|
||||
const routesFile = await fs.readFile(require.resolve('@appium/base-driver/lib/protocol/routes.js'), 'utf8');
|
||||
async function registerSpecUrlHelper() {
|
||||
const routesFile = await fs.readFile(
|
||||
require.resolve('@appium/base-driver/lib/protocol/routes.js'),
|
||||
'utf8'
|
||||
);
|
||||
const routesFileLines = routesFile.split('\n');
|
||||
|
||||
Handlebars.registerHelper('spec_url', function specUrl (specUrl, endpoint) {
|
||||
Handlebars.registerHelper('spec_url', function specUrl(specUrl, endpoint) {
|
||||
// return the url if it is not a link to our routes doc
|
||||
if (!specUrl.includes('routes.js')) {
|
||||
return specUrl;
|
||||
@@ -245,7 +261,9 @@ async function registerSpecUrlHelper () {
|
||||
}
|
||||
}
|
||||
if (_.isUndefined(index)) {
|
||||
throw new Error(`Unable to find entry in 'appium-base-driver#routes' for endpoint '${endpoint}'`);
|
||||
throw new Error(
|
||||
`Unable to find entry in 'appium-base-driver#routes' for endpoint '${endpoint}'`
|
||||
);
|
||||
}
|
||||
|
||||
return `${specUrl}#L${index}`;
|
||||
@@ -254,7 +272,7 @@ async function registerSpecUrlHelper () {
|
||||
|
||||
const YAML_DIR = path.join(__dirname, '..', 'commands-yml');
|
||||
|
||||
async function generateCommands () {
async function generateCommands() {
await registerSpecUrlHelper();

const commands = path.resolve(YAML_DIR, 'commands/**/*.yml');
@@ -262,7 +280,10 @@ async function generateCommands () {
await fs.rimraf(path.resolve(rootFolder, 'docs', 'en', 'commands'));

// get the template from which the md files will be created
const template = Handlebars.compile(await fs.readFile(path.resolve(YAML_DIR, 'template.md'), 'utf8'), {noEscape: true, strict: true});
const template = Handlebars.compile(
await fs.readFile(path.resolve(YAML_DIR, 'template.md'), 'utf8'),
{noEscape: true, strict: true}
);

let fileCount = 0;
for (const filename of await fs.glob(commands)) {
@@ -283,7 +304,10 @@ async function generateCommands () {

// Write the markdown to its right place
const ext = path.extname(relativeFilename);
const markdownPath = `${relativeFilename.substring(0, relativeFilename.length - ext.length)}.md`;
const markdownPath = `${relativeFilename.substring(
0,
relativeFilename.length - ext.length
)}.md`;
const outfile = path.resolve(rootFolder, 'docs', 'en', markdownPath);
log(` Writing to: ${outfile}`);
await fs.mkdirp(path.dirname(outfile));
@@ -294,8 +318,8 @@ async function generateCommands () {
log(`Done writing ${fileCount} command documents`);
}

async function generateCommandIndex () {
function getTree (element, path) {
async function generateCommandIndex() {
function getTree(element, path) {
let node = {
name: element[0],
};
@@ -321,9 +345,12 @@ async function generateCommandIndex () {
commands.push(getTree(el, '/docs/en/commands'));
}

const commandTemplate = Handlebars.compile(await fs.readFile(path.resolve(YAML_DIR, 'api-template.md'), 'utf8'), {noEscape: true, strict: true});
const commandTemplate = Handlebars.compile(
await fs.readFile(path.resolve(YAML_DIR, 'api-template.md'), 'utf8'),
{noEscape: true, strict: true}
);

async function writeIndex (index, commands, indexPath) {
async function writeIndex(index, commands, indexPath) {
log(`Creating API index '${index}'`);
const commandMarkdown = commandTemplate({
commands,
@@ -336,7 +363,7 @@ async function generateCommandIndex () {
await writeIndex(apiIndex, commands);
log(`Done writing main API index`);

async function writeIndividualIndexes (command) {
async function writeIndividualIndexes(command) {
if (!util.hasValue(command.commands)) {
// this is a leaf, so end
return;
@@ -361,7 +388,7 @@ async function generateCommandIndex () {
}
}

async function main () {
async function main() {
await generateCommands();
await generateCommandIndex();
}

@@ -1,7 +1,7 @@
#!/usr/bin/env node
/* eslint-disable no-console, promise/prefer-await-to-then */

async function main () {
async function main() {
const driverEnv = process.env.npm_config_drivers;
const pluginEnv = process.env.npm_config_plugins;

@@ -14,20 +14,29 @@ async function main () {
try {
extension = require('../build/lib/cli/extension');
} catch (e) {
throw new Error(`Could not load extension CLI file; has the project been transpiled? ` +
`(${e.message})`);
throw new Error(
`Could not load extension CLI file; has the project been transpiled? ` + `(${e.message})`
);
}

const {DEFAULT_APPIUM_HOME, DRIVER_TYPE, PLUGIN_TYPE} = require('./build/lib/extension-config');
const {runExtensionCommand} = extension;
const appiumHome = process.env.npm_config_appium_home || DEFAULT_APPIUM_HOME;
const specs = [[DRIVER_TYPE, driverEnv], [PLUGIN_TYPE, pluginEnv]];
const specs = [
[DRIVER_TYPE, driverEnv],
[PLUGIN_TYPE, pluginEnv],
];

for (const [type, extEnv] of specs) {
if (extEnv) {
for (const ext of extEnv.split(',')) {
try {
await checkAndInstallExtension({runExtensionCommand, appiumHome, type, ext});
await checkAndInstallExtension({
runExtensionCommand,
appiumHome,
type,
ext,
});
} catch (e) {
console.log(`There was an error checking and installing ${type} ${ext}: ${e.message}`);
}
@@ -36,36 +45,39 @@ async function main () {
}
}

async function checkAndInstallExtension ({
runExtensionCommand,
appiumHome,
type,
ext,
}) {
const extList = await runExtensionCommand({
appiumHome,
[`${type}Command`]: 'list',
showInstalled: true,
suppressOutput: true,
}, type);
async function checkAndInstallExtension({runExtensionCommand, appiumHome, type, ext}) {
const extList = await runExtensionCommand(
{
appiumHome,
[`${type}Command`]: 'list',
showInstalled: true,
suppressOutput: true,
},
type
);
if (extList[ext]) {
console.log(`The ${type} ${ext} was already installed, skipping...`);
return;
}
console.log(`Installing the ${type} ${ext}...`);
await runExtensionCommand({
appiumHome,
[`${type}Command`]: 'install',
[type]: ext,
suppressOutput: true,
}, type);
await runExtensionCommand(
{
appiumHome,
[`${type}Command`]: 'install',
[type]: ext,
suppressOutput: true,
},
type
);
}

if (require.main === module) {
main().then(() => {
process.exit(0);
}).catch((e) => {
console.error(e);
process.exit(1);
});
main()
.then(() => {
process.exit(0);
})
.catch((e) => {
console.error(e);
process.exit(1);
});
}

@@ -16,16 +16,7 @@ try {
/** @type {import('../lib/cli/args').ArgumentDefinitions} */
const appiumArguments = parser.getParser().rawArgs;
const docFile = path.normalize(
path.join(
__dirname,
'..',
'..',
'..',
'docs',
'en',
'writing-running-appium',
'server-args.md',
),
path.join(__dirname, '..', '..', '..', 'docs', 'en', 'writing-running-appium', 'server-args.md')
);
let md = `
<!-- THIS FILE IS AUTO-GENERATED BY scripts/write-server-args-docs.js; DO NOT EDIT -->
@@ -44,7 +35,7 @@ All flags are optional, but some are required in conjunction with certain others
|Flag|Default|Description|Example|
|----|-------|-----------|-------|
`;
appiumArguments.forEach(function handleArguments (argOpts, argNames) {
appiumArguments.forEach(function handleArguments(argOpts, argNames) {
// handle empty objects
if (JSON.stringify(argOpts.default) === '{}') {
argOpts.default = '{}';
@@ -62,7 +53,7 @@ try {
console.error(
'New docs written! Do not forget to commit:\ngit add -A %s && git commit -m "Update %s"',
path.relative(process.cwd(), docFile),
path.basename(docFile),
path.basename(docFile)
);
} catch (err) {
console.error('Could not write to file %s: %s', docFile, err);

@@ -34,9 +34,7 @@ describe('CLI behavior', function () {
*/
let appiumHome;

const testDriverPath = path.dirname(
resolveFixture('test-driver/package.json')
);
const testDriverPath = path.dirname(resolveFixture('test-driver/package.json'));

beforeEach(function () {
this.timeout(30000);
@@ -80,10 +78,7 @@ describe('CLI behavior', function () {
appiumHomePkgPath = path.join(appiumHome, 'package.json');
runJson = runAppiumJson(appiumHome);
// an example package.json referencing appium dependency
await fs.copyFile(
resolveFixture('cli/appium-dependency.package.json'),
appiumHomePkgPath
);
await fs.copyFile(resolveFixture('cli/appium-dependency.package.json'), appiumHomePkgPath);
});

after(async function () {
@@ -92,9 +87,7 @@ describe('CLI behavior', function () {

describe('without drivers installed', function () {
it('should list no drivers', async function () {
const res = /** @type {ExtensionListData} */ (
await runJson([DRIVER_TYPE, LIST])
);
const res = /** @type {ExtensionListData} */ (await runJson([DRIVER_TYPE, LIST]));
res.should.satisfy(
/** @param {typeof res} value */ (value) =>
Object.values(value).every(({installed}) => !installed)
@@ -108,18 +101,12 @@ describe('CLI behavior', function () {
});

it('should list the driver', async function () {
const res = /** @type {ExtensionListData} */ (
await runJson([DRIVER_TYPE, LIST])
);
const res = /** @type {ExtensionListData} */ (await runJson([DRIVER_TYPE, LIST]));
res.should.have.property('fake');
});

it('should be resolvable from the local directory', function () {
(() =>
resolveFrom(
appiumHome,
'@appium/fake-driver/package.json'
)).should.not.throw();
(() => resolveFrom(appiumHome, '@appium/fake-driver/package.json')).should.not.throw();
});
});

@@ -139,9 +126,7 @@ describe('CLI behavior', function () {
let res;

beforeEach(async function () {
res = /** @type {ExtensionListData} */ (
await runJson([DRIVER_TYPE, LIST])
);
res = /** @type {ExtensionListData} */ (await runJson([DRIVER_TYPE, LIST]));
});
it('should list the driver', function () {
res.should.have.property('fake');
@@ -168,9 +153,7 @@ describe('CLI behavior', function () {
});

it('should update package.json', async function () {
const newPkg = JSON.parse(
await fs.readFile(appiumHomePkgPath, 'utf8')
);
const newPkg = JSON.parse(await fs.readFile(appiumHomePkgPath, 'utf8'));
expect(newPkg).to.have.nested.property('devDependencies.test-driver');
});

@@ -188,8 +171,7 @@ describe('CLI behavior', function () {
});

it('should actually install both drivers', function () {
expect(() => resolveFrom(appiumHome, '@appium/fake-driver')).not.to
.throw;
expect(() => resolveFrom(appiumHome, '@appium/fake-driver')).not.to.throw;
expect(() => resolveFrom(appiumHome, 'test-driver')).not.to.throw;
});
});
@@ -234,21 +216,15 @@ describe('CLI behavior', function () {
before(function () {
const run = runAppiumJson(appiumHome);
runInstall = async (args) =>
/** @type {ReturnType<typeof runInstall>} */ (
await run([DRIVER_TYPE, INSTALL, ...args])
);
/** @type {ReturnType<typeof runInstall>} */ (await run([DRIVER_TYPE, INSTALL, ...args]));
runUninstall = async (args) =>
/** @type {ReturnType<typeof runUninstall>} */ (
await run([DRIVER_TYPE, UNINSTALL, ...args])
);
runList = async (args = []) =>
/** @type {ReturnType<typeof runList>} */ (
await run([DRIVER_TYPE, LIST, ...args])
);
/** @type {ReturnType<typeof runList>} */ (await run([DRIVER_TYPE, LIST, ...args]));
runRun = async (args) =>
/** @type {ReturnType<typeof runRun>} */ (
await run([DRIVER_TYPE, RUN, ...args])
);
/** @type {ReturnType<typeof runRun>} */ (await run([DRIVER_TYPE, RUN, ...args]));
});

describe(LIST, function () {
@@ -290,14 +266,8 @@ describe('CLI behavior', function () {
penultimateFakeDriverVersionAsOfRightNow
).should.be.true;
// TODO: this could probably be replaced by looking at updateVersion in the JSON
const stdout = await runAppium(appiumHome, [
DRIVER_TYPE,
LIST,
'--updates',
]);
stdout.should.match(
new RegExp(`fake.+[${fake.updateVersion} available]`)
);
const stdout = await runAppium(appiumHome, [DRIVER_TYPE, LIST, '--updates']);
stdout.should.match(new RegExp(`fake.+[${fake.updateVersion} available]`));
});
});

@@ -314,11 +284,7 @@ describe('CLI behavior', function () {
|
||||
});
|
||||
it('should install a driver from npm', async function () {
|
||||
await clear();
|
||||
const ret = await runInstall([
|
||||
'@appium/fake-driver',
|
||||
'--source',
|
||||
'npm',
|
||||
]);
|
||||
const ret = await runInstall(['@appium/fake-driver', '--source', 'npm']);
|
||||
ret.fake.pkgName.should.eql('@appium/fake-driver');
|
||||
ret.fake.installType.should.eql('npm');
|
||||
ret.fake.installSpec.should.eql('@appium/fake-driver');
|
||||
@@ -334,9 +300,7 @@ describe('CLI behavior', function () {
|
||||
const list = await runList(['--installed']);
|
||||
expect(list.fake).to.exist;
|
||||
expect(list.test).to.exist;
|
||||
expect(() =>
|
||||
resolveFrom(appiumHome, '@appium/fake-driver')
|
||||
).not.to.throw;
|
||||
expect(() => resolveFrom(appiumHome, '@appium/fake-driver')).not.to.throw;
|
||||
expect(() => resolveFrom(appiumHome, 'test-driver')).not.to.throw;
|
||||
});
|
||||
|
||||
@@ -348,12 +312,8 @@ describe('CLI behavior', function () {
|
||||
const list = await runList(['--installed']);
|
||||
expect(list.fake).to.exist;
|
||||
expect(list.uiautomator2).to.exist;
|
||||
expect(() =>
|
||||
resolveFrom(appiumHome, '@appium/fake-driver')
|
||||
).not.to.throw;
|
||||
expect(() =>
|
||||
resolveFrom(appiumHome, 'appium-uiautomator2-driver')
|
||||
).not.to.throw;
|
||||
expect(() => resolveFrom(appiumHome, '@appium/fake-driver')).not.to.throw;
|
||||
expect(() => resolveFrom(appiumHome, 'appium-uiautomator2-driver')).not.to.throw;
|
||||
});
|
||||
|
||||
it('should install a driver from npm with a specific version/tag', async function () {
|
||||
@@ -411,9 +371,7 @@ describe('CLI behavior', function () {
|
||||
]);
|
||||
ret.fake.pkgName.should.eql('appium-fake-driver');
|
||||
ret.fake.installType.should.eql('git');
|
||||
ret.fake.installSpec.should.eql(
|
||||
'git+https://github.com/appium/appium-fake-driver'
|
||||
);
|
||||
ret.fake.installSpec.should.eql('git+https://github.com/appium/appium-fake-driver');
|
||||
const list = await runList(['--installed']);
|
||||
delete list.fake.installed;
|
||||
list.should.eql(ret);
|
||||
@@ -422,11 +380,7 @@ describe('CLI behavior', function () {
|
||||
await clear();
|
||||
// take advantage of the fact that we know we have fake driver installed as a dependency in
|
||||
// this module, so we know its local path on disk
|
||||
const ret = await installLocalExtension(
|
||||
appiumHome,
|
||||
DRIVER_TYPE,
|
||||
FAKE_DRIVER_DIR
|
||||
);
|
||||
const ret = await installLocalExtension(appiumHome, DRIVER_TYPE, FAKE_DRIVER_DIR);
|
||||
ret.fake.pkgName.should.eql('@appium/fake-driver');
|
||||
ret.fake.installType.should.eql('local');
|
||||
ret.fake.installSpec.should.eql(FAKE_DRIVER_DIR);
|
||||
@@ -446,11 +400,7 @@ describe('CLI behavior', function () {
|
||||
describe('uninstall', function () {
|
||||
it('should uninstall a driver based on its driver name', async function () {
|
||||
await clear();
|
||||
const ret = await runInstall([
|
||||
'@appium/fake-driver',
|
||||
'--source',
|
||||
'npm',
|
||||
]);
|
||||
const ret = await runInstall(['@appium/fake-driver', '--source', 'npm']);
|
||||
// this will throw if the file doesn't exist
|
||||
const installPath = resolveFrom(appiumHome, ret.fake.pkgName);
|
||||
let list = await runList(['--installed']);
|
||||
@@ -479,34 +429,24 @@ describe('CLI behavior', function () {
|
||||
});
|
||||
it('should take a valid driver, invalid script, and throw an error', async function () {
|
||||
const driverName = 'fake';
|
||||
await expect(
|
||||
runRun([driverName, 'foo'])
|
||||
).to.eventually.be.rejectedWith(Error);
|
||||
await expect(runRun([driverName, 'foo'])).to.eventually.be.rejectedWith(Error);
|
||||
});
|
||||
it('should take an invalid driver, invalid script, and throw an error', async function () {
|
||||
const driverName = 'foo';
|
||||
await expect(
|
||||
runRun([driverName, 'bar'])
|
||||
).to.eventually.be.rejectedWith(Error);
|
||||
await expect(runRun([driverName, 'bar'])).to.eventually.be.rejectedWith(Error);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Plugin CLI', function () {
|
||||
const FAKE_PLUGIN_DIR = path.dirname(
|
||||
require.resolve('@appium/fake-plugin/package.json')
|
||||
);
|
||||
const FAKE_PLUGIN_DIR = path.dirname(require.resolve('@appium/fake-plugin/package.json'));
|
||||
|
||||
before(function () {
|
||||
const run = runAppiumJson(appiumHome);
|
||||
runList = async (args = []) =>
|
||||
/** @type {ReturnType<typeof runList>} */ (
|
||||
await run([PLUGIN_TYPE, LIST, ...args])
|
||||
);
|
||||
/** @type {ReturnType<typeof runList>} */ (await run([PLUGIN_TYPE, LIST, ...args]));
|
||||
runRun = async (args) =>
|
||||
/** @type {ReturnType<typeof runRun>} */ (
|
||||
await run([PLUGIN_TYPE, RUN, ...args])
|
||||
);
|
||||
/** @type {ReturnType<typeof runRun>} */ (await run([PLUGIN_TYPE, RUN, ...args]));
|
||||
});
|
||||
|
||||
describe('run', function () {
|
||||
@@ -527,14 +467,10 @@ describe('CLI behavior', function () {
|
||||
});
|
||||
it('should take a valid plugin, invalid script, and throw an error', async function () {
|
||||
const pluginName = 'fake';
|
||||
await expect(
|
||||
runRun([pluginName, 'foo', '--json'])
|
||||
).to.eventually.be.rejectedWith(Error);
|
||||
await expect(runRun([pluginName, 'foo', '--json'])).to.eventually.be.rejectedWith(Error);
|
||||
});
|
||||
it('should take an invalid plugin, invalid script, and throw an error', async function () {
|
||||
await expect(
|
||||
runRun(['foo', 'bar', '--json'])
|
||||
).to.eventually.be.rejectedWith(Error);
|
||||
await expect(runRun(['foo', 'bar', '--json'])).to.eventually.be.rejectedWith(Error);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,45 +2,23 @@
|
||||
|
||||
import {DRIVER_TYPE} from '../../lib/constants';
|
||||
import {readConfigFile, normalizeConfig} from '../../lib/config-file';
|
||||
import {
|
||||
finalizeSchema,
|
||||
registerSchema,
|
||||
resetSchema,
|
||||
} from '../../lib/schema/schema';
|
||||
import {finalizeSchema, registerSchema, resetSchema} from '../../lib/schema/schema';
|
||||
import extSchema from '../fixtures/driver.schema.js';
|
||||
import {resolveFixture} from '../helpers';
|
||||
|
||||
describe('config file behavior', function () {
|
||||
const GOOD_FILEPATH = resolveFixture('config', 'appium.config.good.json');
|
||||
const BAD_NODECONFIG_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.bad-nodeconfig.json'
|
||||
);
|
||||
const BAD_NODECONFIG_FILEPATH = resolveFixture('config', 'appium.config.bad-nodeconfig.json');
|
||||
const BAD_FILEPATH = resolveFixture('config', 'appium.config.bad.json');
|
||||
const INVALID_JSON_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.invalid.json'
|
||||
);
|
||||
const SECURITY_ARRAY_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.security-array.json'
|
||||
);
|
||||
const INVALID_JSON_FILEPATH = resolveFixture('config', 'appium.config.invalid.json');
|
||||
const SECURITY_ARRAY_FILEPATH = resolveFixture('config', 'appium.config.security-array.json');
|
||||
const SECURITY_DELIMITED_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.security-delimited.json'
|
||||
);
|
||||
const SECURITY_PATH_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.security-path.json'
|
||||
);
|
||||
const UNKNOWN_PROPS_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.ext-unknown-props.json'
|
||||
);
|
||||
const EXT_PROPS_FILEPATH = resolveFixture(
|
||||
'config',
|
||||
'appium.config.ext-good.json'
|
||||
);
|
||||
const SECURITY_PATH_FILEPATH = resolveFixture('config', 'appium.config.security-path.json');
|
||||
const UNKNOWN_PROPS_FILEPATH = resolveFixture('config', 'appium.config.ext-unknown-props.json');
|
||||
const EXT_PROPS_FILEPATH = resolveFixture('config', 'appium.config.ext-good.json');
|
||||
|
||||
beforeEach(function () {
|
||||
finalizeSchema();
|
||||
@@ -65,10 +43,7 @@ describe('config file behavior', function () {
|
||||
describe('when a string', function () {
|
||||
it('should return errors', async function () {
|
||||
const result = await readConfigFile(BAD_NODECONFIG_FILEPATH);
|
||||
result.should.have.nested.property(
|
||||
'errors[0].instancePath',
|
||||
'/server/nodeconfig'
|
||||
);
|
||||
result.should.have.nested.property('errors[0].instancePath', '/server/nodeconfig');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -84,20 +59,14 @@ describe('config file behavior', function () {
|
||||
describe('when a string path', function () {
|
||||
it('should return errors', async function () {
|
||||
const result = await readConfigFile(SECURITY_PATH_FILEPATH);
|
||||
result.should.have.nested.property(
|
||||
'errors[0].instancePath',
|
||||
'/server/allow-insecure'
|
||||
);
|
||||
result.should.have.nested.property('errors[0].instancePath', '/server/allow-insecure');
|
||||
});
|
||||
});
|
||||
|
||||
describe('when a comma-delimited string', function () {
|
||||
it('should return errors', async function () {
|
||||
const result = await readConfigFile(SECURITY_DELIMITED_FILEPATH);
|
||||
result.should.have.nested.property(
|
||||
'errors[0].instancePath',
|
||||
'/server/allow-insecure'
|
||||
);
|
||||
result.should.have.nested.property('errors[0].instancePath', '/server/allow-insecure');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -118,10 +87,7 @@ describe('config file behavior', function () {
|
||||
describe('without extensions', function () {
|
||||
it('should return an object containing errors', async function () {
|
||||
const result = await readConfigFile(BAD_FILEPATH);
|
||||
result.should.have.deep.property(
|
||||
'config',
|
||||
normalizeConfig(require(BAD_FILEPATH))
|
||||
);
|
||||
result.should.have.deep.property('config', normalizeConfig(require(BAD_FILEPATH)));
|
||||
result.should.have.property('filepath', BAD_FILEPATH);
|
||||
result.should.have.deep
|
||||
.property('errors')
|
||||
@@ -147,8 +113,7 @@ describe('config file behavior', function () {
|
||||
},
|
||||
{
|
||||
instancePath: '/server/allow-insecure',
|
||||
schemaPath:
|
||||
'#/properties/server/properties/allow-insecure/type',
|
||||
schemaPath: '#/properties/server/properties/allow-insecure/type',
|
||||
keyword: 'type',
|
||||
params: {
|
||||
type: 'array',
|
||||
@@ -157,8 +122,7 @@ describe('config file behavior', function () {
|
||||
},
|
||||
{
|
||||
instancePath: '/server/callback-port',
|
||||
schemaPath:
|
||||
'#/properties/server/properties/callback-port/maximum',
|
||||
schemaPath: '#/properties/server/properties/callback-port/maximum',
|
||||
keyword: 'maximum',
|
||||
params: {
|
||||
comparison: '<=',
|
||||
|
||||
@@ -1,13 +1,7 @@
|
||||
import { createSandbox } from 'sinon';
|
||||
import {
|
||||
getGitRev,
|
||||
getBuildInfo,
|
||||
updateBuildInfo,
|
||||
APPIUM_VER,
|
||||
} from '../../lib/config';
|
||||
import {createSandbox} from 'sinon';
|
||||
import {getGitRev, getBuildInfo, updateBuildInfo, APPIUM_VER} from '../../lib/config';
|
||||
import axios from 'axios';
|
||||
import { fs } from '@appium/support';
|
||||
|
||||
import {fs} from '@appium/support';
|
||||
|
||||
describe('Config', function () {
|
||||
let sandbox;
|
||||
@@ -29,7 +23,7 @@ describe('Config', function () {
|
||||
});
|
||||
});
|
||||
describe('getBuildInfo', function () {
|
||||
async function verifyBuildInfoUpdate (useLocalGit) {
|
||||
async function verifyBuildInfoUpdate(useLocalGit) {
|
||||
const buildInfo = getBuildInfo();
|
||||
mockFs.expects('exists').atLeast(1).returns(useLocalGit);
|
||||
buildInfo['git-sha'] = undefined;
|
||||
@@ -60,10 +54,8 @@ describe('Config', function () {
|
||||
data: [
|
||||
{
|
||||
name: `v${APPIUM_VER}`,
|
||||
zipball_url:
|
||||
'https://api.github.com/repos/appium/appium/zipball/v1.9.0-beta.1',
|
||||
tarball_url:
|
||||
'https://api.github.com/repos/appium/appium/tarball/v1.9.0-beta.1',
|
||||
zipball_url: 'https://api.github.com/repos/appium/appium/zipball/v1.9.0-beta.1',
|
||||
tarball_url: 'https://api.github.com/repos/appium/appium/tarball/v1.9.0-beta.1',
|
||||
commit: {
|
||||
sha: '3c2752f9f9c56000705a4ae15b3ba68a5d2e644c',
|
||||
url: 'https://api.github.com/repos/appium/appium/commits/3c2752f9f9c56000705a4ae15b3ba68a5d2e644c',
|
||||
@@ -72,10 +64,8 @@ describe('Config', function () {
|
||||
},
|
||||
{
|
||||
name: 'v1.8.2-beta',
|
||||
zipball_url:
|
||||
'https://api.github.com/repos/appium/appium/zipball/v1.8.2-beta',
|
||||
tarball_url:
|
||||
'https://api.github.com/repos/appium/appium/tarball/v1.8.2-beta',
|
||||
zipball_url: 'https://api.github.com/repos/appium/appium/zipball/v1.8.2-beta',
|
||||
tarball_url: 'https://api.github.com/repos/appium/appium/tarball/v1.8.2-beta',
|
||||
commit: {
|
||||
sha: '5b98b9197e75aa85e7507d21d3126c1a63d1ce8f',
|
||||
url: 'https://api.github.com/repos/appium/appium/commits/5b98b9197e75aa85e7507d21d3126c1a63d1ce8f',
|
||||
@@ -87,8 +77,7 @@ describe('Config', function () {
|
||||
getStub.onCall(1).returns({
|
||||
data: {
|
||||
sha: '3c2752f9f9c56000705a4ae15b3ba68a5d2e644c',
|
||||
node_id:
|
||||
'MDY6Q29tbWl0NzUzMDU3MDozYzI3NTJmOWY5YzU2MDAwNzA1YTRhZTE1YjNiYTY4YTVkMmU2NDRj',
|
||||
node_id: 'MDY6Q29tbWl0NzUzMDU3MDozYzI3NTJmOWY5YzU2MDAwNzA1YTRhZTE1YjNiYTY4YTVkMmU2NDRj',
|
||||
commit: {
|
||||
author: {
|
||||
name: 'Isaac Murchie',
|
||||
|
||||
@@ -7,13 +7,7 @@ import axios from 'axios';
|
||||
import {remote as wdio} from 'webdriverio';
|
||||
import {main as appiumServer} from '../../lib/main';
|
||||
import {INSTALL_TYPE_LOCAL} from '../../lib/extension/extension-config';
|
||||
import {
|
||||
W3C_PREFIXED_CAPS,
|
||||
TEST_FAKE_APP,
|
||||
TEST_HOST,
|
||||
getTestPort,
|
||||
PROJECT_ROOT,
|
||||
} from '../helpers';
|
||||
import {W3C_PREFIXED_CAPS, TEST_FAKE_APP, TEST_HOST, getTestPort, PROJECT_ROOT} from '../helpers';
|
||||
import {BaseDriver} from '@appium/base-driver';
|
||||
import {loadExtensions} from '../../lib/extension';
|
||||
import {runExtensionCommand} from '../../lib/cli/extension';
|
||||
@@ -127,9 +121,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
let driver = await wdio({...wdOpts, capabilities: caps});
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
const {data} = await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fakedriverargs`
|
||||
);
|
||||
const {data} = await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fakedriverargs`);
|
||||
should.not.exist(data.value.sillyWebServerPort);
|
||||
should.not.exist(data.value.sillyWebServerHost);
|
||||
} finally {
|
||||
@@ -153,9 +145,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
let driver = await wdio({...wdOpts, capabilities: caps});
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
const {data} = await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fakedriverargs`
|
||||
);
|
||||
const {data} = await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fakedriverargs`);
|
||||
data.value.sillyWebServerPort.should.eql(sillyWebServerPort);
|
||||
data.value.sillyWebServerHost.should.eql(sillyWebServerHost);
|
||||
} finally {
|
||||
@@ -206,9 +196,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
should.exist(driver.sessionId);
|
||||
|
||||
await B.delay(250);
|
||||
await driver
|
||||
.getPageSource()
|
||||
.should.eventually.be.rejectedWith(/terminated/);
|
||||
await driver.getPageSource().should.eventually.be.rejectedWith(/terminated/);
|
||||
});
|
||||
|
||||
it('should accept valid W3C capabilities and start a W3C session', async function () {
|
||||
@@ -216,16 +204,12 @@ describe('FakeDriver - via HTTP', function () {
|
||||
const w3cCaps = {
|
||||
capabilities: {
|
||||
alwaysMatch: {'appium:automationName': 'Fake', platformName: 'Fake'},
|
||||
firstMatch: [
|
||||
{'appium:deviceName': 'Fake', 'appium:app': TEST_FAKE_APP},
|
||||
],
|
||||
firstMatch: [{'appium:deviceName': 'Fake', 'appium:app': TEST_FAKE_APP}],
|
||||
},
|
||||
};
|
||||
|
||||
// Create the session
|
||||
const {status, value, sessionId} = (
|
||||
await axios.post(testServerBaseSessionUrl, w3cCaps)
|
||||
).data;
|
||||
const {status, value, sessionId} = (await axios.post(testServerBaseSessionUrl, w3cCaps)).data;
|
||||
try {
|
||||
should.not.exist(status); // Test that it's a W3C session by checking that 'status' is not in the response
|
||||
should.not.exist(sessionId);
|
||||
@@ -249,10 +233,10 @@ describe('FakeDriver - via HTTP', function () {
|
||||
|
||||
// Now use that sessionID to call an arbitrary W3C-only endpoint that isn't implemented to see if it responds with correct error
|
||||
await axios
|
||||
.post(
|
||||
`${testServerBaseSessionUrl}/${value.sessionId}/execute/async`,
|
||||
{script: '', args: ['a']}
|
||||
)
|
||||
.post(`${testServerBaseSessionUrl}/${value.sessionId}/execute/async`, {
|
||||
script: '',
|
||||
args: ['a'],
|
||||
})
|
||||
.should.eventually.be.rejectedWith(/405/);
|
||||
} finally {
|
||||
// End session
|
||||
@@ -264,9 +248,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
const badW3Ccaps = {
|
||||
capabilities: {
|
||||
alwaysMatch: {},
|
||||
firstMatch: [
|
||||
{'appium:deviceName': 'Fake', 'appium:app': TEST_FAKE_APP},
|
||||
],
|
||||
firstMatch: [{'appium:deviceName': 'Fake', 'appium:app': TEST_FAKE_APP}],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -291,9 +273,8 @@ describe('FakeDriver - via HTTP', function () {
|
||||
},
|
||||
};
|
||||
|
||||
const {status, value, sessionId} = (
|
||||
await axios.post(testServerBaseSessionUrl, combinedCaps)
|
||||
).data;
|
||||
const {status, value, sessionId} = (await axios.post(testServerBaseSessionUrl, combinedCaps))
|
||||
.data;
|
||||
try {
|
||||
should.not.exist(status); // If it's a W3C session, should not respond with 'status'
|
||||
should.not.exist(sessionId);
|
||||
@@ -317,9 +298,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
},
|
||||
},
|
||||
};
|
||||
await axios
|
||||
.post(testServerBaseSessionUrl, w3cCaps)
|
||||
.should.eventually.be.rejectedWith(/500/);
|
||||
await axios.post(testServerBaseSessionUrl, w3cCaps).should.eventually.be.rejectedWith(/500/);
|
||||
});
|
||||
|
||||
it('should accept capabilities that are provided in the firstMatch array', async function () {
|
||||
@@ -334,9 +313,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
],
|
||||
},
|
||||
};
|
||||
const {value, sessionId, status} = (
|
||||
await axios.post(testServerBaseSessionUrl, w3cCaps)
|
||||
).data;
|
||||
const {value, sessionId, status} = (await axios.post(testServerBaseSessionUrl, w3cCaps)).data;
|
||||
try {
|
||||
should.not.exist(status);
|
||||
should.not.exist(sessionId);
|
||||
@@ -387,10 +364,7 @@ describe('FakeDriver - via HTTP', function () {
|
||||
const createSessionStub = sandbox
|
||||
.stub(FakeDriver.prototype, 'createSession')
|
||||
.callsFake(async function (jsonwpCaps) {
|
||||
const res = await BaseDriver.prototype.createSession.call(
|
||||
this,
|
||||
jsonwpCaps
|
||||
);
|
||||
const res = await BaseDriver.prototype.createSession.call(this, jsonwpCaps);
|
||||
this.protocol.should.equal('MJSONWP');
|
||||
return res;
|
||||
});
|
||||
@@ -409,10 +383,9 @@ describe('FakeDriver - via HTTP', function () {
|
||||
let driver = await wdio({...wdOpts, capabilities: caps});
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
await axios.post(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fakedriver`,
|
||||
{thing: {yes: 'lolno'}}
|
||||
);
|
||||
await axios.post(`${testServerBaseSessionUrl}/${sessionId}/fakedriver`, {
|
||||
thing: {yes: 'lolno'},
|
||||
});
|
||||
(
|
||||
await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fakedriver`)
|
||||
).data.value.should.eql({yes: 'lolno'});
|
||||
|
||||
@@ -136,13 +136,7 @@ export const runAppiumJson = /**
|
||||
export async function installLocalExtension(appiumHome, type, pathToExtension) {
|
||||
return /** @type {import('appium/types').ExtRecord<ExtType>} */ (
|
||||
/** @type {unknown} */ (
|
||||
await runAppiumJson(appiumHome, [
|
||||
type,
|
||||
'install',
|
||||
'--source',
|
||||
'local',
|
||||
pathToExtension,
|
||||
])
|
||||
await runAppiumJson(appiumHome, [type, 'install', '--source', 'local', pathToExtension])
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -7,12 +7,7 @@ import {remote as wdio} from 'webdriverio';
|
||||
import axios from 'axios';
|
||||
import {main as appiumServer} from '../../lib/main';
|
||||
import {INSTALL_TYPE_LOCAL} from '../../lib/extension/extension-config';
|
||||
import {
|
||||
W3C_PREFIXED_CAPS,
|
||||
TEST_HOST,
|
||||
getTestPort,
|
||||
PROJECT_ROOT,
|
||||
} from '../helpers';
|
||||
import {W3C_PREFIXED_CAPS, TEST_HOST, getTestPort, PROJECT_ROOT} from '../helpers';
|
||||
import {runExtensionCommand} from '../../lib/cli/extension';
|
||||
import {tempDir, fs} from '@appium/support';
|
||||
import {loadExtensions} from '../../lib/extension';
|
||||
@@ -29,12 +24,7 @@ const wdOpts = {
|
||||
capabilities: W3C_PREFIXED_CAPS,
|
||||
};
|
||||
const FAKE_DRIVER_DIR = path.join(PROJECT_ROOT, 'packages', 'fake-driver');
|
||||
const FAKE_PLUGIN_DIR = path.join(
|
||||
PROJECT_ROOT,
|
||||
'node_modules',
|
||||
'@appium',
|
||||
'fake-plugin'
|
||||
);
|
||||
const FAKE_PLUGIN_DIR = path.join(PROJECT_ROOT, 'node_modules', '@appium', 'fake-plugin');
|
||||
|
||||
describe('FakePlugin', function () {
|
||||
/** @type {string} */
|
||||
@@ -127,9 +117,7 @@ describe('FakePlugin', function () {
|
||||
});
|
||||
|
||||
it('should not update the server if plugin is not activated', async function () {
|
||||
await axios
|
||||
.post(`http://${TEST_HOST}:${port}/fake`)
|
||||
.should.eventually.be.rejectedWith(/404/);
|
||||
await axios.post(`http://${TEST_HOST}:${port}/fake`).should.eventually.be.rejectedWith(/404/);
|
||||
});
|
||||
it('should not update method map if plugin is not activated', async function () {
|
||||
const driver = await wdio(wdOpts);
|
||||
@@ -167,8 +155,7 @@ describe('FakePlugin', function () {
|
||||
let server;
|
||||
before(async function () {
|
||||
// then start server if we need to
|
||||
const usePlugins =
|
||||
registrationType === 'explicit' ? ['fake', 'p2', 'p3'] : ['all'];
|
||||
const usePlugins = registrationType === 'explicit' ? ['fake', 'p2', 'p3'] : ['all'];
|
||||
const args = {
|
||||
appiumHome,
|
||||
port,
|
||||
@@ -188,23 +175,18 @@ describe('FakePlugin', function () {
|
||||
});
|
||||
it('should update the server', async function () {
|
||||
const res = {fake: 'fakeResponse'};
|
||||
(await axios.post(`http://${TEST_HOST}:${port}/fake`)).data.should.eql(
|
||||
res
|
||||
);
|
||||
(await axios.post(`http://${TEST_HOST}:${port}/fake`)).data.should.eql(res);
|
||||
});
|
||||
|
||||
it('should modify the method map with new commands', async function () {
|
||||
const driver = await wdio(wdOpts);
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
await axios.post(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fake_data`,
|
||||
{data: {fake: 'data'}}
|
||||
);
|
||||
await axios.post(`${testServerBaseSessionUrl}/${sessionId}/fake_data`, {
|
||||
data: {fake: 'data'},
|
||||
});
|
||||
(
|
||||
await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fake_data`
|
||||
)
|
||||
await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fake_data`)
|
||||
).data.value.should.eql({fake: 'data'});
|
||||
} finally {
|
||||
await driver.deleteSession();
|
||||
@@ -217,9 +199,7 @@ describe('FakePlugin', function () {
|
||||
try {
|
||||
await driver
|
||||
.getPageSource()
|
||||
.should.eventually.eql(
|
||||
`<Fake>${JSON.stringify([sessionId])}</Fake>`
|
||||
);
|
||||
.should.eventually.eql(`<Fake>${JSON.stringify([sessionId])}</Fake>`);
|
||||
} finally {
|
||||
await driver.deleteSession();
|
||||
}
|
||||
@@ -230,10 +210,10 @@ describe('FakePlugin', function () {
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
const el = (
|
||||
await axios.post(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/element`,
|
||||
{using: 'xpath', value: '//MockWebView'}
|
||||
)
|
||||
await axios.post(`${testServerBaseSessionUrl}/${sessionId}/element`, {
|
||||
using: 'xpath',
|
||||
value: '//MockWebView',
|
||||
})
|
||||
).data.value;
|
||||
el.should.have.property('fake');
|
||||
} finally {
|
||||
@@ -248,11 +228,8 @@ describe('FakePlugin', function () {
|
||||
await axios.post(`${testServerBaseSessionUrl}/${sessionId}/context`, {
|
||||
name: 'PROXY',
|
||||
});
|
||||
const handle = (
|
||||
await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/window/handle`
|
||||
)
|
||||
).data.value;
|
||||
const handle = (await axios.get(`${testServerBaseSessionUrl}/${sessionId}/window/handle`))
|
||||
.data.value;
|
||||
handle.should.eql('<<proxied via proxyCommand>>');
|
||||
} finally {
|
||||
await axios.post(`${testServerBaseSessionUrl}/${sessionId}/context`, {
|
||||
@@ -307,9 +284,7 @@ describe('FakePlugin', function () {
|
||||
const driver = await wdio(wdOpts);
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
const {data} = await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fakepluginargs`
|
||||
);
|
||||
const {data} = await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fakepluginargs`);
|
||||
data.value.should.eql(FAKE_ARGS);
|
||||
} finally {
|
||||
await driver.deleteSession();
|
||||
@@ -334,9 +309,7 @@ describe('FakePlugin', function () {
|
||||
const driver = await wdio(wdOpts);
|
||||
const {sessionId} = driver;
|
||||
try {
|
||||
const {data} = await axios.get(
|
||||
`${testServerBaseSessionUrl}/${sessionId}/fakepluginargs`
|
||||
);
|
||||
const {data} = await axios.get(`${testServerBaseSessionUrl}/${sessionId}/fakepluginargs`);
|
||||
should.not.exist(data.value);
|
||||
} finally {
|
||||
await driver.deleteSession();
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
// @ts-check
|
||||
import { fs, tempDir } from '@appium/support';
|
||||
import {fs, tempDir} from '@appium/support';
|
||||
import path from 'path';
|
||||
import { DRIVER_TYPE } from '../../lib/constants';
|
||||
import { resolveFixture } from '../helpers';
|
||||
import { installLocalExtension, runAppium } from './e2e-helpers';
|
||||
import {DRIVER_TYPE} from '../../lib/constants';
|
||||
import {resolveFixture} from '../helpers';
|
||||
import {installLocalExtension, runAppium} from './e2e-helpers';
|
||||
|
||||
const {expect} = chai;
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@
|
||||
|
||||
import getPort from 'get-port';
|
||||
import path from 'path';
|
||||
import rewiremock, { addPlugin, overrideEntryPoint, plugins } from 'rewiremock';
|
||||
import { insertAppiumPrefixes } from '../lib/utils';
|
||||
import rewiremock, {addPlugin, overrideEntryPoint, plugins} from 'rewiremock';
|
||||
import {insertAppiumPrefixes} from '../lib/utils';
|
||||
|
||||
const TEST_HOST = '127.0.0.1';
|
||||
|
||||
@@ -16,7 +16,7 @@ const BASE_CAPS = {
|
||||
automationName: 'Fake',
|
||||
platformName: 'Fake',
|
||||
deviceName: 'Fake',
|
||||
app: TEST_FAKE_APP
|
||||
app: TEST_FAKE_APP,
|
||||
};
|
||||
const W3C_PREFIXED_CAPS = {...insertAppiumPrefixes(BASE_CAPS)};
|
||||
/** @type {import('@appium/types').W3CCapabilities} */
|
||||
@@ -30,7 +30,7 @@ let TEST_PORT;
|
||||
* Returns a free port; one per process
|
||||
* @returns {Promise<number>} a free port
|
||||
*/
|
||||
async function getTestPort () {
|
||||
async function getTestPort() {
|
||||
return await (TEST_PORT || getPort());
|
||||
}
|
||||
|
||||
@@ -40,11 +40,23 @@ async function getTestPort () {
|
||||
* @param {...string} pathParts - Additional paths to `join()`
|
||||
* @returns {string}
|
||||
*/
|
||||
function resolveFixture (filename, ...pathParts) {
|
||||
function resolveFixture(filename, ...pathParts) {
|
||||
return path.join(__dirname, 'fixtures', filename, ...pathParts);
|
||||
}
|
||||
|
||||
overrideEntryPoint(module);
|
||||
addPlugin(plugins.nodejs);
|
||||
|
||||
export { TEST_FAKE_APP, TEST_HOST, BASE_CAPS, W3C_PREFIXED_CAPS, W3C_CAPS, PROJECT_ROOT, getTestPort, rewiremock, resolveFixture, FAKE_DRIVER_DIR, PACKAGE_ROOT };
|
||||
export {
|
||||
TEST_FAKE_APP,
|
||||
TEST_HOST,
|
||||
BASE_CAPS,
|
||||
W3C_PREFIXED_CAPS,
|
||||
W3C_CAPS,
|
||||
PROJECT_ROOT,
|
||||
getTestPort,
|
||||
rewiremock,
|
||||
resolveFixture,
|
||||
FAKE_DRIVER_DIR,
|
||||
PACKAGE_ROOT,
|
||||
};
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
// @ts-check
|
||||
|
||||
import { PLUGIN_TYPE } from '../../lib/constants';
|
||||
import {PLUGIN_TYPE} from '../../lib/constants';
|
||||
import B from 'bluebird';
|
||||
import { BaseDriver } from '@appium/base-driver';
|
||||
import { FakeDriver } from '@appium/fake-driver';
|
||||
import { sleep } from 'asyncbox';
|
||||
import {BaseDriver} from '@appium/base-driver';
|
||||
import {FakeDriver} from '@appium/fake-driver';
|
||||
import {sleep} from 'asyncbox';
|
||||
import _ from 'lodash';
|
||||
import { createSandbox } from 'sinon';
|
||||
import { finalizeSchema, registerSchema, resetSchema } from '../../lib/schema/schema';
|
||||
import { insertAppiumPrefixes, removeAppiumPrefixes } from '../../lib/utils';
|
||||
import { rewiremock, BASE_CAPS, W3C_CAPS, W3C_PREFIXED_CAPS } from '../helpers';
|
||||
import {createSandbox} from 'sinon';
|
||||
import {finalizeSchema, registerSchema, resetSchema} from '../../lib/schema/schema';
|
||||
import {insertAppiumPrefixes, removeAppiumPrefixes} from '../../lib/utils';
|
||||
import {rewiremock, BASE_CAPS, W3C_CAPS, W3C_PREFIXED_CAPS} from '../helpers';
|
||||
|
||||
const SESSION_ID = '1';
|
||||
|
||||
@@ -28,13 +28,13 @@ describe('AppiumDriver', function () {
|
||||
|
||||
MockConfig = {
|
||||
getBuildInfo: sandbox.stub().returns({
|
||||
version: '2.0'
|
||||
version: '2.0',
|
||||
}),
|
||||
updateBuildInfo: sandbox.stub().resolves(),
|
||||
APPIUM_VER: '2.0'
|
||||
APPIUM_VER: '2.0',
|
||||
};
|
||||
({AppiumDriver} = rewiremock.proxy(() => require('../../lib/appium'), {
|
||||
'../../lib/config': MockConfig
|
||||
'../../lib/config': MockConfig,
|
||||
}));
|
||||
});
|
||||
|
||||
@@ -75,11 +75,11 @@ describe('AppiumDriver', function () {
|
||||
* @param {*} DriverClass
|
||||
* @returns {[AppiumDriver, sinon.SinonMock]}
|
||||
*/
|
||||
function getDriverAndFakeDriver (appiumArgs = {}, DriverClass = FakeDriver) {
|
||||
function getDriverAndFakeDriver(appiumArgs = {}, DriverClass = FakeDriver) {
|
||||
const appium = new AppiumDriver(appiumArgs);
|
||||
fakeDriver = new DriverClass();
|
||||
const mockFakeDriver = sandbox.mock(fakeDriver);
|
||||
const mockedDriverReturnerClass = function Driver () {
|
||||
const mockedDriverReturnerClass = function Driver() {
|
||||
return fakeDriver;
|
||||
};
|
||||
|
||||
@@ -88,7 +88,7 @@ describe('AppiumDriver', function () {
|
||||
driver: mockedDriverReturnerClass,
|
||||
version: '1.2.3',
|
||||
driverName: 'fake',
|
||||
})
|
||||
}),
|
||||
};
|
||||
|
||||
return [appium, mockFakeDriver];
|
||||
@@ -107,18 +107,25 @@ describe('AppiumDriver', function () {
|
||||
});
|
||||
|
||||
it(`should call inner driver's createSession with desired capabilities`, async function () {
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withExactArgs(undefined, null, W3C_CAPS, [])
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withExactArgs(undefined, null, W3C_CAPS, [])
|
||||
.returns([SESSION_ID, removeAppiumPrefixes(W3C_PREFIXED_CAPS)]);
|
||||
await appium.createSession(undefined, null, W3C_CAPS);
|
||||
mockFakeDriver.verify();
|
||||
});
|
||||
it(`should call inner driver's createSession with desired and default capabilities`, async function () {
|
||||
let defaultCaps = {'appium:someCap': 'hello'};
|
||||
let allCaps = {...W3C_CAPS, alwaysMatch: {...W3C_CAPS.alwaysMatch, ...defaultCaps}};
|
||||
let allCaps = {
|
||||
...W3C_CAPS,
|
||||
alwaysMatch: {...W3C_CAPS.alwaysMatch, ...defaultCaps},
|
||||
};
|
||||
appium.args.defaultCapabilities = defaultCaps;
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withArgs(undefined, null, allCaps)
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withArgs(undefined, null, allCaps)
|
||||
.returns([SESSION_ID, removeAppiumPrefixes(allCaps.alwaysMatch)]);
|
||||
await appium.createSession(undefined, null, W3C_CAPS);
|
||||
mockFakeDriver.verify();
|
||||
@@ -128,8 +135,10 @@ describe('AppiumDriver', function () {
|
||||
// should do nothing
|
||||
let defaultCaps = {platformName: 'Ersatz'};
|
||||
appium.args.defaultCapabilities = defaultCaps;
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withArgs(undefined, null, W3C_CAPS)
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withArgs(undefined, null, W3C_CAPS)
|
||||
.returns([SESSION_ID, removeAppiumPrefixes(W3C_PREFIXED_CAPS)]);
|
||||
await appium.createSession(undefined, null, W3C_CAPS);
|
||||
mockFakeDriver.verify();
|
||||
@@ -138,19 +147,14 @@ describe('AppiumDriver', function () {
|
||||
appium.args.sessionOverride = true;
|
||||
|
||||
// mock three sessions that should be removed when the new one is created
|
||||
let fakeDrivers = [
|
||||
new FakeDriver(),
|
||||
new FakeDriver(),
|
||||
new FakeDriver(),
|
||||
];
|
||||
let fakeDrivers = [new FakeDriver(), new FakeDriver(), new FakeDriver()];
|
||||
let mockFakeDrivers = _.map(fakeDrivers, (fd) => sandbox.mock(fd));
|
||||
mockFakeDrivers[0].expects('deleteSession')
|
||||
.once();
|
||||
mockFakeDrivers[1].expects('deleteSession')
|
||||
mockFakeDrivers[0].expects('deleteSession').once();
|
||||
mockFakeDrivers[1]
|
||||
.expects('deleteSession')
|
||||
.once()
|
||||
.throws('Cannot shut down Android driver; it has already shut down');
|
||||
mockFakeDrivers[2].expects('deleteSession')
|
||||
.once();
|
||||
mockFakeDrivers[2].expects('deleteSession').once();
|
||||
appium.sessions['abc-123-xyz'] = fakeDrivers[0];
|
||||
appium.sessions['xyz-321-abc'] = fakeDrivers[1];
|
||||
appium.sessions['123-abc-xyz'] = fakeDrivers[2];
|
||||
@@ -158,8 +162,10 @@ describe('AppiumDriver', function () {
|
||||
let sessions = await appium.getSessions();
|
||||
sessions.should.have.length(3);
|
||||
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withExactArgs(undefined, null, W3C_CAPS, [])
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withExactArgs(undefined, null, W3C_CAPS, [])
|
||||
.returns([SESSION_ID, removeAppiumPrefixes(W3C_PREFIXED_CAPS)]);
|
||||
await appium.createSession(undefined, null, W3C_CAPS);
|
||||
|
||||
@@ -172,8 +178,10 @@ describe('AppiumDriver', function () {
|
||||
mockFakeDriver.verify();
|
||||
});
|
||||
it('should call "createSession" with W3C capabilities argument, if provided', async function () {
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withArgs(undefined, undefined, W3C_CAPS)
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withArgs(undefined, undefined, W3C_CAPS)
|
||||
.returns([SESSION_ID, BASE_CAPS]);
|
||||
await appium.createSession(undefined, undefined, W3C_CAPS);
|
||||
mockFakeDriver.verify();
|
||||
@@ -187,8 +195,10 @@ describe('AppiumDriver', function () {
|
||||
'appium:someOtherParm': 'someOtherParm',
|
||||
},
|
||||
};
|
||||
mockFakeDriver.expects('createSession')
|
||||
.once().withArgs(undefined, undefined, {
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.withArgs(undefined, undefined, {
|
||||
alwaysMatch: {
|
||||
...w3cCaps.alwaysMatch,
|
||||
'appium:someOtherParm': 'someOtherParm',
|
||||
@@ -220,8 +230,7 @@ describe('AppiumDriver', function () {
|
||||
});
|
||||
|
||||
it('should assign args to property `cliArgs`', async function () {
|
||||
class ArgsDriver extends BaseDriver {
|
||||
}
|
||||
class ArgsDriver extends BaseDriver {}
|
||||
const args = {driver: {fake: {randomArg: 1234}}};
|
||||
[appium, mockFakeDriver] = getDriverAndFakeDriver(args, ArgsDriver);
|
||||
const {value} = await appium.createSession(undefined, undefined, W3C_CAPS);
|
||||
@@ -249,11 +258,9 @@ describe('AppiumDriver', function () {
|
||||
sessions = await appium.getSessions();
|
||||
sessions.should.have.length(0);
|
||||
});
|
||||
it('should call inner driver\'s deleteSession method', async function () {
|
||||
it("should call inner driver's deleteSession method", async function () {
|
||||
const [sessionId] = (await appium.createSession(null, null, W3C_CAPS)).value;
|
||||
mockFakeDriver.expects('deleteSession')
|
||||
.once().withExactArgs(sessionId, [])
|
||||
.returns();
|
||||
mockFakeDriver.expects('deleteSession').once().withExactArgs(sessionId, []).returns();
|
||||
await appium.deleteSession(sessionId);
|
||||
mockFakeDriver.verify();
|
||||
|
||||
@@ -279,12 +286,20 @@ describe('AppiumDriver', function () {
|
||||
sessions.should.be.empty;
|
||||
});
|
||||
it('should return sessions created', async function () {
|
||||
let caps1 = {alwaysMatch: {...W3C_PREFIXED_CAPS, 'appium:cap': 'value'}};
|
||||
let caps2 = {alwaysMatch: {...W3C_PREFIXED_CAPS, 'appium:cap': 'other value'}};
|
||||
mockFakeDriver.expects('createSession').once()
|
||||
let caps1 = {
|
||||
alwaysMatch: {...W3C_PREFIXED_CAPS, 'appium:cap': 'value'},
|
||||
};
|
||||
let caps2 = {
|
||||
alwaysMatch: {...W3C_PREFIXED_CAPS, 'appium:cap': 'other value'},
|
||||
};
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.returns(['fake-session-id-1', removeAppiumPrefixes(caps1.alwaysMatch)]);
|
||||
let [session1Id, session1Caps] = (await appium.createSession(null, null, caps1)).value;
|
||||
mockFakeDriver.expects('createSession').once()
|
||||
mockFakeDriver
|
||||
.expects('createSession')
|
||||
.once()
|
||||
.returns(['fake-session-id-2', removeAppiumPrefixes(caps2.alwaysMatch)]);
|
||||
let [session2Id, session2Caps] = (await appium.createSession(null, null, caps2)).value;
|
||||
|
||||
@@ -308,8 +323,7 @@ describe('AppiumDriver', function () {
|
||||
status.build.version.should.exist;
|
||||
});
|
||||
});
|
||||
describe('sessionExists', function () {
|
||||
});
|
||||
describe('sessionExists', function () {});
|
||||
describe('attachUnexpectedShutdownHandler', function () {
|
||||
/** @type {AppiumDriver} */
|
||||
let appium;
|
||||
@@ -324,7 +338,7 @@ describe('AppiumDriver', function () {
|
||||
});
|
||||
|
||||
it('should remove session if inner driver unexpectedly exits with an error', async function () {
|
||||
let [sessionId,] = (await appium.createSession(null, null, _.clone(W3C_CAPS))).value; // eslint-disable-line comma-spacing
|
||||
let [sessionId] = (await appium.createSession(null, null, _.clone(W3C_CAPS))).value; // eslint-disable-line comma-spacing
|
||||
_.keys(appium.sessions).should.contain(sessionId);
|
||||
appium.sessions[sessionId].eventEmitter.emit('onUnexpectedShutdown', new Error('Oops'));
|
||||
// let event loop spin so rejection is handled
|
||||
@@ -332,7 +346,7 @@ describe('AppiumDriver', function () {
|
||||
_.keys(appium.sessions).should.not.contain(sessionId);
|
||||
});
|
||||
it('should remove session if inner driver unexpectedly exits with no error', async function () {
|
||||
let [sessionId,] = (await appium.createSession(null, null, _.clone(W3C_CAPS))).value; // eslint-disable-line comma-spacing
|
||||
let [sessionId] = (await appium.createSession(null, null, _.clone(W3C_CAPS))).value; // eslint-disable-line comma-spacing
|
||||
_.keys(appium.sessions).should.contain(sessionId);
|
||||
appium.sessions[sessionId].eventEmitter.emit('onUnexpectedShutdown');
|
||||
// let event loop spin so rejection is handled
|
||||
@@ -363,18 +377,18 @@ describe('AppiumDriver', function () {
|
||||
properties: {
|
||||
randomArg: {
|
||||
type: 'number',
|
||||
default: 2000
|
||||
}
|
||||
}
|
||||
default: 2000,
|
||||
},
|
||||
},
|
||||
});
|
||||
registerSchema(PLUGIN_TYPE, ArrayArgPlugin.pluginName, {
|
||||
type: 'object',
|
||||
properties: {
|
||||
arr: {
|
||||
type: 'array',
|
||||
default: []
|
||||
}
|
||||
}
|
||||
default: [],
|
||||
},
|
||||
},
|
||||
});
|
||||
finalizeSchema();
|
||||
});
|
||||
@@ -391,7 +405,9 @@ describe('AppiumDriver', function () {
|
||||
|
||||
describe('when args are equal to the schema defaults', function () {
|
||||
it('should not set CLI args', function () {
|
||||
const appium = new AppiumDriver({plugin: {[ArgsPlugin.pluginName]: {randomArg: 2000}}});
|
||||
const appium = new AppiumDriver({
|
||||
plugin: {[ArgsPlugin.pluginName]: {randomArg: 2000}},
|
||||
});
|
||||
appium.pluginClasses = [NoArgsPlugin, ArgsPlugin];
|
||||
for (const plugin of appium.createPluginInstances()) {
|
||||
chai.expect(plugin.cliArgs).not.to.exist;
|
||||
@@ -400,7 +416,9 @@ describe('AppiumDriver', function () {
|
||||
|
||||
describe('when the default is an "object"', function () {
|
||||
it('should not set CLI args', function () {
|
||||
const appium = new AppiumDriver({plugin: {[ArrayArgPlugin.pluginName]: {arr: []}}});
|
||||
const appium = new AppiumDriver({
|
||||
plugin: {[ArrayArgPlugin.pluginName]: {arr: []}},
|
||||
});
|
||||
appium.pluginClasses = [NoArgsPlugin, ArgsPlugin, ArrayArgPlugin];
|
||||
for (const plugin of appium.createPluginInstances()) {
|
||||
chai.expect(plugin.cliArgs).not.to.exist;
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
|
||||
import { tempDir, fs, npm } from '@appium/support';
|
||||
import { loadExtensions } from '../../../lib/extension';
|
||||
import { Manifest } from '../../../lib/extension/manifest';
|
||||
import {tempDir, fs, npm} from '@appium/support';
import {loadExtensions} from '../../../lib/extension';
import {Manifest} from '../../../lib/extension/manifest';
import DriverCommand from '../../../lib/cli/driver-command';
import { createSandbox } from 'sinon';
import {createSandbox} from 'sinon';

describe('DriverCommand', function () {
/**
@@ -40,12 +39,14 @@ describe('DriverCommand', function () {
npmMock = sandbox.mock(npm);
});

function setupDriverUpdate (curVersion, latestVersion, latestSafeVersion) {
npmMock.expects('getLatestVersion')
function setupDriverUpdate(curVersion, latestVersion, latestSafeVersion) {
npmMock
.expects('getLatestVersion')
.once()
.withExactArgs(appiumHome, pkgName)
.returns(latestVersion);
npmMock.expects('getLatestSafeUpgradeVersion')
npmMock
.expects('getLatestSafeUpgradeVersion')
.once()
.withExactArgs(appiumHome, pkgName, curVersion)
.returns(latestSafeVersion);

@@ -1,9 +1,5 @@
import {createSandbox} from 'sinon';
import {
finalizeSchema,
resetSchema,
SchemaFinalizationError,
} from '../../../lib/schema/schema';
import {finalizeSchema, resetSchema, SchemaFinalizationError} from '../../../lib/schema/schema';
import {rewiremock} from '../../helpers';

const expect = chai.expect;
@@ -19,9 +15,7 @@ describe('cli/schema-args', function () {

beforeEach(function () {
sandbox = createSandbox();
({toParserArgs} = rewiremock.proxy(() =>
require('../../../lib/schema/cli-args')
));
({toParserArgs} = rewiremock.proxy(() => require('../../../lib/schema/cli-args')));
});

afterEach(function () {
@@ -35,10 +29,7 @@ describe('cli/schema-args', function () {
afterEach(resetSchema);

it('should return a Map', function () {
expect(toParserArgs())
.to.be.an.instanceof(Map)
.and.have.property('size')
.that.is.above(0);
expect(toParserArgs()).to.be.an.instanceof(Map).and.have.property('size').that.is.above(0);
});

it('should generate metavars in SCREAMING_SNAKE_CASE', function () {
@@ -46,9 +37,7 @@ describe('cli/schema-args', function () {
const argDefsWithMetavar = [...argDefs].filter((arg) => arg[1].metavar);
expect(argDefsWithMetavar).not.to.be.empty;
// is there a more idiomatic way to do this?
expect(
argDefsWithMetavar.every((arg) => /[A-Z_]+/.test(arg[1].metavar))
).to.be.true;
expect(argDefsWithMetavar.every((arg) => /[A-Z_]+/.test(arg[1].metavar))).to.be.true;
});
});


@@ -9,26 +9,12 @@ import {resolveFixture, rewiremock} from '../helpers';
const expect = chai.expect;

describe('config-file', function () {
const GOOD_YAML_CONFIG_FILEPATH = resolveFixture(
'config',
'appium.config.good.yaml'
);
const GOOD_JSON_CONFIG_FILEPATH = resolveFixture(
'config',
'appium.config.good.json'
);
const GOOD_JS_CONFIG_FILEPATH = resolveFixture(
'config',
'appium.config.good.js'
);
const GOOD_YAML_CONFIG = YAML.parse(
fs.readFileSync(GOOD_YAML_CONFIG_FILEPATH, 'utf8')
);
const GOOD_YAML_CONFIG_FILEPATH = resolveFixture('config', 'appium.config.good.yaml');
const GOOD_JSON_CONFIG_FILEPATH = resolveFixture('config', 'appium.config.good.json');
const GOOD_JS_CONFIG_FILEPATH = resolveFixture('config', 'appium.config.good.js');
const GOOD_YAML_CONFIG = YAML.parse(fs.readFileSync(GOOD_YAML_CONFIG_FILEPATH, 'utf8'));
const GOOD_JSON_CONFIG = require(GOOD_JSON_CONFIG_FILEPATH);
const BAD_JSON_CONFIG_FILEPATH = resolveFixture(
'config',
'appium.config.bad.json'
);
const BAD_JSON_CONFIG_FILEPATH = resolveFixture('config', 'appium.config.bad.json');
const BAD_JSON_CONFIG = require(BAD_JSON_CONFIG_FILEPATH);

/**
@@ -192,9 +178,7 @@ describe('config-file', function () {

describe('when the config file is not empty', function () {
it('should validate the config against a schema', function () {
expect(schema.validate).to.have.been.calledOnceWith(
GOOD_JSON_CONFIG
);
expect(schema.validate).to.have.been.calledOnceWith(GOOD_JSON_CONFIG);
});

describe('when the config file is valid', function () {
@@ -274,9 +258,7 @@ describe('config-file', function () {
});

it('should pass error through', async function () {
await expect(readConfigFile('appium.json')).to.be.rejectedWith(
/guru meditation/
);
await expect(readConfigFile('appium.json')).to.be.rejectedWith(/guru meditation/);
});
});

@@ -298,9 +280,7 @@ describe('config-file', function () {

describe('when the config file is not empty', function () {
it('should validate the config against a schema', function () {
expect(schema.validate).to.have.been.calledOnceWith(
GOOD_JSON_CONFIG
);
expect(schema.validate).to.have.been.calledOnceWith(GOOD_JSON_CONFIG);
});

describe('when the config file is valid', function () {
@@ -334,19 +314,13 @@ describe('config-file', function () {
describe('formatErrors()', function () {
describe('when provided `errors` as an empty array', function () {
it('should throw', function () {
expect(() => formatErrors([])).to.throw(
TypeError,
'Array of errors must be non-empty'
);
expect(() => formatErrors([])).to.throw(TypeError, 'Array of errors must be non-empty');
});
});

describe('when provided `errors` as `undefined`', function () {
it('should throw', function () {
expect(() => formatErrors()).to.throw(
TypeError,
'Array of errors must be non-empty'
);
expect(() => formatErrors()).to.throw(TypeError, 'Array of errors must be non-empty');
});
});


@@ -1,12 +1,25 @@
// @ts-check

import _ from 'lodash';
import { createSandbox } from 'sinon';
import { getParser } from '../../lib/cli/parser';
import { checkNodeOk, getBuildInfo, getNonDefaultServerArgs, showBuildInfo, showConfig, validateTmpDir, warnNodeDeprecations } from '../../lib/config';
import { PLUGIN_TYPE } from '../../lib/constants';
import {createSandbox} from 'sinon';
import {getParser} from '../../lib/cli/parser';
import {
checkNodeOk,
getBuildInfo,
getNonDefaultServerArgs,
showBuildInfo,
showConfig,
validateTmpDir,
warnNodeDeprecations,
} from '../../lib/config';
import {PLUGIN_TYPE} from '../../lib/constants';
import logger from '../../lib/logger';
import { finalizeSchema, getDefaultsForSchema, registerSchema, resetSchema } from '../../lib/schema/schema';
import {
finalizeSchema,
getDefaultsForSchema,
registerSchema,
resetSchema,
} from '../../lib/schema/schema';

describe('Config', function () {
/** @type {sinon.SinonSandbox} */
@@ -44,9 +57,11 @@ describe('Config', function () {
it('should dump the current Appium config', function () {
showConfig(
{address: 'bar'},
{config: {
// @ts-expect-error
server: {callbackAddress: 'quux'}}
{
config: {
// @ts-expect-error
server: {callbackAddress: 'quux'},
},
},
{port: 1234},
{allowCors: false}
@@ -103,9 +118,17 @@ describe('Config', function () {
describe('checkNodeOk', function () {
describe('unsupported nodes', function () {
const unsupportedVersions = [
'v0.1', 'v0.9.12', 'v0.10.36', 'v0.12.14',
'v4.4.7', 'v5.7.0', 'v6.3.1', 'v7.1.1',
'v8.0.0', 'v9.2.3', 'v10.1.0',
'v0.1',
'v0.9.12',
'v0.10.36',
'v0.12.14',
'v4.4.7',
'v5.7.0',
'v6.3.1',
'v7.1.1',
'v8.0.0',
'v9.2.3',
'v10.1.0',
];
for (const version of unsupportedVersions) {
it(`should fail if node is ${version}`, function () {
@@ -188,7 +211,10 @@ describe('Config', function () {
describe('with extension schemas', function () {
beforeEach(function () {
resetSchema();
registerSchema(PLUGIN_TYPE, 'crypto-fiend', {type: 'object', properties: {elite: {type: 'boolean', default: true}}});
registerSchema(PLUGIN_TYPE, 'crypto-fiend', {
type: 'object',
properties: {elite: {type: 'boolean', default: true}},
});
finalizeSchema();
getParser(true);
args = getDefaultsForSchema();
@@ -210,7 +236,9 @@ describe('Config', function () {

describe('validateTmpDir', function () {
it('should fail to use a tmp dir with incorrect permissions', function () {
validateTmpDir('/private/if_you_run_with_sudo_this_wont_fail').should.be.rejectedWith(/could not ensure/);
validateTmpDir('/private/if_you_run_with_sudo_this_wont_fail').should.be.rejectedWith(
/could not ensure/
);
});
it('should fail to use an undefined tmp dir', function () {
// @ts-expect-error

@@ -317,18 +317,17 @@ describe('DriverConfig', function () {
describe('when the extension data is missing `schema`', function () {
it('should throw', function () {
delete extData.schema;
expect(() =>
driverConfig.readExtensionSchema(extName, extData)
).to.throw(TypeError, /why is this function being called/i);
expect(() => driverConfig.readExtensionSchema(extName, extData)).to.throw(
TypeError,
/why is this function being called/i
);
});
});

describe('when the extension schema has already been registered (with the same schema)', function () {
it('should not throw', function () {
driverConfig.readExtensionSchema(extName, extData);
expect(() =>
driverConfig.readExtensionSchema(extName, extData)
).not.to.throw();
expect(() => driverConfig.readExtensionSchema(extName, extData)).not.to.throw();
});
});


@@ -36,10 +36,7 @@ describe('Manifest', function () {
({MockPackageChanged, MockAppiumSupport, overrides, sandbox} = initMocks());
MockAppiumSupport.fs.readFile.resolves(yamlFixture);

({Manifest} = rewiremock.proxy(
() => require('../../../lib/extension/manifest'),
overrides
));
({Manifest} = rewiremock.proxy(() => require('../../../lib/extension/manifest'), overrides));

Manifest.getInstance.cache = new Map();
});
@@ -71,9 +68,7 @@ describe('Manifest', function () {
describe('property', function () {
describe('appiumHome', function () {
it('should return the `appiumHome` path', function () {
expect(Manifest.getInstance('/some/path').appiumHome).to.equal(
'/some/path'
);
expect(Manifest.getInstance('/some/path').appiumHome).to.equal('/some/path');
});

it('should not be writable', function () {
@@ -88,8 +83,7 @@ describe('Manifest', function () {
describe('manifestPath', function () {
describe('before `read()` has been called', function () {
it('should be undefined', function () {
expect(Manifest.getInstance('/some/path').manifestPath).to.be
.undefined;
expect(Manifest.getInstance('/some/path').manifestPath).to.be.undefined;
});
});

@@ -348,10 +342,7 @@ describe('Manifest', function () {
});

it('should add an extension to the internal data', function () {
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
);
manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json');
expect(manifest.getExtensionData('driver')).to.deep.equal({
myDriver: {
automationName: 'derp',
@@ -366,29 +357,18 @@ describe('Manifest', function () {
});

it('should return `true`', function () {
expect(
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
)
).to.be.true;
expect(manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json')).to.be
.true;
});

describe('when the driver has already been registered', function () {
beforeEach(function () {
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
);
manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json');
});

it('should return `false`', function () {
expect(
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
)
).to.be.false;
expect(manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json')).to
.be.false;
});
});
});
@@ -408,10 +388,7 @@ describe('Manifest', function () {
});

it('should add an extension to the internal data', function () {
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
);
manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json');
expect(manifest.getExtensionData(PLUGIN_TYPE)).to.deep.equal({
myPlugin: {
mainClass: 'SomeClass',
@@ -424,29 +401,18 @@ describe('Manifest', function () {
});

it('should return `true`', function () {
expect(
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
)
).to.be.true;
expect(manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json')).to.be
.true;
});

describe('when the plugin has already been registered', function () {
beforeEach(function () {
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
);
manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json');
});

it('should return `false`', function () {
expect(
manifest.addExtensionFromPackage(
packageJson,
'/some/path/to/package.json'
)
).to.be.false;
expect(manifest.addExtensionFromPackage(packageJson, '/some/path/to/package.json')).to
.be.false;
});
});
});
@@ -504,9 +470,7 @@ describe('Manifest', function () {

it('should add a found extension', async function () {
await manifest.syncWithInstalledExtensions();
expect(manifest.getExtensionData(DRIVER_TYPE)).to.have.property(
'myDriver'
);
expect(manifest.getExtensionData(DRIVER_TYPE)).to.have.property('myDriver');
});
});


@@ -6,21 +6,17 @@
*/

import path from 'path';
import { createSandbox } from 'sinon';
import {createSandbox} from 'sinon';

export function initMocks (sandbox = createSandbox()) {
export function initMocks(sandbox = createSandbox()) {
/**
* Mocks for package `@appium/support`
* @type {MockAppiumSupport}
*/
const MockAppiumSupport = {
fs: {
readFile: /** @type {MockAppiumSupportFs['readFile']} */ (
sandbox.stub().resolves('{}')
),
writeFile: /** @type {MockAppiumSupportFs['writeFile']} */ (
sandbox.stub().resolves(true)
),
readFile: /** @type {MockAppiumSupportFs['readFile']} */ (sandbox.stub().resolves('{}')),
writeFile: /** @type {MockAppiumSupportFs['writeFile']} */ (sandbox.stub().resolves(true)),
walk: /** @type {MockAppiumSupportFs['walk']} */ (
sandbox.stub().returns({
[Symbol.asyncIterator]: sandbox
@@ -28,28 +24,20 @@ export function initMocks (sandbox = createSandbox()) {
.returns({next: sandbox.stub().resolves({done: true})}),
})
),
mkdirp: /** @type {MockAppiumSupportFs['mkdirp']} */ (
sandbox.stub().resolves()
),
mkdirp: /** @type {MockAppiumSupportFs['mkdirp']} */ (sandbox.stub().resolves()),
},
env: {
resolveAppiumHome:
/** @type {MockAppiumSupportEnv['resolveAppiumHome']} */ (
resolveAppiumHome: /** @type {MockAppiumSupportEnv['resolveAppiumHome']} */ (
sandbox.stub().resolves('/some/path')
),
resolveManifestPath:
/** @type {MockAppiumSupportEnv['resolveManifestPath']} */ (
resolveManifestPath: /** @type {MockAppiumSupportEnv['resolveManifestPath']} */ (
sandbox.stub().resolves('/some/path/extensions.yaml')
),
hasAppiumDependency:
/** @type {MockAppiumSupportEnv['hasAppiumDependency']} */ (
hasAppiumDependency: /** @type {MockAppiumSupportEnv['hasAppiumDependency']} */ (
sandbox.stub().resolves(false)
),
readPackageInDir:
/** @type {MockAppiumSupportEnv['readPackageInDir']} */ (
sandbox
.stub()
.callsFake(async () => MockAppiumSupport.env.__pkg)
readPackageInDir: /** @type {MockAppiumSupportEnv['readPackageInDir']} */ (
sandbox.stub().callsFake(async () => MockAppiumSupport.env.__pkg)
),
__pkg: {
name: 'mock-package',
@@ -62,11 +50,7 @@ export function initMocks (sandbox = createSandbox()) {
getLogger: /** @type {MockAppiumSupportLogger['getLogger']} */ (
sandbox
.stub()
.returns(
sandbox.stub(
new global.console.Console(process.stdout, process.stderr),
),
)
.returns(sandbox.stub(new global.console.Console(process.stdout, process.stderr)))
),
},
};

@@ -250,8 +250,7 @@ describe('PluginConfig', function () {
});

it('should return an empty array', function () {
expect(pluginConfig.getSchemaProblems(externalManifest, 'foo')).to
.be.empty;
expect(pluginConfig.getSchemaProblems(externalManifest, 'foo')).to.be.empty;
});
});

@@ -309,9 +308,10 @@ describe('PluginConfig', function () {
it('should throw', function () {
// @ts-expect-error
delete extData.schema;
expect(() =>
pluginConfig.readExtensionSchema(extName, extData)
).to.throw(TypeError, /why is this function being called/i);
expect(() => pluginConfig.readExtensionSchema(extName, extData)).to.throw(
TypeError,
/why is this function being called/i
);
});
});

@@ -319,9 +319,7 @@ describe('PluginConfig', function () {
describe('when the schema is identical (presumably the same extension)', function () {
it('should not throw', function () {
pluginConfig.readExtensionSchema(extName, extData);
expect(() =>
pluginConfig.readExtensionSchema(extName, extData)
).not.to.throw();
expect(() => pluginConfig.readExtensionSchema(extName, extData)).not.to.throw();
});
});

@@ -329,9 +327,9 @@ describe('PluginConfig', function () {
it('should throw', function () {
pluginConfig.readExtensionSchema(extName, extData);
MockResolveFrom.returns(resolveFixture('driver.schema.js'));
expect(() =>
pluginConfig.readExtensionSchema(extName, extData)
).to.throw(/conflicts with an existing schema/i);
expect(() => pluginConfig.readExtensionSchema(extName, extData)).to.throw(
/conflicts with an existing schema/i
);
});
});
});

@@ -32,10 +32,7 @@ describe('grid-register', function () {
axios: sandbox.stub().resolves({data: '', status: 200}),
};

({default: registerNode} = rewiremock.proxy(
() => require('../../lib/grid-register'),
mocks
));
({default: registerNode} = rewiremock.proxy(() => require('../../lib/grid-register'), mocks));
});

describe('when provided a path to a config file', function () {

@@ -1,8 +1,6 @@

import { init as logsinkInit, clear as logsinkClear } from '../../lib/logsink';
import { createSandbox } from 'sinon';
import { logger } from '@appium/support';

import {init as logsinkInit, clear as logsinkClear} from '../../lib/logsink';
import {createSandbox} from 'sinon';
import {logger} from '@appium/support';

// temporarily turn on logging to stdio, so we can catch and query
const forceLogs = process.env._FORCE_LOGS;
@@ -30,7 +28,7 @@ describe('logging', function () {
const warnMsg = 'some warning';
const debugMsg = 'some debug';

function doLogging () {
function doLogging() {
log.error(errorMsg);
log.warn(warnMsg);
log.debug(debugMsg);

@@ -1,9 +1,9 @@
import { DRIVER_TYPE, PLUGIN_TYPE } from '../../lib/constants';
import { getParser } from '../../lib/cli/parser';
import { INSTALL_TYPES } from '../../lib/extension/extension-config';
import {DRIVER_TYPE, PLUGIN_TYPE} from '../../lib/constants';
import {getParser} from '../../lib/cli/parser';
import {INSTALL_TYPES} from '../../lib/extension/extension-config';
import * as schema from '../../lib/schema/schema';
import { readConfigFile } from '../../lib/config-file';
import { resolveFixture } from '../helpers';
import {readConfigFile} from '../../lib/config-file';
import {resolveFixture} from '../helpers';

// these paths should not make assumptions about the current working directory
const ALLOW_FIXTURE = resolveFixture('allow-feat.txt');
@@ -53,23 +53,33 @@ describe('parser', function () {
// TODO: figure out how best to suppress color in error message
describe('invalid arguments', function () {
it('should throw an error with unknown argument', function () {
(() => {p.parseArgs(['--apple']);}).should.throw(/unrecognized arguments: --apple/i);
(() => {
p.parseArgs(['--apple']);
}).should.throw(/unrecognized arguments: --apple/i);
});

it('should throw an error for an invalid value ("hostname")', function () {
(() => {p.parseArgs(['--address', '-42']);}).should.throw(/must match format "hostname"/i);
(() => {
p.parseArgs(['--address', '-42']);
}).should.throw(/must match format "hostname"/i);
});

it('should throw an error for an invalid value ("uri")', function () {
(() => {p.parseArgs(['--webhook', 'blub']);}).should.throw(/must match format "uri"/i);
(() => {
p.parseArgs(['--webhook', 'blub']);
}).should.throw(/must match format "uri"/i);
});

it('should throw an error for an invalid value (using "enum")', function () {
(() => {p.parseArgs(['--log-level', '-42']);}).should.throw(/must be equal to one of the allowed values/i);
(() => {
p.parseArgs(['--log-level', '-42']);
}).should.throw(/must be equal to one of the allowed values/i);
});

it('should throw an error for incorrectly formatted arg (matching "dest")', function () {
(() => {p.parseArgs(['--loglevel', '-42']);}).should.throw(/unrecognized arguments: --loglevel/i);
(() => {
p.parseArgs(['--loglevel', '-42']);
}).should.throw(/unrecognized arguments: --loglevel/i);
});
});

@@ -86,10 +96,18 @@ describe('parser', function () {
});

it('should throw an error with invalid arg to default capabilities', function () {
(() => {p.parseArgs(['-dc', '42']);}).should.throw();
(() => {p.parseArgs(['-dc', 'false']);}).should.throw();
(() => {p.parseArgs(['-dc', 'null']);}).should.throw();
(() => {p.parseArgs(['-dc', 'does/not/exist.json']);}).should.throw();
(() => {
p.parseArgs(['-dc', '42']);
}).should.throw();
(() => {
p.parseArgs(['-dc', 'false']);
}).should.throw();
(() => {
p.parseArgs(['-dc', 'null']);
}).should.throw();
(() => {
p.parseArgs(['-dc', 'does/not/exist.json']);
}).should.throw();
});

it('should parse --allow-insecure correctly', function () {
@@ -110,7 +128,10 @@ describe('parser', function () {

it('should parse --allow-insecure & --deny-insecure from files', function () {
const parsed = p.parseArgs([
'--allow-insecure', ALLOW_FIXTURE, '--deny-insecure', DENY_FIXTURE
'--allow-insecure',
ALLOW_FIXTURE,
'--deny-insecure',
DENY_FIXTURE,
]);
parsed.allowInsecure.should.eql(['feature1', 'feature2', 'feature3']);
parsed.denyInsecure.should.eql(['nofeature1', 'nofeature2', 'nofeature3']);
@@ -120,7 +141,6 @@ describe('parser', function () {
p.parseArgs(['--use-drivers', 'fake']).useDrivers.should.eql(['fake']);
});


it('should allow multiple --use-drivers', function () {
p.parseArgs(['--use-drivers', 'fake,phony']).useDrivers.should.eql(['fake', 'phony']);
});
@@ -136,7 +156,11 @@ describe('parser', function () {
// we have to require() here because babel will not compile stuff in node_modules
// (even if it's in the monorepo; there may be a way around this)
// anyway, if we do that, we need to use the `default` prop.
schema.registerSchema(DRIVER_TYPE, 'fake', require('@appium/fake-driver/build/lib/fake-driver-schema').default);
schema.registerSchema(
DRIVER_TYPE,
'fake',
require('@appium/fake-driver/build/lib/fake-driver-schema').default
);
schema.finalizeSchema();
p = getParser(true);
});
@@ -147,12 +171,14 @@ describe('parser', function () {
// the command-line flags are derived also from the schema.
// the result should be that the parsed args should match the config file.
const {config} = await readConfigFile(resolveFixture('config', 'driver-fake.config.json'));
const fakeDriverArgs = {fake: {sillyWebServerPort: 1234, sillyWebServerHost: 'hey'}};
const fakeDriverArgs = {
fake: {sillyWebServerPort: 1234, sillyWebServerHost: 'hey'},
};
const args = p.parseArgs([
'--driver-fake-silly-web-server-port',
fakeDriverArgs.fake.sillyWebServerPort,
'--driver-fake-silly-web-server-host',
fakeDriverArgs.fake.sillyWebServerHost
fakeDriverArgs.fake.sillyWebServerHost,
]);

args.driver.fake.should.eql(config.driver.fake);
@@ -165,30 +191,36 @@ describe('parser', function () {

it('should nicely handle extensions w/ dashes in them', function () {
schema.resetSchema();
schema.registerSchema(PLUGIN_TYPE, 'crypto-fiend', {type: 'object', properties: {elite: {type: 'boolean'}}});
schema.registerSchema(PLUGIN_TYPE, 'crypto-fiend', {
type: 'object',
properties: {elite: {type: 'boolean'}},
});
schema.finalizeSchema();
p = getParser(true);
const args = p.parseArgs([
'--plugin-crypto-fiend-elite'
]);
const args = p.parseArgs(['--plugin-crypto-fiend-elite']);

args.should.have.nested.property('plugin.crypto-fiend.elite', true);
});

describe('when user supplies invalid args', function () {
it('should error out', function () {
(() => p.parseArgs(['--driver-fake-silly-web-server-port', 'foo'])).should.throw(/must be integer/i);
(() => p.parseArgs(['--driver-fake-silly-web-server-port', 'foo'])).should.throw(
/must be integer/i
);
});
});

it('should not support --driver-args', function () {
(() => p.parseArgs(['--driver-args', '/some/file.json'])).should.throw(/unrecognized arguments/i);
(() => p.parseArgs(['--driver-args', '/some/file.json'])).should.throw(
/unrecognized arguments/i
);
});

it('should not support --plugin-args', function () {
(() => p.parseArgs(['--plugin-args', '/some/file.json'])).should.throw(/unrecognized arguments/i);
(() => p.parseArgs(['--plugin-args', '/some/file.json'])).should.throw(
/unrecognized arguments/i
);
});

});
});


@@ -1,7 +1,7 @@
// @ts-check

import { DRIVER_TYPE } from '../../../lib/constants';
import { ArgSpec } from '../../../lib/schema/arg-spec';
import {DRIVER_TYPE} from '../../../lib/constants';
import {ArgSpec} from '../../../lib/schema/arg-spec';

const {expect} = chai;

@@ -17,7 +17,7 @@ describe('ArgSpec', function () {
describe('when provided no extension information', function () {
it('should return a schema ID for a specific argument', function () {
expect(ArgSpec.toSchemaRef('foo')).to.equal(
'appium.json#/properties/server/properties/foo',
'appium.json#/properties/server/properties/foo'
);
});
});
@@ -25,7 +25,7 @@ describe('ArgSpec', function () {
describe('when provided extension information', function () {
it('should return a schema ID for a specific argument within an extension schema', function () {
expect(ArgSpec.toSchemaRef('bar', DRIVER_TYPE, 'stuff')).to.equal(
'driver-stuff.json#/properties/bar',
'driver-stuff.json#/properties/bar'
);
});
});
@@ -34,17 +34,13 @@ describe('ArgSpec', function () {
describe('toSchemaBaseRef()', function () {
describe('when provided no extension information', function () {
it('should return the base schema ID', function () {
expect(ArgSpec.toSchemaBaseRef()).to.equal(
'appium.json',
);
expect(ArgSpec.toSchemaBaseRef()).to.equal('appium.json');
});
});

describe('when provided extension information', function () {
it('should return a schema ID for an extension', function () {
expect(ArgSpec.toSchemaBaseRef(DRIVER_TYPE, 'stuff')).to.equal(
'driver-stuff.json',
);
expect(ArgSpec.toSchemaBaseRef(DRIVER_TYPE, 'stuff')).to.equal('driver-stuff.json');
});
});
});
@@ -59,7 +55,7 @@ describe('ArgSpec', function () {
describe('when provided extension information', function () {
it('should return an extension-specific arg name', function () {
expect(ArgSpec.toArg('no-oats', DRIVER_TYPE, 'bad-donkey')).to.equal(
'driver-bad-donkey-no-oats',
'driver-bad-donkey-no-oats'
);
});
});
@@ -68,15 +64,15 @@ describe('ArgSpec', function () {
describe('extensionInfoFromRootSchemaId()', function () {
describe('when provided the base schema ID', function () {
it('should return an empty object', function () {
expect(ArgSpec.extensionInfoFromRootSchemaId('appium.json')).to.be
.empty;
expect(ArgSpec.extensionInfoFromRootSchemaId('appium.json')).to.be.empty;
});
});

describe('when provided the schema ID of an extension schema', function () {
expect(
ArgSpec.extensionInfoFromRootSchemaId('driver-stuff.json'),
).to.eql({extType: DRIVER_TYPE, normalizedExtName: 'stuff'});
expect(ArgSpec.extensionInfoFromRootSchemaId('driver-stuff.json')).to.eql({
extType: DRIVER_TYPE,
normalizedExtName: 'stuff',
});
});
});
});

@@ -1,10 +1,10 @@
// @ts-check

import _ from 'lodash';
import { PLUGIN_TYPE } from '../../../lib/constants';
import { finalizeSchema, registerSchema, resetSchema } from '../../../lib/schema';
import { toParserArgs } from '../../../lib/schema/cli-args';
import { transformers } from '../../../lib/schema/cli-transformers';
import {PLUGIN_TYPE} from '../../../lib/constants';
import {finalizeSchema, registerSchema, resetSchema} from '../../../lib/schema';
import {toParserArgs} from '../../../lib/schema/cli-args';
import {transformers} from '../../../lib/schema/cli-transformers';

const {expect} = chai;

@@ -15,7 +15,7 @@ describe('cli-args', function () {
* @param {*} opts
* @returns
*/
function getArgs (opts = {}) {
function getArgs(opts = {}) {
let {extName, extType, schema} = opts;
if (schema && extName && extType) {
registerSchema(extType, extName, schema);
@@ -37,15 +37,15 @@ describe('cli-args', function () {

describe('boolean', function () {
beforeEach(function () {
const schema = {properties: {foo: {type: 'boolean'}}, type: 'object'};
const schema = {
properties: {foo: {type: 'boolean'}},
type: 'object',
};
result = getArgs({schema, extName, extType});
});

it('should return options containing `action` prop of `store_const` and no `type`', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'action',
'store_const',
);
expect(result['--plugin-blob-foo']).to.have.property('action', 'store_const');
});

it('should not contain a `metavar` property', function () {
@@ -55,22 +55,19 @@ describe('cli-args', function () {

describe('object', function () {
beforeEach(function () {
const schema = {properties: {foo: {type: 'object'}}, type: 'object'};
const schema = {
properties: {foo: {type: 'object'}},
type: 'object',
};
result = getArgs({schema, extName, extType});
});

it('should use the `json` transformer', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'type',
transformers.json,
);
expect(result['--plugin-blob-foo']).to.have.property('type', transformers.json);
});

it('should contain a SCREAMING_SNAKE_CASE `metavar` prop', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'metavar',
'FOO',
);
expect(result['--plugin-blob-foo']).to.have.property('metavar', 'FOO');
});
});

@@ -81,23 +78,20 @@ describe('cli-args', function () {
});

it('should use the `csv` transformer', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'type',
transformers.csv,
);
expect(result['--plugin-blob-foo']).to.have.property('type', transformers.csv);
});

it('should contain a SCREAMING_SNAKE_CASE `metavar` prop', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'metavar',
'FOO',
);
expect(result['--plugin-blob-foo']).to.have.property('metavar', 'FOO');
});
});

describe('number', function () {
beforeEach(function () {
const schema = {properties: {foo: {type: 'number'}}, type: 'object'};
const schema = {
properties: {foo: {type: 'number'}},
type: 'object',
};
result = getArgs({schema, extName, extType});
});

@@ -106,16 +100,16 @@ describe('cli-args', function () {
});

it('should contain a SCREAMING_SNAKE_CASE `metavar` prop', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'metavar',
'FOO',
);
expect(result['--plugin-blob-foo']).to.have.property('metavar', 'FOO');
});
});

describe('integer', function () {
beforeEach(function () {
const schema = {properties: {foo: {type: 'integer'}}, type: 'object'};
const schema = {
properties: {foo: {type: 'integer'}},
type: 'object',
};
result = getArgs({schema, extName, extType});
});

@@ -124,10 +118,7 @@ describe('cli-args', function () {
});

it('should contain a SCREAMING_SNAKE_CASE `metavar` prop', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'metavar',
'FOO',
);
expect(result['--plugin-blob-foo']).to.have.property('metavar', 'FOO');
});
});

@@ -145,10 +136,7 @@ describe('cli-args', function () {
});

it('should contain a SCREAMING_SNAKE_CASE `metavar` prop', function () {
expect(result['--plugin-blob-foo']).to.have.property(
'metavar',
'FOO',
);
expect(result['--plugin-blob-foo']).to.have.property('metavar', 'FOO');
});
});

@@ -157,7 +145,7 @@ describe('cli-args', function () {
const schema = {properties: {foo: {type: 'null'}}, type: 'object'};
expect(() => getArgs({extType, extName, schema})).to.throw(
TypeError,
/unknown or disallowed/,
/unknown or disallowed/
);
});
});
@@ -168,10 +156,7 @@ describe('cli-args', function () {
properties: {foo: {type: 'donkey'}},
type: 'object',
};
expect(() => getArgs({extType, extName, schema})).to.throw(
Error,
/schema is invalid/,
);
expect(() => getArgs({extType, extName, schema})).to.throw(Error, /schema is invalid/);
});
});
});
@@ -187,9 +172,7 @@ describe('cli-args', function () {
type: 'object',
};
result = getArgs({schema, extName, extType});
expect(result).to.have.property(
'--plugin-blob-foo,--plugin-blob-fooooo,--plugin-blob-F',
);
expect(result).to.have.property('--plugin-blob-foo,--plugin-blob-fooooo,--plugin-blob-F');
});
});

@@ -232,9 +215,7 @@ describe('cli-args', function () {
type: 'object',
};
result = getArgs({schema, extName, extType});
expect(() => result['--plugin-blob-foo'].type('123')).to.throw(
/must be a valid json/i,
);
expect(() => result['--plugin-blob-foo'].type('123')).to.throw(/must be a valid json/i);
});

// this is unlikely to happen, but I want to establish the behavior as defined.
@@ -254,7 +235,7 @@ describe('cli-args', function () {
};
result = getArgs({schema, extName, extType});
expect(() => result['--plugin-blob-foo'].type('herp')).to.throw(
/must be a valid json/i,
/must be a valid json/i
);
});
});
@@ -274,9 +255,7 @@ describe('cli-args', function () {
type: 'object',
};
result = getArgs({schema, extName, extType});
expect(
result['--plugin-blob-foo'].type('{"herp": "derp"}'),
).to.eql({herp: 'derp'});
expect(result['--plugin-blob-foo'].type('{"herp": "derp"}')).to.eql({herp: 'derp'});
});
});

@@ -293,9 +272,9 @@ describe('cli-args', function () {
type: 'object',
};
result = getArgs({schema, extName, extType});
expect(() =>
result['--plugin-blob-foo'].type('{"georgy": "porgy"}'),
).to.throw(/one of the allowed values/i);
expect(() => result['--plugin-blob-foo'].type('{"georgy": "porgy"}')).to.throw(
/one of the allowed values/i
);
});
});
});
@@ -316,12 +295,12 @@ describe('cli-args', function () {
};
expect(() => getArgs({schema, extName, extType})).to.throw(
TypeError,
/`enum` is only supported for `type: 'string'`/i,
/`enum` is only supported for `type: 'string'`/i
);
});

it(
'should actually throw earlier by failing schema validation, but that would mean overriding the behavior of `enum` which sounds inadvisable',
'should actually throw earlier by failing schema validation, but that would mean overriding the behavior of `enum` which sounds inadvisable'
);
});

@@ -337,10 +316,7 @@ describe('cli-args', function () {
type: 'object',
};
const result = getArgs({schema, extName, extType});
expect(result['--plugin-blob-foo']).to.have.deep.property(
'choices',
['herp', 'derp'],
);
expect(result['--plugin-blob-foo']).to.have.deep.property('choices', ['herp', 'derp']);
});
});
});

@@ -144,10 +144,7 @@ describe('schema', function () {
expect(() => {
// @ts-expect-error
registerSchema(DRIVER_TYPE, 'whoopeee', [45]);
}).to.throw(
SchemaUnsupportedSchemaError,
/must be a plain object/i
);
}).to.throw(SchemaUnsupportedSchemaError, /must be a plain object/i);
});
});

@@ -156,10 +153,7 @@ describe('schema', function () {
expect(() => {
// @ts-expect-error
registerSchema(DRIVER_TYPE, 'whoopee', {$async: true});
}).to.throw(
SchemaUnsupportedSchemaError,
/cannot be an async schema/i
);
}).to.throw(SchemaUnsupportedSchemaError, /cannot be an async schema/i);
});
});

@@ -178,9 +172,7 @@ describe('schema', function () {
it('should not throw', function () {
const schemaObject = {title: 'whoopee'};
registerSchema(DRIVER_TYPE, 'whoopee', schemaObject);
expect(() =>
registerSchema(DRIVER_TYPE, 'whoopee', schemaObject)
).not.to.throw();
expect(() => registerSchema(DRIVER_TYPE, 'whoopee', schemaObject)).not.to.throw();
});
});

@@ -201,9 +193,7 @@ describe('schema', function () {
describe('when provided a nonempty `type`, `schema` and `name`', function () {
it('should register the schema', function () {
const schemaObject = {title: 'whoopee'};
expect(() =>
registerSchema(DRIVER_TYPE, 'whoopee', schemaObject)
).not.to.throw();
expect(() => registerSchema(DRIVER_TYPE, 'whoopee', schemaObject)).not.to.throw();
});

describe('when the `name` is not unique but `type` is', function () {
@@ -211,9 +201,7 @@ describe('schema', function () {
const schema1 = {title: 'pro-skub'};
const schema2 = {title: 'anti-skub'};
registerSchema(DRIVER_TYPE, 'skub', schema1);
expect(() =>
registerSchema(PLUGIN_TYPE, 'skub', schema2)
).not.to.throw();
expect(() => registerSchema(PLUGIN_TYPE, 'skub', schema2)).not.to.throw();
});
});
});
@@ -243,29 +231,21 @@ describe('schema', function () {

describe('when schema ID is the base schema ID', function () {
it('should return the base schema', function () {
expect(getSchema(APPIUM_CONFIG_SCHEMA_ID)).to.eql(
AppiumConfigJsonSchema
);
expect(getSchema(APPIUM_CONFIG_SCHEMA_ID)).to.eql(AppiumConfigJsonSchema);
});
});

describe('when the schema ID is a reference', function () {
it('should return the schema for the reference', function () {
expect(
getSchema(
`${APPIUM_CONFIG_SCHEMA_ID}#/properties/server/properties/address`
)
).to.exist.and.to.eql(
AppiumConfigJsonSchema.properties.server.properties.address
);
getSchema(`${APPIUM_CONFIG_SCHEMA_ID}#/properties/server/properties/address`)
).to.exist.and.to.eql(AppiumConfigJsonSchema.properties.server.properties.address);
});
});

describe('when schema ID is invalid', function () {
it('should throw', function () {
expect(() => getSchema('schema-the-clown')).to.throw(
SchemaUnknownSchemaError
);
expect(() => getSchema('schema-the-clown')).to.throw(SchemaUnknownSchemaError);
});
});
});
@@ -402,8 +382,10 @@ describe('schema', function () {

it('should return a Record containing all extension schemas _and_ the base schema containing references to the extension schemas', function () {
const baseSchemaWithRefs = _.cloneDeep(AppiumConfigJsonSchema);
baseSchemaWithRefs.properties.server.properties.driver.properties.stuff =
{$ref: 'driver-stuff.json', $comment: 'stuff'};
baseSchemaWithRefs.properties.server.properties.driver.properties.stuff = {
$ref: 'driver-stuff.json',
$comment: 'stuff',
};
expect(finalizeSchema()).to.eql({
[APPIUM_CONFIG_SCHEMA_ID]: baseSchemaWithRefs,
'driver-stuff.json': DRIVER_SCHEMA_FIXTURE,
@@ -442,9 +424,7 @@ describe('schema', function () {

describe('when provided an invalid schema ID ref', function () {
it('should throw', function () {
expect(() => validate('foo', 'bar')).to.throw(
SchemaUnknownSchemaError
);
expect(() => validate('foo', 'bar')).to.throw(SchemaUnknownSchemaError);
});
});

@@ -457,8 +437,7 @@ describe('schema', function () {

describe('when provided an invalid value', function () {
it('should return an array containing errors', function () {
expect(validate({address: '127.0.0.1'})).to.be.an('array').and.to
.not.be.empty;
expect(validate({address: '127.0.0.1'})).to.be.an('array').and.to.not.be.empty;
});
});
});
@@ -467,10 +446,7 @@ describe('schema', function () {
describe('when provided a valid value', function () {
it('should return an empty array of no errors', function () {
expect(
validate(
'127.0.0.1',
'appium.json#/properties/server/properties/address'
)
validate('127.0.0.1', 'appium.json#/properties/server/properties/address')
).to.eql([]);
});
});
@@ -478,10 +454,7 @@ describe('schema', function () {
describe('when provided an invalid value', function () {
it('should return an array containing errors', function () {
expect(
validate(
'127.0.0.1',
'appium.json#/properties/server/properties/port'
)
validate('127.0.0.1', 'appium.json#/properties/server/properties/port')
).to.be.an('array').and.to.not.be.empty;
});
});
@@ -496,26 +469,21 @@ describe('schema', function () {

describe('when provided an invalid schema ID ref', function () {
it('should throw', function () {
expect(() => validate('foo', 'bar')).to.throw(
SchemaUnknownSchemaError
);
expect(() => validate('foo', 'bar')).to.throw(SchemaUnknownSchemaError);
});
});

describe('when not provided a schema ID ref', function () {
describe('when provided a valid value', function () {
it('should return an empty array of no errors', function () {
expect(validate({server: {driver: {stuff: {answer: 99}}}})).to.eql(
[]
);
expect(validate({server: {driver: {stuff: {answer: 99}}}})).to.eql([]);
});
});

describe('when provided an invalid value', function () {
it('should return an array containing errors', function () {
expect(
validate({server: {driver: {stuff: {answer: 101}}}})
).to.be.an('array').and.to.not.be.empty;
expect(validate({server: {driver: {stuff: {answer: 101}}}})).to.be.an('array').and.to
.not.be.empty;
});
});
});
@@ -523,17 +491,14 @@ describe('schema', function () {
describe('when provided a schema ID ref', function () {
describe('when provided a valid value', function () {
it('should return an empty array of no errors', function () {
expect(validate(99, 'driver-stuff.json#/properties/answer')).to.eql(
[]
);
expect(validate(99, 'driver-stuff.json#/properties/answer')).to.eql([]);
});
});

describe('when provided an invalid value', function () {
it('should return an array containing errors', function () {
expect(
validate(101, 'driver-stuff.json#/properties/answer')
).to.be.an('array').and.to.not.be.empty;
expect(validate(101, 'driver-stuff.json#/properties/answer')).to.be.an('array').and.to
.not.be.empty;
});
});
});

@@ -1,11 +1,14 @@
import {
parseCapsForInnerDriver, insertAppiumPrefixes, pullSettings,
removeAppiumPrefixes, inspect
parseCapsForInnerDriver,
insertAppiumPrefixes,
pullSettings,
removeAppiumPrefixes,
inspect,
} from '../../lib/utils';
import { BASE_CAPS, W3C_CAPS } from '../helpers';
import {BASE_CAPS, W3C_CAPS} from '../helpers';
import _ from 'lodash';
import { stripColors } from '@colors/colors';
import { createSandbox } from 'sinon';
import {stripColors} from '@colors/colors';
import {createSandbox} from 'sinon';
import logger from '../../lib/logger';

describe('utils', function () {
@@ -16,14 +19,16 @@ describe('utils', function () {
error.message.should.match(/W3C/);
});
it('should return W3C caps unchanged if only W3C caps were provided', function () {
let {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities, protocol} = parseCapsForInnerDriver(undefined, W3C_CAPS);
let {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities, protocol} =
parseCapsForInnerDriver(undefined, W3C_CAPS);
desiredCaps.should.deep.equal(BASE_CAPS);
should.not.exist(processedJsonwpCapabilities);
processedW3CCapabilities.should.deep.equal(W3C_CAPS);
protocol.should.equal('W3C');
});
it('should return JSONWP and W3C caps if both were provided', function () {
let {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities, protocol} = parseCapsForInnerDriver(BASE_CAPS, W3C_CAPS);
let {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities, protocol} =
parseCapsForInnerDriver(BASE_CAPS, W3C_CAPS);
desiredCaps.should.deep.equal(BASE_CAPS);
processedJsonwpCapabilities.should.deep.equal(BASE_CAPS);
processedW3CCapabilities.should.deep.equal(W3C_CAPS);
@@ -38,54 +43,68 @@ describe('utils', function () {
foo: 'bar',
baz: 'bla',
};
const {
desiredCaps,
processedJsonwpCapabilities,
processedW3CCapabilities
} = parseCapsForInnerDriver(BASE_CAPS, W3C_CAPS, {}, defaultW3CCaps);
const {desiredCaps, processedJsonwpCapabilities, processedW3CCapabilities} =
parseCapsForInnerDriver(BASE_CAPS, W3C_CAPS, {}, defaultW3CCaps);
desiredCaps.should.deep.equal({
...expectedDefaultCaps,
...BASE_CAPS,
});
processedJsonwpCapabilities.should.deep.equal({
...expectedDefaultCaps,
...BASE_CAPS
...BASE_CAPS,
});
processedW3CCapabilities.alwaysMatch.should.deep.equal({
...insertAppiumPrefixes(expectedDefaultCaps),
...insertAppiumPrefixes(BASE_CAPS)
...insertAppiumPrefixes(BASE_CAPS),
});
});
it('should allow valid default capabilities', function () {
const res = parseCapsForInnerDriver(null, W3C_CAPS, {}, {
'appium:foo': 'bar2',
});
const res = parseCapsForInnerDriver(
null,
W3C_CAPS,
{},
{
'appium:foo': 'bar2',
}
);
res.processedW3CCapabilities.alwaysMatch['appium:foo'].should.eql('bar2');
});
it('should not allow invalid default capabilities', function () {
const res = parseCapsForInnerDriver(null, W3C_CAPS, {}, {
foo: 'bar', 'appium:foo2': 'bar2',
});
const res = parseCapsForInnerDriver(
null,
W3C_CAPS,
{},
{
foo: 'bar',
'appium:foo2': 'bar2',
}
);
res.error.should.eql({
jsonwpCode: 61, error: 'invalid argument', w3cStatus: 400, _stacktrace: null
jsonwpCode: 61,
error: 'invalid argument',
w3cStatus: 400,
_stacktrace: null,
});
});
it('should reject if W3C caps are not passing constraints', function () {
const err = parseCapsForInnerDriver(undefined, W3C_CAPS, {hello: {presence: true}}).error;
const err = parseCapsForInnerDriver(undefined, W3C_CAPS, {
hello: {presence: true},
}).error;
err.message.should.match(/'hello' can't be blank/);
_.isError(err).should.be.true;

});
it('should only accept W3C caps that have passing constraints', function () {
let w3cCaps = {
...W3C_CAPS,
firstMatch: [
{foo: 'bar'},
{'appium:hello': 'world'},
],
firstMatch: [{foo: 'bar'}, {'appium:hello': 'world'}],
};
parseCapsForInnerDriver(BASE_CAPS, w3cCaps, {hello: {presence: true}}).error.should.eql({
jsonwpCode: 61, error: 'invalid argument', w3cStatus: 400, _stacktrace: null
parseCapsForInnerDriver(BASE_CAPS, w3cCaps, {
hello: {presence: true},
}).error.should.eql({
jsonwpCode: 61,
error: 'invalid argument',
w3cStatus: 400,
_stacktrace: null,
});
});
it('should add appium prefixes to W3C caps that are not standard in W3C', function () {
@@ -105,7 +124,7 @@ describe('utils', function () {
'ms:cap2': 'value2',
someCap: 'someCap',
}).should.eql({
'cap1': 'value1',
cap1: 'value1',
'ms:cap2': 'value2',
someCap: 'someCap',
});
@@ -213,7 +232,6 @@ describe('utils', function () {
});

describe('inspect()', function () {

/**
* @type {sinon.SinonSandbox}
*/
@@ -229,8 +247,9 @@ describe('utils', function () {

it('should log the result of inspecting a value', function () {
inspect({foo: 'bar'});
stripColors(/** @type {sinon.SinonStub} */(logger.info).firstCall.firstArg)
.should.match(/\{\s*\n*foo:\s'bar'\s*\n*\}/);
stripColors(/** @type {sinon.SinonStub} */ (logger.info).firstCall.firstArg).should.match(
/\{\s*\n*foo:\s'bar'\s*\n*\}/
);
});
});
});

@@ -1,18 +1,18 @@
import { CommonMetadata, ExtMetadata, SchemaMetadata } from './external-manifest';
import { ExtensionType, DriverType, PluginType } from '.';
import {CommonMetadata, ExtMetadata, SchemaMetadata} from './external-manifest';
import {ExtensionType, DriverType, PluginType} from '.';

export type InstallType = 'npm' | 'git' | 'local' | 'github';

export interface InternalMetadata {
/**
* Package name of extension
*
*
* `name` from its `package.json`
*/
pkgName: string;
/**
* Version of extension
*
*
* `version` from its `package.json`
*/
version: string;
@@ -32,11 +32,7 @@ export interface InternalMetadata {
*/
export type ExtManifest<ExtType extends ExtensionType> = Omit<
ExtMetadata<ExtType>,
ExtType extends DriverType
? 'driverName'
: ExtType extends PluginType
? 'pluginName'
: never
ExtType extends DriverType ? 'driverName' : ExtType extends PluginType ? 'pluginName' : never
> &
InternalMetadata &
CommonMetadata; // XXX: ExtMetadata should be a union with CommonMetadata. why is this needed?
@@ -48,16 +44,13 @@ export type WithSchemaManifest = {
/**
* This is just a {@linkcode ExtManifest} except it _for sure_ has a `schema` prop.
*/
export type ExtManifestWithSchema<ExtType extends ExtensionType> =
ExtManifest<ExtType> & WithSchemaManifest;
export type ExtManifestWithSchema<ExtType extends ExtensionType> = ExtManifest<ExtType> &
WithSchemaManifest;

/**
* Generic type for an object keyed by extension name, with values of type {@linkcode ExtData}
*/
export type ExtRecord<ExtType extends ExtensionType> = Record<
string,
ExtManifest<ExtType>
>;
export type ExtRecord<ExtType extends ExtensionType> = Record<string, ExtManifest<ExtType>>;

export type DriverRecord = ExtRecord<DriverType>;
export type PluginRecord = ExtRecord<PluginType>;

@@ -17,10 +17,7 @@ export type PluginSubcommand = typeof PLUGIN_SUBCOMMAND;
/**
* Possible subcommands for the `appium` CLI.
*/
export type CliSubcommand =
| ServerSubcommand
| DriverSubcommand
| PluginSubcommand;
export type CliSubcommand = ServerSubcommand | DriverSubcommand | PluginSubcommand;

/**
* Possible subcommands of {@linkcode DriverSubcommand} or
@@ -127,11 +124,7 @@ export interface WithExtSubcommand {
*/
type CommonArgs<SArgs, T = WithServerSubcommand> = MoreArgs &
ProgrammaticArgs &
(T extends WithServerSubcommand
? SArgs
: T extends WithExtSubcommand
? ExtArgs
: never);
(T extends WithServerSubcommand ? SArgs : T extends WithExtSubcommand ? ExtArgs : never);

/**
* Fully-parsed arguments, containing defaults, computed args, and config file values.

@@ -1,9 +1,8 @@
import type { BaseDriverBase } from '@appium/base-driver/lib/basedriver/driver';
import { Class, Driver, ExternalDriver } from '@appium/types';
import { DriverType, ExtensionType, PluginType } from '.';
import type {BaseDriverBase} from '@appium/base-driver/lib/basedriver/driver';
import {Class, Driver, ExternalDriver} from '@appium/types';
import {DriverType, ExtensionType, PluginType} from '.';

export type DriverClass = BaseDriverBase<ExternalDriver,
ExternalDriverStatic>;
export type DriverClass = BaseDriverBase<ExternalDriver, ExternalDriverStatic>;

/**
* Additional static props for external driver classes
@@ -40,10 +39,7 @@ export interface PluginProto {
/**
* Don't know what this is, but it's also required.
*/
onUnexpectedShutdown?: (
driver: Driver,
cause: Error | string,
) => Promise<void>;
onUnexpectedShutdown?: (driver: Driver, cause: Error | string) => Promise<void>;
}

/**

@@ -2,9 +2,9 @@
|
||||
* These types describe information about external extensions and the contents of their `package.json` files
|
||||
*/
|
||||
|
||||
import type { SchemaObject } from 'ajv';
|
||||
import type { PackageJson, SetRequired } from 'type-fest';
|
||||
import { DriverType, ExtensionType, PluginType } from './index';
|
||||
import type {SchemaObject} from 'ajv';
|
||||
import type {PackageJson, SetRequired} from 'type-fest';
|
||||
import {DriverType, ExtensionType, PluginType} from './index';
|
||||
|
||||
/**
|
||||
* This is what is allowed in the `appium.schema` prop of an extension's `package.json`.
|
||||
@@ -40,13 +40,12 @@ export interface PluginMetadata {
|
||||
* Generic type to refer to either {@linkcode DriverMetadata} or {@linkcode PluginMetadata}
|
||||
* Corresponds to the `appium` prop in an extension's `package.json`.
|
||||
*/
|
||||
export type ExtMetadata<ExtType extends ExtensionType> =
|
||||
(ExtType extends DriverType
|
||||
? DriverMetadata
|
||||
: ExtType extends PluginType
|
||||
? PluginMetadata
|
||||
: never) &
|
||||
CommonMetadata;
|
||||
export type ExtMetadata<ExtType extends ExtensionType> = (ExtType extends DriverType
|
||||
? DriverMetadata
|
||||
: ExtType extends PluginType
|
||||
? PluginMetadata
|
||||
: never) &
|
||||
CommonMetadata;
|
||||
|
||||
/**
|
||||
* A `package.json` containing extension metadata.
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
// @ts-check
|
||||
|
||||
import _ from 'lodash';
|
||||
import { validator } from './desired-caps';
|
||||
import { util } from '@appium/support';
|
||||
import {validator} from './desired-caps';
|
||||
import {util} from '@appium/support';
|
||||
import log from './logger';
|
||||
import { errors } from '../protocol/errors';
|
||||
import {errors} from '../protocol/errors';
|
||||
|
||||
const APPIUM_VENDOR_PREFIX = 'appium:';
|
||||
const APPIUM_OPTS_CAP = 'options';
|
||||
@@ -17,13 +17,17 @@ const PREFIXED_APPIUM_OPTS_CAP = `${APPIUM_VENDOR_PREFIX}${APPIUM_OPTS_CAP}`;
|
||||
* @param {Capabilities} [secondary]
|
||||
* @returns {Capabilities}
|
||||
*/
|
||||
function mergeCaps (primary = {}, secondary = {}) {
|
||||
function mergeCaps(primary = {}, secondary = {}) {
|
||||
let result = Object.assign({}, primary);
|
||||
|
||||
for (let [name, value] of _.toPairs(secondary)) {
|
||||
// Overwriting is not allowed. Primary and secondary must have different properties (w3c rule 4.4)
|
||||
if (!_.isUndefined(primary[name])) {
|
||||
throw new errors.InvalidArgumentError(`property '${name}' should not exist on both primary (${JSON.stringify(primary)}) and secondary (${JSON.stringify(secondary)}) object`);
|
||||
throw new errors.InvalidArgumentError(
|
||||
`property '${name}' should not exist on both primary (${JSON.stringify(
|
||||
primary
|
||||
)}) and secondary (${JSON.stringify(secondary)}) object`
|
||||
);
|
||||
}
|
||||
result[name] = value;
|
||||
}
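For orientation, here is a minimal sketch (not part of the commit) of how the W3C rule 4.4 check above plays out when mergeCaps is called; both input objects are hypothetical:

// Disjoint keys merge cleanly:
mergeCaps({platformName: 'iOS'}, {'appium:deviceName': 'iPhone 12'});
// => {platformName: 'iOS', 'appium:deviceName': 'iPhone 12'}

// The same key on both sides violates W3C rule 4.4 and raises InvalidArgumentError:
mergeCaps({platformName: 'iOS'}, {platformName: 'Android'}); // throws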
@@ -39,8 +43,7 @@ function mergeCaps (primary = {}, secondary = {}) {
 * @param {ValidateCapsOpts} [opts]
 * @returns {Capabilities}
 */
function validateCaps (caps, constraints = {}, opts = {}) {
function validateCaps(caps, constraints = {}, opts = {}) {
  let {skipPresenceConstraint} = opts;

  if (!_.isPlainObject(caps)) {

@@ -56,9 +59,9 @@ function validateCaps (caps, constraints = {}, opts = {}) {
    }
  }

  let validationErrors = validator.validate(_.pickBy(caps, util.hasValue),
    constraints,
    {fullMessages: false});
  let validationErrors = validator.validate(_.pickBy(caps, util.hasValue), constraints, {
    fullMessages: false,
  });

  if (validationErrors) {
    let message = [];

@@ -84,16 +87,19 @@ const STANDARD_CAPS = [
  'proxy',
  'setWindowRect',
  'timeouts',
  'unhandledPromptBehavior'
  'unhandledPromptBehavior',
];

function isStandardCap (cap) {
  return !!_.find(STANDARD_CAPS, (standardCap) => standardCap.toLowerCase() === `${cap}`.toLowerCase());
function isStandardCap(cap) {
  return !!_.find(
    STANDARD_CAPS,
    (standardCap) => standardCap.toLowerCase() === `${cap}`.toLowerCase()
  );
}

// If the 'appium:' prefix was provided and it's a valid capability, strip out the prefix (see https://www.w3.org/TR/webdriver/#dfn-extension-capabilities)
// (NOTE: Method is destructive and mutates contents of caps)
function stripAppiumPrefixes (caps) {
function stripAppiumPrefixes(caps) {
  const prefix = 'appium:';
  const prefixedCaps = _.filter(_.keys(caps), (cap) => `${cap}`.startsWith(prefix));
  const badPrefixedCaps = [];

@@ -108,8 +114,10 @@ function stripAppiumPrefixes (caps) {
      if (_.isNil(caps[strippedCapName])) {
        caps[strippedCapName] = caps[prefixedCap];
      } else {
        log.warn(`Ignoring capability '${prefixedCap}=${caps[prefixedCap]}' and ` +
          `using capability '${strippedCapName}=${caps[strippedCapName]}'`);
        log.warn(
          `Ignoring capability '${prefixedCap}=${caps[prefixedCap]}' and ` +
            `using capability '${strippedCapName}=${caps[strippedCapName]}'`
        );
      }
    } else {
      caps[strippedCapName] = caps[prefixedCap];

@@ -121,7 +129,11 @@ function stripAppiumPrefixes (caps) {

  // If we found standard caps that were incorrectly prefixed, throw an exception (e.g.: don't accept 'appium:platformName', only accept just 'platformName')
  if (badPrefixedCaps.length > 0) {
    log.warn(`The capabilities ${JSON.stringify(badPrefixedCaps)} are standard capabilities and do not require "appium:" prefix`);
    log.warn(
      `The capabilities ${JSON.stringify(
        badPrefixedCaps
      )} are standard capabilities and do not require "appium:" prefix`
    );
  }
}
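As a reading aid (not part of the diff), a hypothetical call showing the prefix handling above; stripAppiumPrefixes mutates its argument in place:

const caps = {'appium:newCommandTimeout': 120, 'appium:platformName': 'iOS'};
stripAppiumPrefixes(caps);
// 'appium:newCommandTimeout' has its prefix stripped and becomes newCommandTimeout, while
// 'appium:platformName' is collected into badPrefixedCaps because platformName is a standard
// W3C capability, which produces the warning shown in the hunk above.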
@@ -129,12 +141,15 @@ function stripAppiumPrefixes (caps) {
 * Get an array of all the unprefixed caps that are being used in 'alwaysMatch' and all of the 'firstMatch' object
 * @param {Object} caps A capabilities object
 */
function findNonPrefixedCaps ({alwaysMatch = {}, firstMatch = []}) {
function findNonPrefixedCaps({alwaysMatch = {}, firstMatch = []}) {
  return _.chain([alwaysMatch, ...firstMatch])
    .reduce((unprefixedCaps, caps) => [
      ...unprefixedCaps,
      ...Object.keys(caps).filter((cap) => !cap.includes(':') && !isStandardCap(cap)),
    ], [])
    .reduce(
      (unprefixedCaps, caps) => [
        ...unprefixedCaps,
        ...Object.keys(caps).filter((cap) => !cap.includes(':') && !isStandardCap(cap)),
      ],
      []
    )
    .uniq()
    .value();
}

@@ -147,10 +162,12 @@ function findNonPrefixedCaps ({alwaysMatch = {}, firstMatch = []}) {
 * @param {boolean} [shouldValidateCaps]
 * @returns
 */
function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {
function parseCaps(caps, constraints = {}, shouldValidateCaps = true) {
  // If capabilities request is not an object, return error (#1.1)
  if (!_.isPlainObject(caps)) {
    throw new errors.InvalidArgumentError('The capabilities argument was not valid for the following reason(s): "capabilities" must be a JSON object.');
    throw new errors.InvalidArgumentError(
      'The capabilities argument was not valid for the following reason(s): "capabilities" must be a JSON object.'
    );
  }

  // Let 'requiredCaps' be property named 'alwaysMatch' from capabilities request (#2)

@@ -162,21 +179,27 @@ function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {

  // Reject 'firstMatch' argument if it's not an array (#3.2)
  if (!_.isArray(allFirstMatchCaps)) {
    throw new errors.InvalidArgumentError('The capabilities.firstMatch argument was not valid for the following reason(s): "capabilities.firstMatch" must be a JSON array or undefined');
    throw new errors.InvalidArgumentError(
      'The capabilities.firstMatch argument was not valid for the following reason(s): "capabilities.firstMatch" must be a JSON array or undefined'
    );
  }

  // If an empty array as provided, we'll be forgiving and make it an array of one empty object
  // In the future, reject 'firstMatch' argument if its array did not have one or more entries (#3.2)
  if (allFirstMatchCaps.length === 0) {
    log.warn(`The firstMatch array in the given capabilities has no entries. Adding an empty entry fo rnow, ` +
      `but it will require one or more entries as W3C spec.`);
    log.warn(
      `The firstMatch array in the given capabilities has no entries. Adding an empty entry fo rnow, ` +
        `but it will require one or more entries as W3C spec.`
    );
    allFirstMatchCaps.push({});
  }

  // Check for non-prefixed, non-standard capabilities and log warnings if they are found
  let nonPrefixedCaps = findNonPrefixedCaps(caps);
  if (!_.isEmpty(nonPrefixedCaps)) {
    throw new errors.InvalidArgumentError(`All non-standard capabilities should have a vendor prefix. The following capabilities did not have one: ${nonPrefixedCaps}`);
    throw new errors.InvalidArgumentError(
      `All non-standard capabilities should have a vendor prefix. The following capabilities did not have one: ${nonPrefixedCaps}`
    );
  }

  // Strip out the 'appium:' prefix from all

@@ -187,10 +210,11 @@ function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {

  // Validate the requiredCaps. But don't validate 'presence' because if that constraint fails on 'alwaysMatch' it could still pass on one of the 'firstMatch' keys
  if (shouldValidateCaps) {
    requiredCaps = validateCaps(requiredCaps, constraints, {skipPresenceConstraint: true});
    requiredCaps = validateCaps(requiredCaps, constraints, {
      skipPresenceConstraint: true,
    });
  }

  // Remove the 'presence' constraint for any keys that are already present in 'requiredCaps'
  // since we know that this constraint has already passed
  let filteredConstraints = {...constraints};

@@ -204,14 +228,18 @@ function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {
  // Validate all of the first match capabilities and return an array with only the valid caps (see spec #5)
  let validationErrors = [];
  /** @type {Capabilities[]} */
  let validatedFirstMatchCaps = _.compact(allFirstMatchCaps.map((firstMatchCaps) => {
    try {
      // Validate firstMatch caps
      return shouldValidateCaps ? validateCaps(firstMatchCaps, filteredConstraints) : firstMatchCaps;
    } catch (e) {
      validationErrors.push(e.message);
    }
  }));
  let validatedFirstMatchCaps = _.compact(
    allFirstMatchCaps.map((firstMatchCaps) => {
      try {
        // Validate firstMatch caps
        return shouldValidateCaps
          ? validateCaps(firstMatchCaps, filteredConstraints)
          : firstMatchCaps;
      } catch (e) {
        validationErrors.push(e.message);
      }
    })
  );

  // Try to merge requiredCaps with first match capabilities, break once it finds its first match (see spec #6)
  let matchedCaps = null;

@@ -228,7 +256,13 @@ function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {
  }

  // Returns variables for testing purposes
  return {requiredCaps, allFirstMatchCaps, validatedFirstMatchCaps, matchedCaps, validationErrors};
  return {
    requiredCaps,
    allFirstMatchCaps,
    validatedFirstMatchCaps,
    matchedCaps,
    validationErrors,
  };
}
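A short sketch (not part of the commit, input values hypothetical) of the W3C capabilities object parseCaps consumes and the result shape listed in the return statement above:

const w3cCaps = {
  alwaysMatch: {platformName: 'iOS', 'appium:automationName': 'XCUITest'},
  firstMatch: [{'appium:deviceName': 'iPhone 12'}],
};
const {matchedCaps, validationErrors} = parseCaps(w3cCaps);
// matchedCaps is requiredCaps merged with the first firstMatch entry that validates and merges
// cleanly, or null if none did; validationErrors then carries the reasons. processCapabilities
// below is the wrapper that returns matchedCaps or throws.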
// Calls parseCaps and just returns the matchedCaps variable
@@ -239,14 +273,18 @@ function parseCaps (caps, constraints = {}, shouldValidateCaps = true) {
 * @param {boolean} [shouldValidateCaps]
 * @returns {Capabilities}
 */
function processCapabilities (w3cCaps, constraints = {}, shouldValidateCaps = true) {
function processCapabilities(w3cCaps, constraints = {}, shouldValidateCaps = true) {
  const {matchedCaps, validationErrors} = parseCaps(w3cCaps, constraints, shouldValidateCaps);

  // If we found an error throw an exception
  if (!util.hasValue(matchedCaps)) {
    if (_.isArray(w3cCaps.firstMatch) && w3cCaps.firstMatch.length > 1) {
      // If there was more than one 'firstMatch' cap, indicate that we couldn't find a matching capabilities set and show all the errors
      throw new errors.InvalidArgumentError(`Could not find matching capabilities from ${JSON.stringify(w3cCaps)}:\n ${validationErrors.join('\n')}`);
      throw new errors.InvalidArgumentError(
        `Could not find matching capabilities from ${JSON.stringify(
          w3cCaps
        )}:\n ${validationErrors.join('\n')}`
      );
    } else {
      // Otherwise, just show the singular error message
      throw new errors.InvalidArgumentError(validationErrors[0]);

@@ -267,7 +305,7 @@ function processCapabilities (w3cCaps, constraints = {}, shouldValidateCaps = tr
 * @param {object} originalCaps - the capabilities to analyze and promote from 'options'
 * @return {object!} - the capabilities with 'options' promoted if necessary
 */
function promoteAppiumOptions (originalCaps) {
function promoteAppiumOptions(originalCaps) {
  const appiumOptions = originalCaps[APPIUM_OPTS_CAP];
  if (!appiumOptions) {
    return originalCaps;

@@ -284,8 +322,10 @@ function promoteAppiumOptions (originalCaps) {
  // warn if we are going to overwrite any keys on the base caps object
  const overwrittenKeys = _.intersection(Object.keys(caps), Object.keys(appiumOptions));
  if (overwrittenKeys.length > 0) {
    log.warn(`Found capabilities inside ${PREFIXED_APPIUM_OPTS_CAP} that will overwrite ` +
      `capabilities at the top level: ${JSON.stringify(overwrittenKeys)}`);
    log.warn(
      `Found capabilities inside ${PREFIXED_APPIUM_OPTS_CAP} that will overwrite ` +
        `capabilities at the top level: ${JSON.stringify(overwrittenKeys)}`
    );
  }

  // now just apply them to the main caps object

@@ -296,10 +336,18 @@ function promoteAppiumOptions (originalCaps) {
  return caps;
}
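An illustrative call (not part of the diff) for the promotion handled above; it assumes the 'appium:' prefixes have already been stripped, so the bag lives under the plain 'options' key (APPIUM_OPTS_CAP):

const caps = promoteAppiumOptions({
  platformName: 'iOS',
  options: {automationName: 'XCUITest', deviceName: 'iPhone 12'},
});
// The entries inside 'options' are applied onto the main caps object; any key that already
// exists at the top level triggers the overwrite warning shown in the hunk above.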
export {
  parseCaps, processCapabilities, validateCaps, mergeCaps, APPIUM_VENDOR_PREFIX, APPIUM_OPTS_CAP,
  findNonPrefixedCaps, isStandardCap, stripAppiumPrefixes, promoteAppiumOptions, PREFIXED_APPIUM_OPTS_CAP,
  parseCaps,
  processCapabilities,
  validateCaps,
  mergeCaps,
  APPIUM_VENDOR_PREFIX,
  APPIUM_OPTS_CAP,
  findNonPrefixedCaps,
  isStandardCap,
  stripAppiumPrefixes,
  promoteAppiumOptions,
  PREFIXED_APPIUM_OPTS_CAP,
};

/**

@@ -6,7 +6,7 @@ import _ from 'lodash';
 * @param {TimeoutBase} Base
 * @returns {EventBase}
 */
export function EventMixin (Base) {
export function EventMixin(Base) {
  /**
   * @implements {IEventCommands}
   */

@@ -18,7 +18,7 @@ export function EventMixin (Base) {
     * separation
     * @param {string} event - the event name
     */
    async logCustomEvent (vendor, event) {
    async logCustomEvent(vendor, event) {
      this.logEvent(`${vendor}:${event}`);
    }

@@ -28,7 +28,7 @@ export function EventMixin (Base) {
     * It returns all events if the type is not provided or empty string/array.
     * @returns {Promise<import('@appium/types').EventHistory|Record<string,number>>} - the event history log object
     */
    async getLogEvents (type) {
    async getLogEvents(type) {
      if (_.isEmpty(type)) {
        return this.eventHistory;
      }

@@ -43,7 +43,7 @@ export function EventMixin (Base) {
          }
          return acc;
        },
        {},
        {}
      );
    }
  }
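A brief usage sketch (not part of the commit) for the two event commands above, assuming driver is an instance built from these mixins:

await driver.logCustomEvent('myVendor', 'appLaunched'); // recorded as 'myVendor:appLaunched'
const allEvents = await driver.getLogEvents(); // the full event history
const filtered = await driver.getLogEvents('myVendor:appLaunched'); // only the requested type(s)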
@@ -8,7 +8,7 @@ import {errors} from '../../protocol';
 * @param {EventBase} Base
 * @returns {FindBase}
 */
export function FindMixin (Base) {
export function FindMixin(Base) {
  /**
   * @implements {IFindCommands}
   */

@@ -17,7 +17,7 @@ export function FindMixin (Base) {
     *
     * @returns {Promise<Element>}
     */
    async findElement (strategy, selector) {
    async findElement(strategy, selector) {
      return await this.findElOrElsWithProcessing(strategy, selector, false);
    }

@@ -25,7 +25,7 @@ export function FindMixin (Base) {
     *
     * @returns {Promise<Element[]>}
     */
    async findElements (strategy, selector) {
    async findElements(strategy, selector) {
      return await this.findElOrElsWithProcessing(strategy, selector, true);
    }

@@ -33,26 +33,16 @@ export function FindMixin (Base) {
     *
     * @returns {Promise<Element>}
     */
    async findElementFromElement (strategy, selector, elementId) {
      return await this.findElOrElsWithProcessing(
        strategy,
        selector,
        false,
        elementId,
      );
    async findElementFromElement(strategy, selector, elementId) {
      return await this.findElOrElsWithProcessing(strategy, selector, false, elementId);
    }

    /**
     *
     * @returns {Promise<Element[]>}
     */
    async findElementsFromElement (strategy, selector, elementId) {
      return await this.findElOrElsWithProcessing(
        strategy,
        selector,
        true,
        elementId,
      );
    async findElementsFromElement(strategy, selector, elementId) {
      return await this.findElOrElsWithProcessing(strategy, selector, true, elementId);
    }
    // Override the following function for your own driver, and the rest is taken
    // care of!

@@ -66,14 +56,14 @@ export function FindMixin (Base) {
     * @param {string} [context]
     * @returns {Promise<Mult extends true ? Element[] : Element>}
     */
    async findElOrEls (strategy, selector, mult, context) {
    async findElOrEls(strategy, selector, mult, context) {
      throw new errors.NotImplementedError('Not implemented yet for find.');
    }
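Per the comment above, findElOrEls is the single hook a concrete driver overrides; everything else in this mixin funnels into it. A hypothetical override (not from the commit; this.backend.query is an invented helper):

class MyDriver extends BaseDriver {
  async findElOrEls(strategy, selector, mult, context) {
    // query the platform-specific automation backend (hypothetical helper)
    const found = await this.backend.query(strategy, selector, context);
    return mult ? found : found[0];
  }
}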
    /**
     * @returns {Promise<string>}
     */
    async getPageSource () {
    async getPageSource() {
      throw new errors.NotImplementedError('Not implemented yet for find.');
    }
    /**

@@ -84,19 +74,15 @@ export function FindMixin (Base) {
     * @param {string} [context]
     * @returns {Promise<Mult extends true ? Element[] : Element>}
     */
    async findElOrElsWithProcessing (strategy, selector, mult, context) {
    async findElOrElsWithProcessing(strategy, selector, mult, context) {
      this.validateLocatorStrategy(strategy);
      try {
        return await this.findElOrEls(strategy, selector, mult, context);
      } catch (err) {
        if (this.opts.printPageSourceOnFindFailure) {
          const src = await this.getPageSource();
          this.log.debug(
            `Error finding element${mult ? 's' : ''}: ${err.message}`,
          );
          this.log.debug(
            `Page source requested through 'printPageSourceOnFindFailure':`,
          );
          this.log.debug(`Error finding element${mult ? 's' : ''}: ${err.message}`);
          this.log.debug(`Page source requested through 'printPageSourceOnFindFailure':`);
          this.log.debug(src);
        }
        // still want the error to occur

@@ -1,18 +1,18 @@
// @ts-check

import { EventMixin } from './event';
import { FindMixin } from './find';
import { LogMixin } from './log';
import { SessionMixin } from './session';
import { SettingsMixin } from './settings';
import { TimeoutMixin } from './timeout';
import {EventMixin} from './event';
import {FindMixin} from './find';
import {LogMixin} from './log';
import {SessionMixin} from './session';
import {SettingsMixin} from './settings';
import {TimeoutMixin} from './timeout';

/**
 * Applies all the mixins to the `BaseDriverBase` class.
 * Returns a `BaseDriver` class.
 * @param {BaseDriverBase} Base
 */
export function createBaseDriverClass (Base) {
export function createBaseDriverClass(Base) {
  const WithTimeoutCommands = TimeoutMixin(Base);
  const WithEventCommands = EventMixin(WithTimeoutCommands);
  const WithFindCommands = FindMixin(WithEventCommands);

@@ -8,19 +8,18 @@ import _ from 'lodash';
 * @param {FindBase} Base
 * @returns {LogBase}
 */
export function LogMixin (Base) {
export function LogMixin(Base) {
  /**
   * @implements {ILogCommands}
   */
  class LogCommands extends Base {

    constructor (...args) {
    constructor(...args) {
      super(...args);
      /** @type {Record<string, LogType<Driver>>} */
      this.supportedLogTypes = this.supportedLogTypes ?? {};
    }

    async getLogTypes () {
    async getLogTypes() {
      this.log.debug('Retrieving supported log types');
      return _.keys(this.supportedLogTypes);
    }

@@ -29,14 +28,14 @@ export function LogMixin (Base) {
     * @this {Driver}
     * @param {string} logType
     */
    async getLog (logType) {
    async getLog(logType) {
      this.log.debug(`Retrieving '${logType}' logs`);

      if (!(await this.getLogTypes()).includes(logType)) {
        const logsTypesWithDescriptions = _.mapValues(this.supportedLogTypes, 'description');
        throw new Error(
          `Unsupported log type '${logType}'. ` +
            `Supported types: ${JSON.stringify(logsTypesWithDescriptions)}`,
            `Supported types: ${JSON.stringify(logsTypesWithDescriptions)}`
        );
      }

@@ -46,7 +45,6 @@ export function LogMixin (Base) {
  return LogCommands;
}
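For orientation (not from the commit): drivers populate supportedLogTypes with one entry per log type, and getLog above reads each entry's description when building its error message. The getter field sketched here is an assumption about the rest of the entry's shape:

this.supportedLogTypes = {
  syslog: {
    description: 'System log for the device under test',
    // the retrieval function itself is driver-specific; its exact signature is assumed here
    getter: async (driver) => [],
  },
};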
/**
 * @typedef {import('@appium/types').LogCommands} ILogCommands
 * @typedef {import('@appium/types').Driver} Driver

@@ -7,7 +7,7 @@ import _ from 'lodash';
 * @param {SettingsBase} Base
 * @returns {SessionBase}
 */
export function SessionMixin (Base) {
export function SessionMixin(Base) {
  /**
   * @implements {ISessionCommands}
   */

@@ -15,7 +15,7 @@ export function SessionMixin (Base) {
    /**
     * @returns {Promise<MultiSessionData[]>}
     */
    async getSessions () {
    async getSessions() {
      let ret = [];

      if (this.sessionId) {

@@ -31,7 +31,7 @@ export function SessionMixin (Base) {
    /**
     * @returns {Promise<SingularSessionData>}
     */
    async getSession () {
    async getSession() {
      if (this.caps.eventTimings) {
        return {...this.caps, events: this.eventHistory};
      }

@@ -5,26 +5,24 @@
 * @param {ReturnType<import('./log').LogMixin>} Base
 * @returns {SettingsBase}
 */
export function SettingsMixin (Base) {
export function SettingsMixin(Base) {
  /**
   * @implements {ISettingsCommands}
   */
  class SettingsCommands extends Base {

    async updateSettings (newSettings) {
    async updateSettings(newSettings) {
      if (!this.settings) {
        this.log.errorAndThrow('Cannot update settings; settings object not found');
      }
      return await this.settings.update(newSettings);
    }

    async getSettings () {
    async getSettings() {
      if (!this.settings) {
        this.log.errorAndThrow('Cannot get settings; settings object not found');
      }
      return await this.settings.getSettings();
    }

  }

  return SettingsCommands;

@@ -13,17 +13,14 @@ const MIN_TIMEOUT = 0;
 * @param {import('../driver').BaseDriverBase} Base
 * @returns {TimeoutBase}
 */
export function TimeoutMixin (Base) {

export function TimeoutMixin(Base) {
  /**
   * @implements {ITimeoutCommands}
   */
  class TimeoutCommands extends Base {
    async timeouts (type, ms, script, pageLoad, implicit) {
    async timeouts(type, ms, script, pageLoad, implicit) {
      if (util.hasValue(type) && util.hasValue(ms)) {
        this.log.debug(
          `MJSONWP timeout arguments: ${JSON.stringify({type, ms})}}`,
        );
        this.log.debug(`MJSONWP timeout arguments: ${JSON.stringify({type, ms})}}`);

        switch (type) {
          case 'command':

@@ -39,9 +36,7 @@ export function TimeoutMixin (Base) {
            await this.scriptTimeoutMJSONWP(ms);
            return;
          default:
            throw new Error(
              `'${type}' type is not supported for MJSONWP timeout`,
            );
            throw new Error(`'${type}' type is not supported for MJSONWP timeout`);
        }
      }

@@ -51,7 +46,7 @@ export function TimeoutMixin (Base) {
          script,
          pageLoad,
          implicit,
        })}}`,
        })}}`
      );
      if (util.hasValue(script)) {
        await this.scriptTimeoutW3C(script);

@@ -64,7 +59,7 @@ export function TimeoutMixin (Base) {
      }
    }

    async getTimeouts () {
    async getTimeouts() {
      return {
        command: this.newCommandTimeoutMs,
        implicit: this.implicitWaitMs,

@@ -72,42 +67,42 @@ export function TimeoutMixin (Base) {
    }

    // implicit
    async implicitWaitW3C (ms) {
    async implicitWaitW3C(ms) {
      await this.implicitWait(ms);
    }

    async implicitWaitMJSONWP (ms) {
    async implicitWaitMJSONWP(ms) {
      await this.implicitWait(ms);
    }

    async implicitWait (ms) {
    async implicitWait(ms) {
      await this.setImplicitWait(this.parseTimeoutArgument(ms));
    }

    // pageLoad
    async pageLoadTimeoutW3C (ms) {
    async pageLoadTimeoutW3C(ms) {
      throw new errors.NotImplementedError('Not implemented yet for pageLoad.');
    }

    async pageLoadTimeoutMJSONWP (ms) {
    async pageLoadTimeoutMJSONWP(ms) {
      throw new errors.NotImplementedError('Not implemented yet for pageLoad.');
    }

    // script
    async scriptTimeoutW3C (ms) {
    async scriptTimeoutW3C(ms) {
      throw new errors.NotImplementedError('Not implemented yet for script.');
    }

    async scriptTimeoutMJSONWP (ms) {
    async scriptTimeoutMJSONWP(ms) {
      throw new errors.NotImplementedError('Not implemented yet for script.');
    }

    // command
    async newCommandTimeout (ms) {
    async newCommandTimeout(ms) {
      this.setNewCommandTimeout(this.parseTimeoutArgument(ms));
    }
    setImplicitWait (ms) {
    setImplicitWait(ms) {
      // eslint-disable-line require-await
      this.implicitWaitMs = ms;
      this.log.debug(`Set implicit wait to ${ms}ms`);

@@ -121,7 +116,7 @@ export function TimeoutMixin (Base) {
      }
    }

    setNewCommandTimeout (ms) {
    setNewCommandTimeout(ms) {
      this.newCommandTimeoutMs = ms;
      this.log.debug(`Set new command timeout to ${ms}ms`);
      if (this.managedDrivers && this.managedDrivers.length) {

@@ -134,7 +129,7 @@ export function TimeoutMixin (Base) {
      }
    }

    async implicitWaitForCondition (condFn) {
    async implicitWaitForCondition(condFn) {
      this.log.debug(`Waiting up to ${this.implicitWaitMs} ms for condition`);
      let wrappedCondFn = async (...args) => {
        // reset command timeout

@@ -149,7 +144,7 @@ export function TimeoutMixin (Base) {
      });
    }

    parseTimeoutArgument (ms) {
    parseTimeoutArgument(ms) {
      let duration = parseInt(ms, 10);
      if (_.isNaN(duration) || duration < MIN_TIMEOUT) {
        throw new errors.UnknownError(`Invalid timeout value '${ms}'`);

@@ -2,15 +2,15 @@
/* eslint-disable no-unused-vars */
/* eslint-disable require-await */

import { fs, logger, node } from '@appium/support';
import {fs, logger, node} from '@appium/support';
import AsyncLock from 'async-lock';
import { EventEmitter } from 'events';
import {EventEmitter} from 'events';
import _ from 'lodash';
import os from 'os';
import { DEFAULT_BASE_PATH, PROTOCOLS } from '../constants';
import { errors } from '../protocol';
import { validateCaps } from './capabilities';
import { desiredCapabilityConstraints } from './desired-caps';
import {DEFAULT_BASE_PATH, PROTOCOLS} from '../constants';
import {errors} from '../protocol';
import {validateCaps} from './capabilities';
import {desiredCapabilityConstraints} from './desired-caps';
import DeviceSettings from './device-settings';
import helpers from './helpers';

@@ -24,7 +24,6 @@ const ON_UNEXPECTED_SHUTDOWN_EVENT = 'onUnexpectedShutdown';
 * @implements {Core}
 */
class DriverCore {

  /**
   * Make the basedriver version available so for any driver which inherits from this package, we
   * know which version of basedriver it inherited from

@@ -32,40 +31,40 @@ class DriverCore {
  static baseVersion = BASEDRIVER_VER;

  /**
     * @type {string?}
     */
   * @type {string?}
   */
  sessionId = null;

  /**
     * @type {DriverOpts & Capabilities}
     */
   * @type {DriverOpts & Capabilities}
   */
  opts;

  /**
     * @type {DriverOpts}
     */
   * @type {DriverOpts}
   */
  initialOpts;

  /**
     * @type {Capabilities}
     */
   * @type {Capabilities}
   */
  caps;

  /**
     * @type {W3CCapabilities}
     */
   * @type {W3CCapabilities}
   */
  originalCaps;

  helpers = helpers;

  /**
     * basePath is used for several purposes, for example in setting up
     * proxying to other drivers, since we need to know what the base path
     * of any incoming request might look like. We set it to the default
     * initially but it is automatically updated during any actual program
     * execution by the routeConfiguringFunction, which is necessarily run as
     * the entrypoint for any Appium server
     */
   * basePath is used for several purposes, for example in setting up
   * proxying to other drivers, since we need to know what the base path
   * of any incoming request might look like. We set it to the default
   * initially but it is automatically updated during any actual program
   * execution by the routeConfiguringFunction, which is necessarily run as
   * the entrypoint for any Appium server
   */
  basePath = DEFAULT_BASE_PATH;

  relaxedSecurityEnabled = false;

@@ -102,38 +101,35 @@ class DriverCore {
  eventEmitter = new EventEmitter();

  /**
     * @type {AppiumLogger}
     */
   * @type {AppiumLogger}
   */
  _log;

  /**
     * @protected
     */
   * @protected
   */
  shutdownUnexpectedly = false;

  /**
     * @type {boolean}
     * @protected
     */
   * @type {boolean}
   * @protected
   */
  shouldValidateCaps;

  /**
     * @protected
     */
   * @protected
   */
  commandsQueueGuard = new AsyncLock();

  /**
     * settings should be instantiated by drivers which extend BaseDriver, but
     * we set it to an empty DeviceSettings instance here to make sure that the
     * default settings are applied even if an extending driver doesn't utilize
     * the settings functionality itself
     */
   * settings should be instantiated by drivers which extend BaseDriver, but
   * we set it to an empty DeviceSettings instance here to make sure that the
   * default settings are applied even if an extending driver doesn't utilize
   * the settings functionality itself
   */
  settings = new DeviceSettings();

  constructor (
    opts = /** @type {DriverOpts} */ ({}),
    shouldValidateCaps = true,
  ) {
  constructor(opts = /** @type {DriverOpts} */ ({}), shouldValidateCaps = true) {
    this._log = logger.getLogger(helpers.generateDriverLogPrefix(this));

    // setup state

@@ -141,8 +137,7 @@ class DriverCore {

    // use a custom tmp dir to avoid losing data and app when computer is
    // restarted
    this.opts.tmpDir =
      this.opts.tmpDir || process.env.APPIUM_TMP_DIR || os.tmpdir();
    this.opts.tmpDir = this.opts.tmpDir || process.env.APPIUM_TMP_DIR || os.tmpdir();

    // base-driver internals
    this.shouldValidateCaps = shouldValidateCaps;

@@ -153,63 +148,63 @@ class DriverCore {
    this.sessionId = null;
  }

  get log () {
  get log() {
    return this._log;
  }

  /**
   * Set a callback handler if needed to execute a custom piece of code
   * when the driver is shut down unexpectedly. Multiple calls to this method
   * will cause the handler to be executed mutiple times
   *
   * @param {(...args: any[]) => void} handler The code to be executed on unexpected shutdown.
   * The function may accept one argument, which is the actual error instance, which
   * caused the driver to shut down.
   */
  onUnexpectedShutdown (handler) {
   * Set a callback handler if needed to execute a custom piece of code
   * when the driver is shut down unexpectedly. Multiple calls to this method
   * will cause the handler to be executed mutiple times
   *
   * @param {(...args: any[]) => void} handler The code to be executed on unexpected shutdown.
   * The function may accept one argument, which is the actual error instance, which
   * caused the driver to shut down.
   */
  onUnexpectedShutdown(handler) {
    this.eventEmitter.on(ON_UNEXPECTED_SHUTDOWN_EVENT, handler);
  }
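A minimal sketch (not part of the commit) of registering a handler through this API:

driver.onUnexpectedShutdown((err) => {
  // err is the error passed to startUnexpectedShutdown (see the driver.js hunks further down)
  console.warn(`driver went away: ${err.message}`);
});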
  /**
   * This property is used by AppiumDriver to store the data of the
   * specific driver sessions. This data can be later used to adjust
   * properties for driver instances running in parallel.
   * Override it in inherited driver classes if necessary.
   *
   * @return {Record<string,unknown>} Driver properties mapping
   */
  get driverData () {
   * This property is used by AppiumDriver to store the data of the
   * specific driver sessions. This data can be later used to adjust
   * properties for driver instances running in parallel.
   * Override it in inherited driver classes if necessary.
   *
   * @return {Record<string,unknown>} Driver properties mapping
   */
  get driverData() {
    return {};
  }

  /**
   * This property controls the way {#executeCommand} method
   * handles new driver commands received from the client.
   * Override it for inherited classes only in special cases.
   *
   * @return {boolean} If the returned value is true (default) then all the commands
   * received by the particular driver instance are going to be put into the queue,
   * so each following command will not be executed until the previous command
   * execution is completed. False value disables that queue, so each driver command
   * is executed independently and does not wait for anything.
   */
  get isCommandsQueueEnabled () {
   * This property controls the way {#executeCommand} method
   * handles new driver commands received from the client.
   * Override it for inherited classes only in special cases.
   *
   * @return {boolean} If the returned value is true (default) then all the commands
   * received by the particular driver instance are going to be put into the queue,
   * so each following command will not be executed until the previous command
   * execution is completed. False value disables that queue, so each driver command
   * is executed independently and does not wait for anything.
   */
  get isCommandsQueueEnabled() {
    return true;
  }

  /*
   * make eventHistory a property and return a cloned object so a consumer can't
   * inadvertently change data outside of logEvent
   */
  get eventHistory () {
   * make eventHistory a property and return a cloned object so a consumer can't
   * inadvertently change data outside of logEvent
   */
  get eventHistory() {
    return _.cloneDeep(this._eventHistory);
  }

  /**
   * API method for driver developers to log timings for important events
   * @param {string} eventName
   */
  logEvent (eventName) {
   * API method for driver developers to log timings for important events
   * @param {string} eventName
   */
  logEvent(eventName) {
    if (eventName === 'commands') {
      throw new Error('Cannot log commands directly');
    }

@@ -226,15 +221,15 @@ class DriverCore {
  }

  /**
   * Overridden in appium driver, but here so that individual drivers can be
   * tested with clients that poll
   */
  async getStatus () {
   * Overridden in appium driver, but here so that individual drivers can be
   * tested with clients that poll
   */
  async getStatus() {
    return {};
  }

  // we only want subclasses to ever extend the contraints
  set desiredCapConstraints (constraints) {
  set desiredCapConstraints(constraints) {
    this._constraints = Object.assign(this._constraints, constraints);
    // 'presence' means different things in different versions of the validator,
    // when we say 'true' we mean that it should not be able to be empty

@@ -247,41 +242,40 @@ class DriverCore {
    }
  }

  get desiredCapConstraints () {
  get desiredCapConstraints() {
    return this._constraints;
  }

  /**
   * method required by MJSONWP in order to determine whether it should
   * respond with an invalid session response
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  sessionExists (sessionId) {
   * method required by MJSONWP in order to determine whether it should
   * respond with an invalid session response
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  sessionExists(sessionId) {
    if (!sessionId) return false; // eslint-disable-line curly
    return sessionId === this.sessionId;
  }

  /**
   * method required by MJSONWP in order to determine if the command should
   * be proxied directly to the driver
   * @param {string} sessionId
   * @returns {this | import('@appium/types').Driver}
   */
  driverForSession (sessionId) {
   * method required by MJSONWP in order to determine if the command should
   * be proxied directly to the driver
   * @param {string} sessionId
   * @returns {this | import('@appium/types').Driver}
   */
  driverForSession(sessionId) {
    return this;
  }

  /**
   *
   * @param {Capabilities} caps
   */
  logExtraCaps (caps) {
   *
   * @param {Capabilities} caps
   */
  logExtraCaps(caps) {
    let extraCaps = _.difference(_.keys(caps), _.keys(this._constraints));
    if (extraCaps.length) {
      this.log.warn(
        `The following capabilities were provided, but are not ` +
          `recognized by Appium:`,
        `The following capabilities were provided, but are not ` + `recognized by Appium:`
      );
      for (const cap of extraCaps) {
        this.log.warn(` ${cap}`);

@@ -290,11 +284,11 @@ class DriverCore {
  }

  /**
   *
   * @param {Capabilities} caps
   * @returns {boolean}
   */
  validateDesiredCaps (caps) {
   *
   * @param {Capabilities} caps
   * @returns {boolean}
   */
  validateDesiredCaps(caps) {
    if (!this.shouldValidateCaps) {
      return true;
    }

@@ -305,8 +299,8 @@ class DriverCore {
      this.log.errorAndThrow(
        new errors.SessionNotCreatedError(
          `The desiredCapabilities object was not valid for the ` +
            `following reason(s): ${e.message}`,
        ),
            `following reason(s): ${e.message}`
        )
      );
    }

@@ -315,30 +309,30 @@ class DriverCore {
    return true;
  }

  isMjsonwpProtocol () {
  isMjsonwpProtocol() {
    return this.protocol === PROTOCOLS.MJSONWP;
  }

  isW3CProtocol () {
  isW3CProtocol() {
    return this.protocol === PROTOCOLS.W3C;
  }

  setProtocolMJSONWP () {
  setProtocolMJSONWP() {
    this.protocol = PROTOCOLS.MJSONWP;
  }

  setProtocolW3C () {
  setProtocolW3C() {
    this.protocol = PROTOCOLS.W3C;
  }

  /**
   * Check whether a given feature is enabled via its name
   *
   * @param {string} name - name of feature/command
   *
   * @returns {Boolean}
   */
  isFeatureEnabled (name) {
   * Check whether a given feature is enabled via its name
   *
   * @param {string} name - name of feature/command
   *
   * @returns {Boolean}
   */
  isFeatureEnabled(name) {
    // if we have explicitly denied this feature, return false immediately
    if (this.denyInsecure && _.includes(this.denyInsecure, name)) {
      return false;

@@ -360,35 +354,31 @@ class DriverCore {
  }

  /**
   * Assert that a given feature is enabled and throw a helpful error if it's
   * not
   *
   * @param {string} name - name of feature/command
   */
  ensureFeatureEnabled (name) {
   * Assert that a given feature is enabled and throw a helpful error if it's
   * not
   *
   * @param {string} name - name of feature/command
   */
  ensureFeatureEnabled(name) {
    if (!this.isFeatureEnabled(name)) {
      throw new Error(
        `Potentially insecure feature '${name}' has not been ` +
          `enabled. If you want to enable this feature and accept ` +
          `the security ramifications, please do so by following ` +
          `the documented instructions at https://github.com/appium` +
          `/appium/blob/master/docs/en/writing-running-appium/security.md`,
          `/appium/blob/master/docs/en/writing-running-appium/security.md`
      );
    }
  }
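An illustrative call site (not part of the diff) for the feature gate above; the feature name is only an example:

// inside a command handler that needs an insecure feature:
this.ensureFeatureEnabled('adb_shell');
// throws the error above unless the server was started with relaxed security enabled, or with
// an allow-insecure list that contains 'adb_shell' and a deny-insecure list that does not.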
  /**
   *
   * @param {string} strategy
   * @param {boolean} [webContext]
   */
  validateLocatorStrategy (strategy, webContext = false) {
   *
   * @param {string} strategy
   * @param {boolean} [webContext]
   */
  validateLocatorStrategy(strategy, webContext = false) {
    let validStrategies = this.locatorStrategies;
    this.log.debug(
      `Valid locator strategies for this request: ${validStrategies.join(
        ', ',
      )}`,
    );
    this.log.debug(`Valid locator strategies for this request: ${validStrategies.join(', ')}`);

    if (webContext) {
      validStrategies = validStrategies.concat(this.webLocatorStrategies);

@@ -396,52 +386,52 @@ class DriverCore {

    if (!_.includes(validStrategies, strategy)) {
      throw new errors.InvalidSelectorError(
        `Locator Strategy '${strategy}' is not supported for this session`,
        `Locator Strategy '${strategy}' is not supported for this session`
      );
    }
  }

  /**
   *
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  proxyActive (sessionId) {
   *
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  proxyActive(sessionId) {
    return false;
  }

  /**
   *
   * @param {string} sessionId
   * @returns {[string, RegExp][]}
   */
  getProxyAvoidList (sessionId) {
   *
   * @param {string} sessionId
   * @returns {[string, RegExp][]}
   */
  getProxyAvoidList(sessionId) {
    return [];
  }

  /**
   *
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  canProxy (sessionId) {
   *
   * @param {string} [sessionId]
   * @returns {boolean}
   */
  canProxy(sessionId) {
    return false;
  }

  /**
   * Whether a given command route (expressed as method and url) should not be
   * proxied according to this driver
   *
   * @param {string} sessionId - the current sessionId (in case the driver runs
   * multiple session ids and requires it). This is not used in this method but
   * should be made available to overridden methods.
   * @param {import('@appium/types').HTTPMethod} method - HTTP method of the route
   * @param {string} url - url of the route
   * @param {any} [body] - webdriver request body
   *
   * @returns {boolean} - whether the route should be avoided
   */
  proxyRouteIsAvoided (sessionId, method, url, body) {
   * Whether a given command route (expressed as method and url) should not be
   * proxied according to this driver
   *
   * @param {string} sessionId - the current sessionId (in case the driver runs
   * multiple session ids and requires it). This is not used in this method but
   * should be made available to overridden methods.
   * @param {import('@appium/types').HTTPMethod} method - HTTP method of the route
   * @param {string} url - url of the route
   * @param {any} [body] - webdriver request body
   *
   * @returns {boolean} - whether the route should be avoided
   */
  proxyRouteIsAvoided(sessionId, method, url, body) {
    for (let avoidSchema of this.getProxyAvoidList(sessionId)) {
      if (!_.isArray(avoidSchema) || avoidSchema.length !== 2) {
        throw new Error('Proxy avoidance must be a list of pairs');

@@ -453,10 +443,7 @@ class DriverCore {
      if (!_.isRegExp(avoidPathRegex)) {
        throw new Error('Proxy avoidance path must be a regular expression');
      }
      let normalizedUrl = url.replace(
        new RegExp(`^${_.escapeRegExp(this.basePath)}`),
        '',
      );
      let normalizedUrl = url.replace(new RegExp(`^${_.escapeRegExp(this.basePath)}`), '');
      if (avoidMethod === method && avoidPathRegex.test(normalizedUrl)) {
        return true;
      }
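The loop above expects getProxyAvoidList to return [HTTP method, RegExp] pairs. A hypothetical override (not from the commit; the routes are examples only):

getProxyAvoidList(sessionId) {
  return [
    ['GET', new RegExp('^/session/[^/]+/log')],
    ['POST', new RegExp('^/session/[^/]+/execute')],
  ];
}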
@@ -465,18 +452,18 @@ class DriverCore {
  }

  /**
   *
   * @param {Driver} driver
   */
  addManagedDriver (driver) {
   *
   * @param {Driver} driver
   */
  addManagedDriver(driver) {
    this.managedDrivers.push(driver);
  }

  getManagedDrivers () {
  getManagedDrivers() {
    return this.managedDrivers;
  }

  async clearNewCommandTimeout () {
  async clearNewCommandTimeout() {
    if (this.noCommandTimer) {
      clearTimeout(this.noCommandTimer);
      this.noCommandTimer = null;

@@ -9,53 +9,50 @@ let desiredCapabilityConstraints = {
    isString: true,
  },
  deviceName: {
    isString: true
    isString: true,
  },
  platformVersion: {
    isString: true
    isString: true,
  },
  newCommandTimeout: {
    isNumber: true
    isNumber: true,
  },
  automationName: {
    isString: true
    isString: true,
  },
  autoLaunch: {
    isBoolean: true
    isBoolean: true,
  },
  udid: {
    isString: true
    isString: true,
  },
  orientation: {
    inclusion: [
      'LANDSCAPE',
      'PORTRAIT'
    ]
    inclusion: ['LANDSCAPE', 'PORTRAIT'],
  },
  autoWebview: {
    isBoolean: true
    isBoolean: true,
  },
  noReset: {
    isBoolean: true
    isBoolean: true,
  },
  fullReset: {
    isBoolean: true
    isBoolean: true,
  },
  language: {
    isString: true
    isString: true,
  },
  locale: {
    isString: true
    isString: true,
  },
  eventTimings: {
    isBoolean: true
    isBoolean: true,
  },
  printPageSourceOnFindFailure: {
    isBoolean: true
    isBoolean: true,
  },
};
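These are the base constraints; as the desiredCapConstraints setter earlier in this diff shows, subclasses only ever extend them. A hypothetical driver-side extension using the validators defined below:

class MyDriver extends BaseDriver {
  constructor(...args) {
    super(...args);
    this.desiredCapConstraints = {
      app: {isString: true},
      orientation: {inclusion: ['LANDSCAPE', 'PORTRAIT']},
    };
  }
}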
validator.validators.isString = function isString (value) {
validator.validators.isString = function isString(value) {
  if (typeof value === 'string') {
    return null;
  }

@@ -66,7 +63,7 @@ validator.validators.isString = function isString (value) {

  return 'must be of type string';
};
validator.validators.isNumber = function isNumber (value) {
validator.validators.isNumber = function isNumber(value) {
  if (typeof value === 'number') {
    return null;
  }

@@ -83,7 +80,7 @@ validator.validators.isNumber = function isNumber (value) {

  return 'must be of type number';
};
validator.validators.isBoolean = function isBoolean (value) {
validator.validators.isBoolean = function isBoolean(value) {
  if (typeof value === 'boolean') {
    return null;
  }

@@ -99,7 +96,7 @@ validator.validators.isBoolean = function isBoolean (value) {

  return 'must be of type boolean';
};
validator.validators.isObject = function isObject (value) {
validator.validators.isObject = function isObject(value) {
  if (typeof value === 'object') {
    return null;
  }

@@ -110,7 +107,7 @@ validator.validators.isObject = function isObject (value) {

  return 'must be of type object';
};
validator.validators.isArray = function isArray (value) {
validator.validators.isArray = function isArray(value) {
  if (Array.isArray(value)) {
    return null;
  }

@@ -121,13 +118,13 @@ validator.validators.isArray = function isArray (value) {

  return 'must be of type array';
};
validator.validators.deprecated = function deprecated (value, options, key) {
validator.validators.deprecated = function deprecated(value, options, key) {
  if (options) {
    log.warn(`${key} is a deprecated capability`);
  }
  return null;
};
validator.validators.inclusionCaseInsensitive = function inclusionCaseInsensitive (value, options) {
validator.validators.inclusionCaseInsensitive = function inclusionCaseInsensitive(value, options) {
  if (typeof value === 'undefined') {
    return null;
  } else if (typeof value !== 'string') {

@@ -142,9 +139,8 @@ validator.validators.inclusionCaseInsensitive = function inclusionCaseInsensitiv
};

validator.promise = B;
validator.prettify = function prettify (val) {
validator.prettify = function prettify(val) {
  return val;
};

export { desiredCapabilityConstraints, validator };
export {desiredCapabilityConstraints, validator};

@@ -2,8 +2,8 @@

import _ from 'lodash';
import log from './logger';
import { node, util } from '@appium/support';
import { errors } from '../protocol/errors';
import {node, util} from '@appium/support';
import {errors} from '../protocol/errors';

/**
 * Maximum size (in bytes) of a given driver's settings object (which is internal to {@linkcode DriverSettings}).

@@ -15,7 +15,6 @@ export const MAX_SETTINGS_SIZE = 20 * 1024 * 1024; // 20 MB
 * @implements {IDeviceSettings<T>}
 */
class DeviceSettings {

  /**
   * @protected
   * @type {T}

@@ -33,8 +32,8 @@ class DeviceSettings {
   * @param {T} [defaultSettings]
   * @param {import('@appium/types').SettingsUpdateListener<T>} [onSettingsUpdate]
   */
  constructor (defaultSettings, onSettingsUpdate) {
    this._settings = /** @type {T} */({...(defaultSettings ?? {})});
  constructor(defaultSettings, onSettingsUpdate) {
    this._settings = /** @type {T} */ ({...(defaultSettings ?? {})});
    this._onSettingsUpdate = onSettingsUpdate ?? (async () => {});
  }

@@ -42,18 +41,22 @@ class DeviceSettings {
   * calls updateSettings from implementing driver every time a setting is changed.
   * @param {T} newSettings
   */
  async update (newSettings) {
  async update(newSettings) {
    if (!_.isPlainObject(newSettings)) {
      throw new errors.InvalidArgumentError(`Settings update should be called with valid JSON. Got ` +
        `${JSON.stringify(newSettings)} instead`);
      throw new errors.InvalidArgumentError(
        `Settings update should be called with valid JSON. Got ` +
          `${JSON.stringify(newSettings)} instead`
      );
    }

    if (node.getObjectSize({...this._settings, ...newSettings}) >= MAX_SETTINGS_SIZE) {
      throw new errors.InvalidArgumentError(`New settings cannot be applied, because the overall ` +
        `object size exceeds the allowed limit of ${util.toReadableSizeString(MAX_SETTINGS_SIZE)}`);
      throw new errors.InvalidArgumentError(
        `New settings cannot be applied, because the overall ` +
          `object size exceeds the allowed limit of ${util.toReadableSizeString(MAX_SETTINGS_SIZE)}`
      );
    }

    const props = /** @type {(keyof T & string)[]} */(_.keys(newSettings));
    const props = /** @type {(keyof T & string)[]} */ (_.keys(newSettings));
    for (const prop of props) {
      if (!_.isUndefined(this._settings[prop])) {
        if (this._settings[prop] === newSettings[prop]) {

@@ -66,13 +69,13 @@ class DeviceSettings {
      }
    }

  getSettings () {
  getSettings() {
    return this._settings;
  }
}

export default DeviceSettings;
export { DeviceSettings };
export {DeviceSettings};
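A short usage sketch (not part of the commit) of the class above; the setting key and the listener signature are assumptions:

const settings = new DeviceSettings(
  {imageMatchThreshold: 0.4},
  async (prop, newValue, oldValue) => {
    // react to a changed setting in a driver-specific way
  }
);
await settings.update({imageMatchThreshold: 0.6});
settings.getSettings(); // => {imageMatchThreshold: 0.6}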
|
||||
|
||||
/**
|
||||
* @template T
|
||||
|
||||
@@ -2,19 +2,19 @@
|
||||
/* eslint-disable require-await */
|
||||
/* eslint-disable no-unused-vars */
|
||||
|
||||
import { DriverCore } from './core';
|
||||
import { util } from '@appium/support';
|
||||
import {DriverCore} from './core';
|
||||
import {util} from '@appium/support';
|
||||
import B from 'bluebird';
|
||||
import _ from 'lodash';
|
||||
import { fixCaps, isW3cCaps } from '../helpers/capabilities';
|
||||
import { DELETE_SESSION_COMMAND, determineProtocol, errors } from '../protocol';
|
||||
import {fixCaps, isW3cCaps} from '../helpers/capabilities';
|
||||
import {DELETE_SESSION_COMMAND, determineProtocol, errors} from '../protocol';
|
||||
import {
|
||||
APPIUM_OPTS_CAP,
|
||||
PREFIXED_APPIUM_OPTS_CAP,
|
||||
processCapabilities,
|
||||
promoteAppiumOptions
|
||||
promoteAppiumOptions,
|
||||
} from './capabilities';
|
||||
import { createBaseDriverClass } from './commands';
|
||||
import {createBaseDriverClass} from './commands';
|
||||
import helpers from './helpers';
|
||||
|
||||
const EVENT_SESSION_INIT = 'newSessionRequested';
@@ -23,12 +23,10 @@ const EVENT_SESSION_QUIT_START = 'quitSessionRequested';
const EVENT_SESSION_QUIT_DONE = 'quitSessionFinished';
const ON_UNEXPECTED_SHUTDOWN_EVENT = 'onUnexpectedShutdown';


/**
* @implements {SessionHandler}
*/
export class BaseDriverCore extends DriverCore {

/** @type {Record<string,any>|undefined} */
cliArgs;

@@ -37,11 +35,11 @@ export class BaseDriverCore extends DriverCore {
// and ensuring that we execute commands one at a time. This method is called
// by MJSONWP's express router.
/**
* @param {string} cmd
* @param {...any[]} args
* @returns {Promise<any>}
*/
async executeCommand (cmd, ...args) {
* @param {string} cmd
* @param {...any[]} args
* @returns {Promise<any>}
*/
async executeCommand(cmd, ...args) {
// get start time for this command, and log in special cases
let startTime = Date.now();

@@ -58,9 +56,7 @@ export class BaseDriverCore extends DriverCore {
await this.clearNewCommandTimeout();

if (this.shutdownUnexpectedly) {
throw new errors.NoSuchDriverError(
'The driver was unexpectedly shut down!',
);
throw new errors.NoSuchDriverError('The driver was unexpectedly shut down!');
}

// If we don't have this command, it must not be implemented
@@ -74,17 +70,14 @@ export class BaseDriverCore extends DriverCore {
this[cmd](...args),
new B((resolve, reject) => {
unexpectedShutdownListener = reject;
this.eventEmitter.on(
ON_UNEXPECTED_SHUTDOWN_EVENT,
unexpectedShutdownListener,
);
this.eventEmitter.on(ON_UNEXPECTED_SHUTDOWN_EVENT, unexpectedShutdownListener);
}),
]).finally(() => {
if (unexpectedShutdownListener) {
// This is needed to prevent memory leaks
this.eventEmitter.removeListener(
ON_UNEXPECTED_SHUTDOWN_EVENT,
unexpectedShutdownListener,
ON_UNEXPECTED_SHUTDOWN_EVENT,
unexpectedShutdownListener
);
unexpectedShutdownListener = null;
}
@@ -117,13 +110,11 @@ export class BaseDriverCore extends DriverCore {
}

/**
*
* @param {Error} err
*/
async startUnexpectedShutdown (
err = new errors.NoSuchDriverError(
'The driver was unexpectedly shut down!',
),
*
* @param {Error} err
*/
async startUnexpectedShutdown(
err = new errors.NoSuchDriverError('The driver was unexpectedly shut down!')
) {
this.eventEmitter.emit(ON_UNEXPECTED_SHUTDOWN_EVENT, err); // allow others to listen for this
this.shutdownUnexpectedly = true;
@@ -136,7 +127,7 @@ export class BaseDriverCore extends DriverCore {
}
}

async startNewCommandTimeout () {
async startNewCommandTimeout() {
// make sure there are no rogue timeouts
await this.clearNewCommandTimeout();

@@ -146,7 +137,7 @@ export class BaseDriverCore extends DriverCore {
this.noCommandTimer = setTimeout(async () => {
this.log.warn(
`Shutting down because we waited ` +
`${this.newCommandTimeoutMs / 1000.0} seconds for a command`,
`${this.newCommandTimeoutMs / 1000.0} seconds for a command`
);
const errorMessage =
`New Command Timeout of ` +
@@ -164,7 +155,7 @@ export class BaseDriverCore extends DriverCore {
* @param {number} port
* @param {string} path
*/
assignServer (server, host, port, path) {
assignServer(server, host, port, path) {
this.server = server;
this.serverHost = host;
this.serverPort = port;
@@ -172,10 +163,10 @@ export class BaseDriverCore extends DriverCore {
}

/*
* Restart the session with the original caps,
* preserving the timeout config.
*/
async reset () {
* Restart the session with the original caps,
* preserving the timeout config.
*/
async reset() {
this.log.debug('Resetting app mid-session');
this.log.debug('Running generic full reset');

@@ -209,40 +200,33 @@ export class BaseDriverCore extends DriverCore {
}

/**
*
* Historically the first two arguments were reserved for JSONWP capabilities.
* Appium 2 has dropped the support of these, so now we only accept capability
* objects in W3C format and thus allow any of the three arguments to represent
* the latter.
* @param {W3CCapabilities} w3cCapabilities1
* @param {W3CCapabilities} [w3cCapabilities2]
* @param {W3CCapabilities} [w3cCapabilities]
* @param {DriverData[]} [driverData]
* @returns {Promise<[string,object]>}
*/
async createSession (
w3cCapabilities1,
w3cCapabilities2,
w3cCapabilities,
driverData,
) {
*
* Historically the first two arguments were reserved for JSONWP capabilities.
* Appium 2 has dropped the support of these, so now we only accept capability
* objects in W3C format and thus allow any of the three arguments to represent
* the latter.
* @param {W3CCapabilities} w3cCapabilities1
* @param {W3CCapabilities} [w3cCapabilities2]
* @param {W3CCapabilities} [w3cCapabilities]
* @param {DriverData[]} [driverData]
* @returns {Promise<[string,object]>}
*/
async createSession(w3cCapabilities1, w3cCapabilities2, w3cCapabilities, driverData) {
if (this.sessionId !== null) {
throw new errors.SessionNotCreatedError(
'Cannot create a new session while one is in progress',
'Cannot create a new session while one is in progress'
);
}

this.log.debug();

const originalCaps = _.cloneDeep([
w3cCapabilities,
w3cCapabilities1,
w3cCapabilities2,
].find(isW3cCaps));
const originalCaps = _.cloneDeep(
[w3cCapabilities, w3cCapabilities1, w3cCapabilities2].find(isW3cCaps)
);
if (!originalCaps) {
throw new errors.SessionNotCreatedError(
'Appium only supports W3C-style capability objects. ' +
'Your client is sending an older capabilities format. Please update your client library.',
'Appium only supports W3C-style capability objects. ' +
'Your client is sending an older capabilities format. Please update your client library.'
);
}

@@ -250,23 +234,15 @@ export class BaseDriverCore extends DriverCore {

this.originalCaps = _.cloneDeep(originalCaps);
this.log.debug(
`Creating session with W3C capabilities: ${JSON.stringify(
originalCaps,
null,
2,
)}`,
`Creating session with W3C capabilities: ${JSON.stringify(originalCaps, null, 2)}`
);

let caps;
try {
caps = processCapabilities(
originalCaps,
this.desiredCapConstraints,
this.shouldValidateCaps,
);
caps = processCapabilities(originalCaps, this.desiredCapConstraints, this.shouldValidateCaps);
if (caps[APPIUM_OPTS_CAP]) {
this.log.debug(
`Found ${PREFIXED_APPIUM_OPTS_CAP} capability present; will promote items inside to caps`,
`Found ${PREFIXED_APPIUM_OPTS_CAP} capability present; will promote items inside to caps`
);
caps = promoteAppiumOptions(caps);
}
@@ -289,9 +265,9 @@ export class BaseDriverCore extends DriverCore {
// both to true, but this is misguided and strange, so error here instead
if (this.opts.noReset && this.opts.fullReset) {
throw new Error(
"The 'noReset' and 'fullReset' capabilities are mutually " +
'exclusive and should not both be set to true. You ' +
"probably meant to just use 'fullReset' on its own",
"The 'noReset' and 'fullReset' capabilities are mutually " +
'exclusive and should not both be set to true. You ' +
"probably meant to just use 'fullReset' on its own"
);
}
if (this.opts.noReset === true) {
@@ -320,12 +296,12 @@ export class BaseDriverCore extends DriverCore {
}

/**
*
* @param {string} [sessionId]
* @param {DriverData[]} [driverData]
* @returns {Promise<void>}
*/
async deleteSession (sessionId, driverData) {
*
* @param {string} [sessionId]
* @param {DriverData[]} [driverData]
* @returns {Promise<void>}
*/
async deleteSession(sessionId, driverData) {
await this.clearNewCommandTimeout();
if (this.isCommandsQueueEnabled && this.commandsQueueGuard.isBusy()) {
// simple hack to release pending commands if they exist
@@ -345,7 +321,7 @@ export class BaseDriverCore extends DriverCore {
* @implements {Driver}
*/
class BaseDriver extends createBaseDriverClass(BaseDriverCore) {}
export { BaseDriver };
export {BaseDriver};
export default BaseDriver;

/**
@@ -357,7 +333,6 @@ export default BaseDriver;
* @typedef {import('@appium/types').DriverData} DriverData
*/


/**
* @callback UpdateServerCallback
* @param {import('express').Express} app - Express app

@@ -2,18 +2,14 @@ import _ from 'lodash';
|
||||
import path from 'path';
|
||||
import url from 'url';
|
||||
import logger from './logger';
|
||||
import { tempDir, fs, util, zip, net, timing, node } from '@appium/support';
|
||||
import {tempDir, fs, util, zip, net, timing, node} from '@appium/support';
|
||||
import LRU from 'lru-cache';
|
||||
import AsyncLock from 'async-lock';
|
||||
import axios from 'axios';
|
||||
|
||||
const IPA_EXT = '.ipa';
|
||||
const ZIP_EXTS = ['.zip', IPA_EXT];
|
||||
const ZIP_MIME_TYPES = [
|
||||
'application/zip',
|
||||
'application/x-zip-compressed',
|
||||
'multipart/x-zip',
|
||||
];
|
||||
const ZIP_MIME_TYPES = ['application/zip', 'application/x-zip-compressed', 'multipart/x-zip'];
|
||||
const CACHED_APPS_MAX_AGE = 1000 * 60 * 60 * 24; // ms
|
||||
const MAX_CACHED_APPS = 1024;
|
||||
const APPLICATIONS_CACHE = new LRU({
|
||||
@@ -21,8 +17,10 @@ const APPLICATIONS_CACHE = new LRU({
|
||||
ttl: CACHED_APPS_MAX_AGE, // expire after 24 hours
|
||||
updateAgeOnGet: true,
|
||||
dispose: (app, {fullPath}) => {
|
||||
logger.info(`The application '${app}' cached at '${fullPath}' has ` +
|
||||
`expired after ${CACHED_APPS_MAX_AGE}ms`);
|
||||
logger.info(
|
||||
`The application '${app}' cached at '${fullPath}' has ` +
|
||||
`expired after ${CACHED_APPS_MAX_AGE}ms`
|
||||
);
|
||||
if (fullPath) {
|
||||
fs.rimraf(fullPath);
|
||||
}
|
||||
@@ -39,10 +37,11 @@ process.on('exit', () => {
|
||||
return;
|
||||
}
|
||||
|
||||
const appPaths = [...APPLICATIONS_CACHE.values()]
|
||||
.map(({fullPath}) => fullPath);
|
||||
logger.debug(`Performing cleanup of ${appPaths.length} cached ` +
|
||||
util.pluralize('application', appPaths.length));
|
||||
const appPaths = [...APPLICATIONS_CACHE.values()].map(({fullPath}) => fullPath);
|
||||
logger.debug(
|
||||
`Performing cleanup of ${appPaths.length} cached ` +
|
||||
util.pluralize('application', appPaths.length)
|
||||
);
|
||||
for (const appPath of appPaths) {
|
||||
try {
|
||||
// Asynchronous calls are not supported in onExit handler
|
||||
@@ -53,21 +52,22 @@ process.on('exit', () => {
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
async function retrieveHeaders (link) {
|
||||
async function retrieveHeaders(link) {
|
||||
try {
|
||||
return (await axios({
|
||||
url: link,
|
||||
method: 'HEAD',
|
||||
timeout: 5000,
|
||||
})).headers;
|
||||
return (
|
||||
await axios({
|
||||
url: link,
|
||||
method: 'HEAD',
|
||||
timeout: 5000,
|
||||
})
|
||||
).headers;
|
||||
} catch (e) {
|
||||
logger.info(`Cannot send HEAD request to '${link}'. Original error: ${e.message}`);
|
||||
}
|
||||
return {};
|
||||
}
|
||||
|
||||
function getCachedApplicationPath (link, currentAppProps = {}, cachedAppInfo = {}) {
|
||||
function getCachedApplicationPath(link, currentAppProps = {}, cachedAppInfo = {}) {
|
||||
const refresh = () => {
|
||||
logger.debug(`A fresh copy of the application is going to be downloaded from ${link}`);
|
||||
return null;
|
||||
@@ -108,7 +108,9 @@ function getCachedApplicationPath (link, currentAppProps = {}, cachedAppInfo = {
|
||||
if (currentMaxAge && timestamp) {
|
||||
const msLeft = timestamp + currentMaxAge * 1000 - Date.now();
|
||||
if (msLeft > 0) {
|
||||
logger.debug(`The cached application '${path.basename(fullPath)}' will expire in ${msLeft / 1000}s`);
|
||||
logger.debug(
|
||||
`The cached application '${path.basename(fullPath)}' will expire in ${msLeft / 1000}s`
|
||||
);
|
||||
return fullPath;
|
||||
}
|
||||
logger.debug(`The cached application '${path.basename(fullPath)}' has expired`);
|
||||
@@ -116,25 +118,27 @@ function getCachedApplicationPath (link, currentAppProps = {}, cachedAppInfo = {
|
||||
return refresh();
|
||||
}
|
||||
|
||||
function verifyAppExtension (app, supportedAppExtensions) {
|
||||
function verifyAppExtension(app, supportedAppExtensions) {
|
||||
if (supportedAppExtensions.map(_.toLower).includes(_.toLower(path.extname(app)))) {
|
||||
return app;
|
||||
}
|
||||
throw new Error(`New app path '${app}' did not have ` +
|
||||
`${util.pluralize('extension', supportedAppExtensions.length, false)}: ` +
|
||||
supportedAppExtensions);
|
||||
throw new Error(
|
||||
`New app path '${app}' did not have ` +
|
||||
`${util.pluralize('extension', supportedAppExtensions.length, false)}: ` +
|
||||
supportedAppExtensions
|
||||
);
|
||||
}
|
||||
|
||||
async function calculateFolderIntegrity (folderPath) {
|
||||
async function calculateFolderIntegrity(folderPath) {
|
||||
return (await fs.glob('**/*', {cwd: folderPath, strict: false, nosort: true})).length;
|
||||
}
|
||||
|
||||
async function calculateFileIntegrity (filePath) {
|
||||
async function calculateFileIntegrity(filePath) {
|
||||
return await fs.hash(filePath);
|
||||
}
|
||||
|
||||
async function isAppIntegrityOk (currentPath, expectedIntegrity = {}) {
|
||||
if (!await fs.exists(currentPath)) {
|
||||
async function isAppIntegrityOk(currentPath, expectedIntegrity = {}) {
|
||||
if (!(await fs.exists(currentPath))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -146,8 +150,8 @@ async function isAppIntegrityOk (currentPath, expectedIntegrity = {}) {
|
||||
// more precise, but we don't need to be very precise here and also don't want to
|
||||
// overuse RAM and have a performance drop.
|
||||
return (await fs.stat(currentPath)).isDirectory()
|
||||
? await calculateFolderIntegrity(currentPath) >= expectedIntegrity?.folder
|
||||
: await calculateFileIntegrity(currentPath) === expectedIntegrity?.file;
|
||||
? (await calculateFolderIntegrity(currentPath)) >= expectedIntegrity?.folder
|
||||
: (await calculateFileIntegrity(currentPath)) === expectedIntegrity?.file;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -200,16 +204,14 @@ async function isAppIntegrityOk (currentPath, expectedIntegrity = {}) {
|
||||
* @param {string|string[]|ConfigureAppOptions} options
|
||||
* @returns The full path to the resulting application bundle
|
||||
*/
|
||||
async function configureApp (app, options = {}) {
|
||||
async function configureApp(app, options = {}) {
|
||||
if (!_.isString(app)) {
|
||||
// immediately shortcircuit if not given an app
|
||||
return;
|
||||
}
|
||||
|
||||
let supportedAppExtensions;
|
||||
const {
|
||||
onPostProcess,
|
||||
} = _.isPlainObject(options) ? options : {};
|
||||
const {onPostProcess} = _.isPlainObject(options) ? options : {};
|
||||
if (_.isString(options)) {
|
||||
supportedAppExtensions = [options];
|
||||
} else if (_.isArray(options)) {
|
||||
@@ -260,14 +262,16 @@ async function configureApp (app, options = {}) {
|
||||
logger.info(`Reusing previously downloaded application at '${cachedPath}'`);
|
||||
return verifyAppExtension(cachedPath, supportedAppExtensions);
|
||||
}
|
||||
logger.info(`The application at '${cachedPath}' does not exist anymore ` +
|
||||
`or its integrity has been damaged. Deleting it from the internal cache`);
|
||||
logger.info(
|
||||
`The application at '${cachedPath}' does not exist anymore ` +
|
||||
`or its integrity has been damaged. Deleting it from the internal cache`
|
||||
);
|
||||
APPLICATIONS_CACHE.delete(app);
|
||||
}
|
||||
|
||||
let fileName = null;
|
||||
const basename = fs.sanitizeName(path.basename(decodeURIComponent(pathname)), {
|
||||
replacement: SANITIZE_REPLACEMENT
|
||||
replacement: SANITIZE_REPLACEMENT,
|
||||
});
|
||||
const extname = path.extname(basename);
|
||||
// to determine if we need to unzip the app, we have a number of places
|
||||
@@ -280,7 +284,11 @@ async function configureApp (app, options = {}) {
|
||||
const ct = headers['content-type'];
|
||||
logger.debug(`Content-Type: ${ct}`);
|
||||
// the filetype may not be obvious for certain urls, so check the mime type too
|
||||
if (ZIP_MIME_TYPES.some((mimeType) => new RegExp(`\\b${_.escapeRegExp(mimeType)}\\b`).test(ct))) {
|
||||
if (
|
||||
ZIP_MIME_TYPES.some((mimeType) =>
|
||||
new RegExp(`\\b${_.escapeRegExp(mimeType)}\\b`).test(ct)
|
||||
)
|
||||
) {
|
||||
if (!fileName) {
|
||||
fileName = `${DEFAULT_BASENAME}.zip`;
|
||||
}
|
||||
@@ -292,7 +300,7 @@ async function configureApp (app, options = {}) {
|
||||
const match = /filename="([^"]+)/i.exec(headers['content-disposition']);
|
||||
if (match) {
|
||||
fileName = fs.sanitizeName(match[1], {
|
||||
replacement: SANITIZE_REPLACEMENT
|
||||
replacement: SANITIZE_REPLACEMENT,
|
||||
});
|
||||
shouldUnzipApp = shouldUnzipApp || ZIP_EXTS.includes(path.extname(fileName));
|
||||
}
|
||||
@@ -304,8 +312,10 @@ async function configureApp (app, options = {}) {
|
||||
: DEFAULT_BASENAME;
|
||||
let resultingExt = extname;
|
||||
if (!supportedAppExtensions.includes(resultingExt)) {
|
||||
logger.info(`The current file extension '${resultingExt}' is not supported. ` +
|
||||
`Defaulting to '${_.first(supportedAppExtensions)}'`);
|
||||
logger.info(
|
||||
`The current file extension '${resultingExt}' is not supported. ` +
|
||||
`Defaulting to '${_.first(supportedAppExtensions)}'`
|
||||
);
|
||||
resultingExt = _.first(supportedAppExtensions);
|
||||
}
|
||||
fileName = `${resultingName}${resultingExt}`;
|
||||
@@ -323,7 +333,8 @@ async function configureApp (app, options = {}) {
|
||||
let errorMessage = `The application at '${newApp}' does not exist or is not accessible`;
|
||||
// protocol value for 'C:\\temp' is 'c:', so we check the length as well
|
||||
if (_.isString(protocol) && protocol.length > 2) {
|
||||
errorMessage = `The protocol '${protocol}' used in '${newApp}' is not supported. ` +
|
||||
errorMessage =
|
||||
`The protocol '${protocol}' used in '${newApp}' is not supported. ` +
|
||||
`Only http: and https: protocols are supported`;
|
||||
}
|
||||
throw new Error(errorMessage);
|
||||
@@ -345,8 +356,10 @@ async function configureApp (app, options = {}) {
|
||||
logger.info(`Will reuse previously cached application at '${fullPath}'`);
|
||||
return verifyAppExtension(fullPath, supportedAppExtensions);
|
||||
}
|
||||
logger.info(`The application at '${fullPath}' does not exist anymore ` +
|
||||
`or its integrity has been damaged. Deleting it from the cache`);
|
||||
logger.info(
|
||||
`The application at '${fullPath}' does not exist anymore ` +
|
||||
`or its integrity has been damaged. Deleting it from the cache`
|
||||
);
|
||||
APPLICATIONS_CACHE.delete(app);
|
||||
}
|
||||
const tmpRoot = await tempDir.openDir();
|
||||
@@ -360,8 +373,10 @@ async function configureApp (app, options = {}) {
|
||||
logger.info(`Unzipped local app to '${newApp}'`);
|
||||
} else if (!path.isAbsolute(newApp)) {
|
||||
newApp = path.resolve(process.cwd(), newApp);
|
||||
logger.warn(`The current application path '${app}' is not absolute ` +
|
||||
`and has been rewritten to '${newApp}'. Consider using absolute paths rather than relative`);
|
||||
logger.warn(
|
||||
`The current application path '${app}' is not absolute ` +
|
||||
`and has been rewritten to '${newApp}'. Consider using absolute paths rather than relative`
|
||||
);
|
||||
app = newApp;
|
||||
}
|
||||
|
||||
@@ -393,19 +408,19 @@ async function configureApp (app, options = {}) {
|
||||
headers: _.clone(headers),
|
||||
appPath: newApp,
|
||||
});
|
||||
return (!result?.appPath || app === result?.appPath || !await fs.exists(result?.appPath))
|
||||
return !result?.appPath || app === result?.appPath || !(await fs.exists(result?.appPath))
|
||||
? newApp
|
||||
: await storeAppInCache(result.appPath);
|
||||
}
|
||||
|
||||
verifyAppExtension(newApp, supportedAppExtensions);
|
||||
return (app !== newApp && (packageHash || _.values(remoteAppProps).some(Boolean)))
|
||||
return app !== newApp && (packageHash || _.values(remoteAppProps).some(Boolean))
|
||||
? await storeAppInCache(newApp)
|
||||
: newApp;
|
||||
});
|
||||
}
|
||||
|
||||
async function downloadApp (app, targetPath) {
|
||||
async function downloadApp(app, targetPath) {
|
||||
const {href} = url.parse(app);
|
||||
try {
|
||||
await net.downloadFile(href, targetPath, {
|
||||
@@ -430,7 +445,7 @@ async function downloadApp (app, targetPath) {
|
||||
* @throws {Error} If the given archive is invalid or no application bundles
|
||||
* have been found inside
|
||||
*/
|
||||
async function unzipApp (zipPath, dstRoot, supportedAppExtensions) {
|
||||
async function unzipApp(zipPath, dstRoot, supportedAppExtensions) {
|
||||
await zip.assertValidZip(zipPath);
|
||||
|
||||
if (!_.isArray(supportedAppExtensions)) {
|
||||
@@ -442,8 +457,8 @@ async function unzipApp (zipPath, dstRoot, supportedAppExtensions) {
|
||||
logger.debug(`Unzipping '${zipPath}'`);
|
||||
const timer = new timing.Timer().start();
|
||||
const useSystemUnzipEnv = process.env.APPIUM_PREFER_SYSTEM_UNZIP;
|
||||
const useSystemUnzip = _.isEmpty(useSystemUnzipEnv)
|
||||
|| !['0', 'false'].includes(_.toLower(useSystemUnzipEnv));
|
||||
const useSystemUnzip =
|
||||
_.isEmpty(useSystemUnzipEnv) || !['0', 'false'].includes(_.toLower(useSystemUnzipEnv));
|
||||
/**
|
||||
* Attempt to use use the system `unzip` (e.g., `/usr/bin/unzip`) due
|
||||
* to the significant performance improvement it provides over the native
|
||||
@@ -453,24 +468,40 @@ async function unzipApp (zipPath, dstRoot, supportedAppExtensions) {
|
||||
const extractionOpts = {useSystemUnzip};
|
||||
// https://github.com/appium/appium/issues/14100
|
||||
if (path.extname(zipPath) === IPA_EXT) {
|
||||
logger.debug(`Enforcing UTF-8 encoding on the extracted file names for '${path.basename(zipPath)}'`);
|
||||
logger.debug(
|
||||
`Enforcing UTF-8 encoding on the extracted file names for '${path.basename(zipPath)}'`
|
||||
);
|
||||
extractionOpts.fileNamesEncoding = 'utf8';
|
||||
}
|
||||
await zip.extractAllTo(zipPath, tmpRoot, extractionOpts);
|
||||
const globPattern = `**/*.+(${supportedAppExtensions.map((ext) => ext.replace(/^\./, '')).join('|')})`;
|
||||
const sortedBundleItems = (await fs.glob(globPattern, {
|
||||
cwd: tmpRoot,
|
||||
strict: false,
|
||||
// Get the top level match
|
||||
})).sort((a, b) => a.split(path.sep).length - b.split(path.sep).length);
|
||||
const globPattern = `**/*.+(${supportedAppExtensions
|
||||
.map((ext) => ext.replace(/^\./, ''))
|
||||
.join('|')})`;
|
||||
const sortedBundleItems = (
|
||||
await fs.glob(globPattern, {
|
||||
cwd: tmpRoot,
|
||||
strict: false,
|
||||
// Get the top level match
|
||||
})
|
||||
).sort((a, b) => a.split(path.sep).length - b.split(path.sep).length);
|
||||
if (_.isEmpty(sortedBundleItems)) {
|
||||
logger.errorAndThrow(`App unzipped OK, but we could not find any '${supportedAppExtensions}' ` +
|
||||
util.pluralize('bundle', supportedAppExtensions.length, false) +
|
||||
` in it. Make sure your archive contains at least one package having ` +
|
||||
`'${supportedAppExtensions}' ${util.pluralize('extension', supportedAppExtensions.length, false)}`);
|
||||
logger.errorAndThrow(
|
||||
`App unzipped OK, but we could not find any '${supportedAppExtensions}' ` +
|
||||
util.pluralize('bundle', supportedAppExtensions.length, false) +
|
||||
` in it. Make sure your archive contains at least one package having ` +
|
||||
`'${supportedAppExtensions}' ${util.pluralize(
|
||||
'extension',
|
||||
supportedAppExtensions.length,
|
||||
false
|
||||
)}`
|
||||
);
|
||||
}
|
||||
logger.debug(`Extracted ${util.pluralize('bundle item', sortedBundleItems.length, true)} ` +
|
||||
`from '${zipPath}' in ${Math.round(timer.getDuration().asMilliSeconds)}ms: ${sortedBundleItems}`);
|
||||
logger.debug(
|
||||
`Extracted ${util.pluralize('bundle item', sortedBundleItems.length, true)} ` +
|
||||
`from '${zipPath}' in ${Math.round(
|
||||
timer.getDuration().asMilliSeconds
|
||||
)}ms: ${sortedBundleItems}`
|
||||
);
|
||||
const matchedBundle = _.first(sortedBundleItems);
|
||||
logger.info(`Assuming '${matchedBundle}' is the correct bundle`);
|
||||
const dstPath = path.resolve(dstRoot, path.basename(matchedBundle));
|
||||
@@ -481,8 +512,8 @@ async function unzipApp (zipPath, dstRoot, supportedAppExtensions) {
|
||||
}
|
||||
}
|
||||
|
||||
function isPackageOrBundle (app) {
|
||||
return (/^([a-zA-Z0-9\-_]+\.[a-zA-Z0-9\-_]+)+$/).test(app);
|
||||
function isPackageOrBundle(app) {
|
||||
return /^([a-zA-Z0-9\-_]+\.[a-zA-Z0-9\-_]+)+$/.test(app);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -495,7 +526,7 @@ function isPackageOrBundle (app) {
|
||||
* @param {String} firstKey The first key to duplicate
|
||||
* @param {String} secondKey The second key to duplicate
|
||||
*/
|
||||
function duplicateKeys (input, firstKey, secondKey) {
|
||||
function duplicateKeys(input, firstKey, secondKey) {
|
||||
// If array provided, recursively call on all elements
|
||||
if (_.isArray(input)) {
|
||||
return input.map((item) => duplicateKeys(item, firstKey, secondKey));
|
||||
@@ -526,7 +557,7 @@ function duplicateKeys (input, firstKey, secondKey) {
|
||||
*
|
||||
* @param {string|Array<String>} cap A desired capability
|
||||
*/
|
||||
function parseCapsArray (cap) {
|
||||
function parseCapsArray(cap) {
|
||||
if (_.isArray(cap)) {
|
||||
return cap;
|
||||
}
|
||||
@@ -553,15 +584,17 @@ function parseCapsArray (cap) {
|
||||
* @param {string?} sessionId session identifier (if exists)
|
||||
* @returns {string}
|
||||
*/
|
||||
function generateDriverLogPrefix (obj, sessionId = null) {
|
||||
function generateDriverLogPrefix(obj, sessionId = null) {
|
||||
const instanceName = `${obj.constructor.name}@${node.getObjectId(obj).substring(0, 4)}`;
|
||||
return sessionId ? `${instanceName} (${sessionId.substring(0, 8)})` : instanceName;
|
||||
}
|
||||
|
||||
/** @type {import('@appium/types').DriverHelpers} */
|
||||
export default {
|
||||
configureApp, isPackageOrBundle, duplicateKeys, parseCapsArray, generateDriverLogPrefix
|
||||
};
|
||||
export {
|
||||
configureApp, isPackageOrBundle, duplicateKeys, parseCapsArray, generateDriverLogPrefix
|
||||
configureApp,
|
||||
isPackageOrBundle,
|
||||
duplicateKeys,
|
||||
parseCapsArray,
|
||||
generateDriverLogPrefix,
|
||||
};
|
||||
export {configureApp, isPackageOrBundle, duplicateKeys, parseCapsArray, generateDriverLogPrefix};
|
||||
|
||||
@@ -1,4 +1,4 @@
import { logger } from '@appium/support';
import {logger} from '@appium/support';

const log = logger.getLogger('BaseDriver');
export default log;

@@ -1,4 +1,4 @@
import { util } from '@appium/support';
import {util} from '@appium/support';

// The default maximum length of a single log record
// containing http request/response body
@@ -17,8 +17,4 @@ const PROTOCOLS = {
// Before Appium 2.0, this default value was '/wd/hub' by historical reasons.
const DEFAULT_BASE_PATH = '';


export {
MAX_LOG_BODY_LENGTH, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY,
PROTOCOLS, DEFAULT_BASE_PATH
};
export {MAX_LOG_BODY_LENGTH, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY, PROTOCOLS, DEFAULT_BASE_PATH};

@@ -1,13 +1,11 @@
import { errors } from '../protocol';
import {errors} from '../protocol';


function produceError () {
function produceError() {
throw new errors.UnknownCommandError('Produced generic error for testing');
}

function produceCrash () {
function produceCrash() {
throw new Error('We just tried to crash Appium!');
}


export { produceError, produceCrash };
export {produceError, produceCrash};

@@ -2,23 +2,21 @@ import _ from 'lodash';
|
||||
import '@colors/colors';
|
||||
import morgan from 'morgan';
|
||||
import log from './logger';
|
||||
import { MAX_LOG_BODY_LENGTH } from '../constants';
|
||||
|
||||
import {MAX_LOG_BODY_LENGTH} from '../constants';
|
||||
|
||||
// Copied the morgan compile function over so that cooler formats
|
||||
// may be configured
|
||||
function compile (fmt) {
|
||||
function compile(fmt) {
|
||||
// escape quotes
|
||||
fmt = fmt.replace(/"/g, '\\"');
|
||||
fmt = fmt.replace(/:([-\w]{2,})(?:\[([^\]]+)\])?/g,
|
||||
function replace (_, name, arg) {
|
||||
return `"\n + (tokens["${name}"](req, res, "${arg}") || "-") + "`;
|
||||
});
|
||||
fmt = fmt.replace(/:([-\w]{2,})(?:\[([^\]]+)\])?/g, function replace(_, name, arg) {
|
||||
return `"\n + (tokens["${name}"](req, res, "${arg}") || "-") + "`;
|
||||
});
|
||||
let js = ` return "${fmt}";`;
|
||||
return new Function('tokens, req, res', js);
|
||||
}
|
||||
|
||||
function requestEndLoggingFormat (tokens, req, res) {
|
||||
function requestEndLoggingFormat(tokens, req, res) {
|
||||
let status = res.statusCode;
|
||||
let statusStr = ':status';
|
||||
if (status >= 500) {
|
||||
@@ -30,28 +28,32 @@ function requestEndLoggingFormat (tokens, req, res) {
|
||||
} else {
|
||||
statusStr = statusStr.green;
|
||||
}
|
||||
let fn = compile(`${'<-- :method :url '.white}${statusStr} ${':response-time ms - :res[content-length]'.grey}`);
|
||||
let fn = compile(
|
||||
`${'<-- :method :url '.white}${statusStr} ${':response-time ms - :res[content-length]'.grey}`
|
||||
);
|
||||
return fn(tokens, req, res);
|
||||
}
|
||||
|
||||
const endLogFormatter = morgan((tokens, req, res) => {
|
||||
log.info(requestEndLoggingFormat(tokens, req, res),
|
||||
(res.jsonResp || '').grey);
|
||||
log.info(requestEndLoggingFormat(tokens, req, res), (res.jsonResp || '').grey);
|
||||
});
|
||||
|
||||
const requestStartLoggingFormat = compile(`${'-->'.white} ${':method'.white} ${':url'.white}`);
|
||||
|
||||
const startLogFormatter = morgan((tokens, req, res) => {
|
||||
// morgan output is redirected straight to winston
|
||||
let reqBody = '';
|
||||
if (req.body) {
|
||||
try {
|
||||
reqBody = _.truncate(_.isString(req.body) ? req.body : JSON.stringify(req.body), {
|
||||
length: MAX_LOG_BODY_LENGTH,
|
||||
});
|
||||
} catch (ign) {}
|
||||
}
|
||||
log.info(requestStartLoggingFormat(tokens, req, res), reqBody.grey);
|
||||
}, {immediate: true});
|
||||
const startLogFormatter = morgan(
|
||||
(tokens, req, res) => {
|
||||
// morgan output is redirected straight to winston
|
||||
let reqBody = '';
|
||||
if (req.body) {
|
||||
try {
|
||||
reqBody = _.truncate(_.isString(req.body) ? req.body : JSON.stringify(req.body), {
|
||||
length: MAX_LOG_BODY_LENGTH,
|
||||
});
|
||||
} catch (ign) {}
|
||||
}
|
||||
log.info(requestStartLoggingFormat(tokens, req, res), reqBody.grey);
|
||||
},
|
||||
{immediate: true}
|
||||
);
|
||||
|
||||
export { endLogFormatter, startLogFormatter };
|
||||
export {endLogFormatter, startLogFormatter};
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
import log from './logger';
|
||||
import LRU from 'lru-cache';
|
||||
import { fs, util } from '@appium/support';
|
||||
import {fs, util} from '@appium/support';
|
||||
import os from 'os';
|
||||
import path from 'path';
|
||||
import { EventEmitter } from 'events';
|
||||
|
||||
import {EventEmitter} from 'events';
|
||||
|
||||
const CACHE_SIZE = 1024;
|
||||
const IDEMPOTENT_RESPONSES = new LRU({
|
||||
max: CACHE_SIZE,
|
||||
updateAgeOnGet: true,
|
||||
dispose (key, {response}) {
|
||||
dispose(key, {response}) {
|
||||
if (response) {
|
||||
fs.rimrafSync(response);
|
||||
}
|
||||
@@ -20,9 +19,7 @@ const MONITORED_METHODS = ['POST', 'PATCH'];
|
||||
const IDEMPOTENCY_KEY_HEADER = 'x-idempotency-key';
|
||||
|
||||
process.on('exit', () => {
|
||||
const resPaths = [...IDEMPOTENT_RESPONSES.values()]
|
||||
.map(({response}) => response)
|
||||
.filter(Boolean);
|
||||
const resPaths = [...IDEMPOTENT_RESPONSES.values()].map(({response}) => response).filter(Boolean);
|
||||
for (const resPath of resPaths) {
|
||||
try {
|
||||
// Asynchronous calls are not supported in onExit handler
|
||||
@@ -31,8 +28,7 @@ process.on('exit', () => {
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
function cacheResponse (key, req, res) {
|
||||
function cacheResponse(key, req, res) {
|
||||
const responseStateListener = new EventEmitter();
|
||||
IDEMPOTENT_RESPONSES.set(key, {
|
||||
method: req.method,
|
||||
@@ -72,8 +68,10 @@ function cacheResponse (key, req, res) {
|
||||
}
|
||||
|
||||
if (!IDEMPOTENT_RESPONSES.has(key)) {
|
||||
log.info(`Could not cache the response identified by '${key}'. ` +
|
||||
`Cache consistency has been damaged`);
|
||||
log.info(
|
||||
`Could not cache the response identified by '${key}'. ` +
|
||||
`Cache consistency has been damaged`
|
||||
);
|
||||
return responseStateListener.emit('ready', null);
|
||||
}
|
||||
if (writeError) {
|
||||
@@ -82,8 +80,10 @@ function cacheResponse (key, req, res) {
|
||||
return responseStateListener.emit('ready', null);
|
||||
}
|
||||
if (!isResponseFullySent) {
|
||||
log.info(`Could not cache the response identified by '${key}', ` +
|
||||
`because it has not been completed`);
|
||||
log.info(
|
||||
`Could not cache the response identified by '${key}', ` +
|
||||
`because it has not been completed`
|
||||
);
|
||||
log.info('Does the client terminate connections too early?');
|
||||
IDEMPOTENT_RESPONSES.delete(key);
|
||||
return responseStateListener.emit('ready', null);
|
||||
@@ -94,7 +94,7 @@ function cacheResponse (key, req, res) {
|
||||
});
|
||||
}
|
||||
|
||||
async function handleIdempotency (req, res, next) {
|
||||
async function handleIdempotency(req, res, next) {
|
||||
const key = req.headers[IDEMPOTENCY_KEY_HEADER];
|
||||
if (!key) {
|
||||
return next();
|
||||
@@ -124,7 +124,7 @@ async function handleIdempotency (req, res, next) {
|
||||
}
|
||||
|
||||
const rerouteCachedResponse = async (cachedResPath) => {
|
||||
if (!await fs.exists(cachedResPath)) {
|
||||
if (!(await fs.exists(cachedResPath))) {
|
||||
IDEMPOTENT_RESPONSES.delete(key);
|
||||
log.warn(`Could not read the cached response identified by key '${key}'`);
|
||||
log.warn('The temporary storage is not accessible anymore');
|
||||
@@ -149,4 +149,4 @@ async function handleIdempotency (req, res, next) {
|
||||
}
|
||||
}
|
||||
|
||||
export { handleIdempotency };
|
||||
export {handleIdempotency};
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { logger } from '@appium/support';
|
||||
import {logger} from '@appium/support';
|
||||
|
||||
const log = logger.getLogger('HTTP');
|
||||
export default log;
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import _ from 'lodash';
|
||||
import log from './logger';
|
||||
import { errors } from '../protocol';
|
||||
import { handleIdempotency } from './idempotency';
|
||||
import {errors} from '../protocol';
|
||||
import {handleIdempotency} from './idempotency';
|
||||
|
||||
function allowCrossDomain (req, res, next) {
|
||||
function allowCrossDomain(req, res, next) {
|
||||
try {
|
||||
res.header('Access-Control-Allow-Origin', '*');
|
||||
res.header('Access-Control-Allow-Methods', 'GET, POST, PUT, OPTIONS, DELETE');
|
||||
res.header('Access-Control-Allow-Headers', 'Cache-Control, Pragma, Origin, X-Requested-With, Content-Type, Accept, User-Agent');
|
||||
res.header(
|
||||
'Access-Control-Allow-Headers',
|
||||
'Cache-Control, Pragma, Origin, X-Requested-With, Content-Type, Accept, User-Agent'
|
||||
);
|
||||
|
||||
// need to respond 200 to OPTIONS
|
||||
if ('OPTIONS' === req.method) {
|
||||
@@ -19,11 +22,13 @@ function allowCrossDomain (req, res, next) {
|
||||
next();
|
||||
}
|
||||
|
||||
function allowCrossDomainAsyncExecute (basePath) {
|
||||
function allowCrossDomainAsyncExecute(basePath) {
|
||||
return (req, res, next) => {
|
||||
// there are two paths for async responses, so cover both
|
||||
// https://regex101.com/r/txYiEz/1
|
||||
const receiveAsyncResponseRegExp = new RegExp(`${_.escapeRegExp(basePath)}/session/[a-f0-9-]+/(appium/)?receive_async_response`);
|
||||
const receiveAsyncResponseRegExp = new RegExp(
|
||||
`${_.escapeRegExp(basePath)}/session/[a-f0-9-]+/(appium/)?receive_async_response`
|
||||
);
|
||||
if (!receiveAsyncResponseRegExp.test(req.url)) {
|
||||
return next();
|
||||
}
|
||||
@@ -31,10 +36,13 @@ function allowCrossDomainAsyncExecute (basePath) {
|
||||
};
|
||||
}
|
||||
|
||||
function fixPythonContentType (basePath) {
|
||||
function fixPythonContentType(basePath) {
|
||||
return (req, res, next) => {
|
||||
// hack because python client library gives us wrong content-type
|
||||
if (new RegExp(`^${_.escapeRegExp(basePath)}`).test(req.path) && /^Python/.test(req.headers['user-agent'])) {
|
||||
if (
|
||||
new RegExp(`^${_.escapeRegExp(basePath)}`).test(req.path) &&
|
||||
/^Python/.test(req.headers['user-agent'])
|
||||
) {
|
||||
if (req.headers['content-type'] === 'application/x-www-form-urlencoded') {
|
||||
req.headers['content-type'] = 'application/json; charset=utf-8';
|
||||
}
|
||||
@@ -43,14 +51,14 @@ function fixPythonContentType (basePath) {
|
||||
};
|
||||
}
|
||||
|
||||
function defaultToJSONContentType (req, res, next) {
|
||||
function defaultToJSONContentType(req, res, next) {
|
||||
if (!req.headers['content-type']) {
|
||||
req.headers['content-type'] = 'application/json; charset=utf-8';
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
function catchAllHandler (err, req, res, next) {
|
||||
function catchAllHandler(err, req, res, next) {
|
||||
if (res.headersSent) {
|
||||
return next(err);
|
||||
}
|
||||
@@ -58,36 +66,40 @@ function catchAllHandler (err, req, res, next) {
|
||||
log.error(`Uncaught error: ${err.message}`);
|
||||
log.error('Sending generic error response');
|
||||
const error = errors.UnknownError;
|
||||
res.status(error.w3cStatus()).json(patchWithSessionId(req, {
|
||||
status: error.code(),
|
||||
value: {
|
||||
error: error.error(),
|
||||
message: `An unknown server-side error occurred while processing the command: ${err.message}`,
|
||||
stacktrace: err.stack,
|
||||
}
|
||||
}));
|
||||
res.status(error.w3cStatus()).json(
|
||||
patchWithSessionId(req, {
|
||||
status: error.code(),
|
||||
value: {
|
||||
error: error.error(),
|
||||
message: `An unknown server-side error occurred while processing the command: ${err.message}`,
|
||||
stacktrace: err.stack,
|
||||
},
|
||||
})
|
||||
);
|
||||
log.error(err);
|
||||
}
|
||||
|
||||
function catch404Handler (req, res) {
|
||||
function catch404Handler(req, res) {
|
||||
log.debug(`No route found for ${req.url}`);
|
||||
const error = errors.UnknownCommandError;
|
||||
res.status(error.w3cStatus()).json(patchWithSessionId(req, {
|
||||
status: error.code(),
|
||||
value: {
|
||||
error: error.error(),
|
||||
message: 'The requested resource could not be found, or a request was ' +
|
||||
'received using an HTTP method that is not supported by the mapped ' +
|
||||
'resource',
|
||||
stacktrace: '',
|
||||
}
|
||||
}));
|
||||
res.status(error.w3cStatus()).json(
|
||||
patchWithSessionId(req, {
|
||||
status: error.code(),
|
||||
value: {
|
||||
error: error.error(),
|
||||
message:
|
||||
'The requested resource could not be found, or a request was ' +
|
||||
'received using an HTTP method that is not supported by the mapped ' +
|
||||
'resource',
|
||||
stacktrace: '',
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
const SESSION_ID_PATTERN = /\/session\/([^/]+)/;
|
||||
|
||||
function patchWithSessionId (req, body) {
|
||||
function patchWithSessionId(req, body) {
|
||||
const match = SESSION_ID_PATTERN.exec(req.url);
|
||||
if (match) {
|
||||
body.sessionId = match[1];
|
||||
@@ -96,7 +108,11 @@ function patchWithSessionId (req, body) {
|
||||
}
|
||||
|
||||
export {
|
||||
allowCrossDomain, fixPythonContentType, defaultToJSONContentType,
|
||||
catchAllHandler, allowCrossDomainAsyncExecute, handleIdempotency,
|
||||
allowCrossDomain,
|
||||
fixPythonContentType,
|
||||
defaultToJSONContentType,
|
||||
catchAllHandler,
|
||||
allowCrossDomainAsyncExecute,
|
||||
handleIdempotency,
|
||||
catch404Handler,
|
||||
};
|
||||
|
||||
@@ -6,27 +6,31 @@ import favicon from 'serve-favicon';
|
||||
import bodyParser from 'body-parser';
|
||||
import methodOverride from 'method-override';
|
||||
import log from './logger';
|
||||
import { startLogFormatter, endLogFormatter } from './express-logging';
|
||||
import {startLogFormatter, endLogFormatter} from './express-logging';
|
||||
import {
|
||||
allowCrossDomain, fixPythonContentType, defaultToJSONContentType,
|
||||
catchAllHandler, allowCrossDomainAsyncExecute, handleIdempotency,
|
||||
allowCrossDomain,
|
||||
fixPythonContentType,
|
||||
defaultToJSONContentType,
|
||||
catchAllHandler,
|
||||
allowCrossDomainAsyncExecute,
|
||||
handleIdempotency,
|
||||
catch404Handler,
|
||||
} from './middleware';
|
||||
import { guineaPig, guineaPigScrollable, guineaPigAppBanner, welcome, STATIC_DIR } from './static';
|
||||
import { produceError, produceCrash } from './crash';
|
||||
import {guineaPig, guineaPigScrollable, guineaPigAppBanner, welcome, STATIC_DIR} from './static';
|
||||
import {produceError, produceCrash} from './crash';
|
||||
import {
|
||||
addWebSocketHandler, removeWebSocketHandler, removeAllWebSocketHandlers,
|
||||
getWebSocketHandlers
|
||||
addWebSocketHandler,
|
||||
removeWebSocketHandler,
|
||||
removeAllWebSocketHandlers,
|
||||
getWebSocketHandlers,
|
||||
} from './websocket';
|
||||
import B from 'bluebird';
|
||||
import { DEFAULT_BASE_PATH } from '../constants';
|
||||
import { EventEmitter } from 'events';
|
||||
|
||||
import {DEFAULT_BASE_PATH} from '../constants';
|
||||
import {EventEmitter} from 'events';
|
||||
|
||||
const KEEP_ALIVE_TIMEOUT_MS = 10 * 60 * 1000; // 10 minutes
|
||||
|
||||
|
||||
async function server (opts = {}) {
|
||||
async function server(opts = {}) {
|
||||
const {
|
||||
routeConfiguringFunction,
|
||||
port,
|
||||
@@ -49,7 +53,13 @@ async function server (opts = {}) {
|
||||
// try/catch so any errors can be passed to reject.
|
||||
try {
|
||||
configureHttp({httpServer, reject, keepAliveTimeout});
|
||||
configureServer({app, addRoutes: routeConfiguringFunction, allowCors, basePath, extraMethodMap});
|
||||
configureServer({
|
||||
app,
|
||||
addRoutes: routeConfiguringFunction,
|
||||
allowCors,
|
||||
basePath,
|
||||
extraMethodMap,
|
||||
});
|
||||
// allow extensions to update the app and http server objects
|
||||
for (const updater of serverUpdaters) {
|
||||
await updater(app, httpServer);
|
||||
@@ -66,10 +76,9 @@ async function server (opts = {}) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
function configureServer ({
|
||||
function configureServer({
|
||||
app,
|
||||
addRoutes,
|
||||
allowCors = true,
|
||||
@@ -116,7 +125,7 @@ function configureServer ({
|
||||
app.all('/test/guinea-pig-app-banner', guineaPigAppBanner);
|
||||
}
|
||||
|
||||
function configureHttp ({httpServer, reject, keepAliveTimeout}) {
|
||||
function configureHttp({httpServer, reject, keepAliveTimeout}) {
|
||||
const serverState = {
|
||||
notifier: new EventEmitter(),
|
||||
closed: false,
|
||||
@@ -129,28 +138,32 @@ function configureHttp ({httpServer, reject, keepAliveTimeout}) {
|
||||
// http.Server.close() only stops new connections, but we need to wait until
|
||||
// all connections are closed and the `close` event is emitted
|
||||
const close = httpServer.close.bind(httpServer);
|
||||
httpServer.close = async () => await new B((resolve, reject) => {
|
||||
// https://github.com/nodejs/node-v0.x-archive/issues/9066#issuecomment-124210576
|
||||
serverState.closed = true;
|
||||
serverState.notifier.emit('shutdown');
|
||||
log.info('Waiting until the server is closed');
|
||||
httpServer.on('close', () => {
|
||||
log.info('Received server close event');
|
||||
resolve();
|
||||
httpServer.close = async () =>
|
||||
await new B((resolve, reject) => {
|
||||
// https://github.com/nodejs/node-v0.x-archive/issues/9066#issuecomment-124210576
|
||||
serverState.closed = true;
|
||||
serverState.notifier.emit('shutdown');
|
||||
log.info('Waiting until the server is closed');
|
||||
httpServer.on('close', () => {
|
||||
log.info('Received server close event');
|
||||
resolve();
|
||||
});
|
||||
close((err) => {
|
||||
if (err) reject(err); // eslint-disable-line curly
|
||||
});
|
||||
});
|
||||
close((err) => {
|
||||
if (err) reject(err); // eslint-disable-line curly
|
||||
});
|
||||
});
|
||||
|
||||
httpServer.on('error', (err) => {
|
||||
if (err.code === 'EADDRNOTAVAIL') {
|
||||
log.error('Could not start REST http interface listener. ' +
|
||||
'Requested address is not available.');
|
||||
log.error(
|
||||
'Could not start REST http interface listener. ' + 'Requested address is not available.'
|
||||
);
|
||||
} else {
|
||||
log.error('Could not start REST http interface listener. The requested ' +
|
||||
'port may already be in use. Please make sure there is no ' +
|
||||
'other instance of this server running already.');
|
||||
log.error(
|
||||
'Could not start REST http interface listener. The requested ' +
|
||||
'port may already be in use. Please make sure there is no ' +
|
||||
'other instance of this server running already.'
|
||||
);
|
||||
}
|
||||
reject(err);
|
||||
});
|
||||
@@ -159,7 +172,7 @@ function configureHttp ({httpServer, reject, keepAliveTimeout}) {
|
||||
socket.setTimeout(keepAliveTimeout);
|
||||
socket.on('error', reject);
|
||||
|
||||
function destroy () {
|
||||
function destroy() {
|
||||
socket.destroy();
|
||||
}
|
||||
socket._openReqCount = 0;
|
||||
@@ -179,7 +192,7 @@ function configureHttp ({httpServer, reject, keepAliveTimeout}) {
|
||||
});
|
||||
}
|
||||
|
||||
async function startServer ({httpServer, port, hostname, keepAliveTimeout}) {
|
||||
async function startServer({httpServer, port, hostname, keepAliveTimeout}) {
|
||||
const serverArgs = [port];
|
||||
if (hostname) {
|
||||
// If the hostname is omitted, the server will accept
|
||||
@@ -193,7 +206,7 @@ async function startServer ({httpServer, port, hostname, keepAliveTimeout}) {
|
||||
await startPromise;
|
||||
}
|
||||
|
||||
function normalizeBasePath (basePath) {
|
||||
function normalizeBasePath(basePath) {
|
||||
if (!_.isString(basePath)) {
|
||||
throw new Error(`Invalid path prefix ${basePath}`);
|
||||
}
|
||||
@@ -211,4 +224,4 @@ function normalizeBasePath (basePath) {
|
||||
return basePath;
|
||||
}
|
||||
|
||||
export { server, configureServer, normalizeBasePath };
|
||||
export {server, configureServer, normalizeBasePath};
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import path from 'path';
|
||||
import log from './logger';
|
||||
import _ from 'lodash';
|
||||
import { fs } from '@appium/support';
|
||||
import {fs} from '@appium/support';
|
||||
import B from 'bluebird';
|
||||
|
||||
|
||||
let STATIC_DIR = path.resolve(__dirname, '..', '..', '..', 'static');
|
||||
if (_.isNull(path.resolve(__dirname).match(/build[/\\]lib[/\\]express$/))) {
|
||||
// in some contexts we are not in the build directory,
|
||||
@@ -12,14 +11,14 @@ if (_.isNull(path.resolve(__dirname).match(/build[/\\]lib[/\\]express$/))) {
|
||||
STATIC_DIR = path.resolve(__dirname, '..', '..', 'static');
|
||||
}
|
||||
|
||||
async function guineaPigTemplate (req, res, page) {
|
||||
async function guineaPigTemplate(req, res, page) {
|
||||
const delay = parseInt(req.params.delay || req.query.delay || 0, 10);
|
||||
const throwError = req.params.throwError || req.query.throwError || '';
|
||||
let params = {
|
||||
throwError,
|
||||
serverTime: new Date(),
|
||||
userAgent: req.headers['user-agent'],
|
||||
comment: 'None'
|
||||
comment: 'None',
|
||||
};
|
||||
if (req.method === 'POST') {
|
||||
params.comment = req.body.comments || params.comment;
|
||||
@@ -34,7 +33,7 @@ async function guineaPigTemplate (req, res, page) {
|
||||
res.cookie('guineacookie2', 'cookié2', {path: '/'});
|
||||
res.cookie('guineacookie3', 'cant access this', {
|
||||
domain: '.blargimarg.com',
|
||||
path: '/'
|
||||
path: '/',
|
||||
});
|
||||
res.send((await getTemplate(page))(params));
|
||||
}
|
||||
@@ -42,36 +41,36 @@ async function guineaPigTemplate (req, res, page) {
|
||||
/*
|
||||
* Dynamic page mapped to /test/guinea-pig
|
||||
*/
|
||||
async function guineaPig (req, res) {
|
||||
async function guineaPig(req, res) {
|
||||
return await guineaPigTemplate(req, res, 'guinea-pig.html');
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic page mapped to /test/guinea-pig-scrollable
|
||||
*/
|
||||
async function guineaPigScrollable (req, res) {
|
||||
async function guineaPigScrollable(req, res) {
|
||||
return await guineaPigTemplate(req, res, 'guinea-pig-scrollable.html');
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic page mapped to /test/guinea-pig-app-banner
|
||||
*/
|
||||
async function guineaPigAppBanner (req, res) {
|
||||
async function guineaPigAppBanner(req, res) {
|
||||
return await guineaPigTemplate(req, res, 'guinea-pig-app-banner.html');
|
||||
}
|
||||
|
||||
/*
|
||||
* Dynamic page mapped to /welcome
|
||||
*/
|
||||
async function welcome (req, res) {
|
||||
let params = {message: 'Let\'s browse!'};
|
||||
async function welcome(req, res) {
|
||||
let params = {message: "Let's browse!"};
|
||||
log.debug(`Sending welcome response with params: ${JSON.stringify(params)}`);
|
||||
res.send((await getTemplate('welcome.html'))(params));
|
||||
}
|
||||
|
||||
async function getTemplate (templateName) {
|
||||
async function getTemplate(templateName) {
|
||||
let content = await fs.readFile(path.resolve(STATIC_DIR, 'test', templateName));
|
||||
return _.template(content.toString());
|
||||
}
|
||||
|
||||
export { guineaPig, guineaPigScrollable, guineaPigAppBanner, welcome, STATIC_DIR };
|
||||
export {guineaPig, guineaPigScrollable, guineaPigAppBanner, welcome, STATIC_DIR};
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import _ from 'lodash';
|
||||
import { URL } from 'url';
|
||||
import {URL} from 'url';
|
||||
import B from 'bluebird';
|
||||
|
||||
const DEFAULT_WS_PATHNAME_PREFIX = '/ws';
|
||||
|
||||
|
||||
/**
|
||||
* Adds websocket handler to express server instance.
|
||||
* It is expected this function is called in Express
|
||||
@@ -18,14 +17,15 @@ const DEFAULT_WS_PATHNAME_PREFIX = '/ws';
|
||||
* https://github.com/websockets/ws/pull/885 for more details
|
||||
* on how to configure the handler properly.
|
||||
*/
|
||||
async function addWebSocketHandler (handlerPathname, handlerServer) { // eslint-disable-line require-await
|
||||
// eslint-disable-next-line require-await
|
||||
async function addWebSocketHandler(handlerPathname, handlerServer) {
|
||||
if (_.isUndefined(this.webSocketsMapping)) {
|
||||
this.webSocketsMapping = {};
|
||||
// https://github.com/websockets/ws/pull/885
|
||||
this.on('upgrade', (request, socket, head) => {
|
||||
let currentPathname;
|
||||
try {
|
||||
currentPathname = (new URL(request.url)).pathname;
|
||||
currentPathname = new URL(request.url).pathname;
|
||||
} catch (ign) {
|
||||
currentPathname = request.url;
|
||||
}
|
||||
@@ -54,7 +54,8 @@ async function addWebSocketHandler (handlerPathname, handlerServer) { // eslint-
|
||||
* @returns {Object} pathnames to websocket server isntances mapping
|
||||
* matching the search criteria or an empty object otherwise.
|
||||
*/
|
||||
async function getWebSocketHandlers (keysFilter = null) { // eslint-disable-line require-await
|
||||
// eslint-disable-next-line require-await
|
||||
async function getWebSocketHandlers(keysFilter = null) {
|
||||
if (_.isEmpty(this.webSocketsMapping)) {
|
||||
return {};
|
||||
}
|
||||
@@ -77,7 +78,8 @@ async function getWebSocketHandlers (keysFilter = null) { // eslint-disable-line
|
||||
* @param {string} handlerPathname - Websocket endpoint path.
|
||||
* @returns {boolean} true if the handlerPathname was found and deleted
|
||||
*/
|
||||
async function removeWebSocketHandler (handlerPathname) { // eslint-disable-line require-await
|
||||
// eslint-disable-next-line require-await
|
||||
async function removeWebSocketHandler(handlerPathname) {
|
||||
const wsServer = this.webSocketsMapping?.[handlerPathname];
|
||||
if (!wsServer) {
|
||||
return false;
|
||||
@@ -85,7 +87,7 @@ async function removeWebSocketHandler (handlerPathname) { // eslint-disable-line
|
||||
|
||||
try {
|
||||
wsServer.close();
|
||||
for (const client of (wsServer.clients || [])) {
|
||||
for (const client of wsServer.clients || []) {
|
||||
client.terminate();
|
||||
}
|
||||
return true;
|
||||
@@ -104,7 +106,7 @@ async function removeWebSocketHandler (handlerPathname) { // eslint-disable-line
|
||||
*
|
||||
* @returns {boolean} true if at least one handler has been deleted
|
||||
*/
|
||||
async function removeAllWebSocketHandlers () {
|
||||
async function removeAllWebSocketHandlers() {
|
||||
if (_.isEmpty(this.webSocketsMapping)) {
|
||||
return false;
|
||||
}
|
||||
@@ -117,6 +119,9 @@ async function removeAllWebSocketHandlers () {
|
||||
}
|
||||
|
||||
export {
|
||||
addWebSocketHandler, removeWebSocketHandler, removeAllWebSocketHandlers,
|
||||
getWebSocketHandlers, DEFAULT_WS_PATHNAME_PREFIX,
|
||||
addWebSocketHandler,
|
||||
removeWebSocketHandler,
|
||||
removeAllWebSocketHandlers,
|
||||
getWebSocketHandlers,
|
||||
DEFAULT_WS_PATHNAME_PREFIX,
|
||||
};
|
||||
|
||||
@@ -2,13 +2,15 @@
|
||||
|
||||
import _ from 'lodash';
|
||||
|
||||
function isW3cCaps (caps) {
|
||||
function isW3cCaps(caps) {
|
||||
if (!_.isPlainObject(caps)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const isFirstMatchValid = () => _.isArray(caps.firstMatch)
|
||||
&& !_.isEmpty(caps.firstMatch) && _.every(caps.firstMatch, _.isPlainObject);
|
||||
const isFirstMatchValid = () =>
|
||||
_.isArray(caps.firstMatch) &&
|
||||
!_.isEmpty(caps.firstMatch) &&
|
||||
_.every(caps.firstMatch, _.isPlainObject);
|
||||
const isAlwaysMatchValid = () => _.isPlainObject(caps.alwaysMatch);
|
||||
if (_.has(caps, 'firstMatch') && _.has(caps, 'alwaysMatch')) {
|
||||
return isFirstMatchValid() && isAlwaysMatchValid();
|
||||
@@ -29,21 +31,19 @@ function isW3cCaps (caps) {
|
||||
* @param {AppiumLogger} log
|
||||
* @returns {Capabilities}
|
||||
*/
|
||||
function fixCaps (originalCaps, desiredCapConstraints, log) {
|
||||
function fixCaps(originalCaps, desiredCapConstraints, log) {
|
||||
let caps = _.clone(originalCaps);
|
||||
|
||||
// boolean capabilities can be passed in as strings 'false' and 'true'
|
||||
// which we want to translate into boolean values
|
||||
let booleanCaps = _.keys(
|
||||
_.pickBy(desiredCapConstraints, (k) => k.isBoolean === true),
|
||||
);
|
||||
let booleanCaps = _.keys(_.pickBy(desiredCapConstraints, (k) => k.isBoolean === true));
|
||||
for (let cap of booleanCaps) {
|
||||
let value = originalCaps[cap];
|
||||
if (_.isString(value)) {
|
||||
value = value.toLowerCase();
|
||||
if (value === 'true' || value === 'false') {
|
||||
log.warn(
|
||||
`Capability '${cap}' changed from string to boolean. This may cause unexpected behavior`,
|
||||
`Capability '${cap}' changed from string to boolean. This may cause unexpected behavior`
|
||||
);
|
||||
caps[cap] = value === 'true';
|
||||
}
|
||||
@@ -51,9 +51,7 @@ function fixCaps (originalCaps, desiredCapConstraints, log) {
|
||||
}
|
||||
|
||||
// int capabilities are often sent in as strings by frameworks
|
||||
let intCaps = _.keys(
|
||||
_.pickBy(desiredCapConstraints, (k) => k.isNumber === true),
|
||||
);
|
||||
let intCaps = _.keys(_.pickBy(desiredCapConstraints, (k) => k.isNumber === true));
|
||||
for (let cap of intCaps) {
|
||||
let value = originalCaps[cap];
|
||||
if (_.isString(value)) {
|
||||
@@ -63,7 +61,7 @@ function fixCaps (originalCaps, desiredCapConstraints, log) {
|
||||
newValue = parseFloat(value);
|
||||
}
|
||||
log.warn(
|
||||
`Capability '${cap}' changed from string ('${value}') to integer (${newValue}). This may cause unexpected behavior`,
|
||||
`Capability '${cap}' changed from string ('${value}') to integer (${newValue}). This may cause unexpected behavior`
|
||||
);
|
||||
caps[cap] = newValue;
|
||||
}
|
||||
@@ -72,10 +70,7 @@ function fixCaps (originalCaps, desiredCapConstraints, log) {
|
||||
return caps;
|
||||
}
|
||||
|
||||
export {
|
||||
isW3cCaps,
|
||||
fixCaps
|
||||
};
|
||||
export {isW3cCaps, fixCaps};
|
||||
|
||||
/**
|
||||
* @typedef {import('@appium/types').Capabilities} Capabilities
|
||||
|
||||
@@ -7,62 +7,79 @@ B.config({
});

// BaseDriver exports
import { BaseDriver } from './basedriver/driver';
export { DriverCore } from './basedriver/core';
import { DeviceSettings } from './basedriver/device-settings';
import {BaseDriver} from './basedriver/driver';
export {DriverCore} from './basedriver/core';
import {DeviceSettings} from './basedriver/device-settings';

export { BaseDriver, DeviceSettings };
export {BaseDriver, DeviceSettings};
export default BaseDriver;


// MJSONWP exports
import * as protocol from './protocol';
import {
DEFAULT_BASE_PATH, PROTOCOLS
} from './constants';
import {DEFAULT_BASE_PATH, PROTOCOLS} from './constants';

const {
routeConfiguringFunction, errors, isErrorType,
errorFromMJSONWPStatusCode, errorFromW3CJsonCode, ALL_COMMANDS, METHOD_MAP,
routeToCommandName, NO_SESSION_ID_COMMANDS, isSessionCommand,
determineProtocol, CREATE_SESSION_COMMAND,
DELETE_SESSION_COMMAND, GET_STATUS_COMMAND,
routeConfiguringFunction,
errors,
isErrorType,
errorFromMJSONWPStatusCode,
errorFromW3CJsonCode,
ALL_COMMANDS,
METHOD_MAP,
routeToCommandName,
NO_SESSION_ID_COMMANDS,
isSessionCommand,
determineProtocol,
CREATE_SESSION_COMMAND,
DELETE_SESSION_COMMAND,
GET_STATUS_COMMAND,
} = protocol;

export {
routeConfiguringFunction, errors, isErrorType, PROTOCOLS,
errorFromMJSONWPStatusCode, errorFromW3CJsonCode, determineProtocol,
errorFromMJSONWPStatusCode as errorFromCode, ALL_COMMANDS, METHOD_MAP,
routeToCommandName, NO_SESSION_ID_COMMANDS, isSessionCommand,
DEFAULT_BASE_PATH, CREATE_SESSION_COMMAND,
DELETE_SESSION_COMMAND, GET_STATUS_COMMAND,
routeConfiguringFunction,
errors,
isErrorType,
PROTOCOLS,
errorFromMJSONWPStatusCode,
errorFromW3CJsonCode,
determineProtocol,
errorFromMJSONWPStatusCode as errorFromCode,
ALL_COMMANDS,
METHOD_MAP,
routeToCommandName,
NO_SESSION_ID_COMMANDS,
isSessionCommand,
DEFAULT_BASE_PATH,
CREATE_SESSION_COMMAND,
DELETE_SESSION_COMMAND,
GET_STATUS_COMMAND,
};

// Express exports
import * as staticIndex from './express/static';
const { STATIC_DIR } = staticIndex;
export { STATIC_DIR };
const {STATIC_DIR} = staticIndex;
export {STATIC_DIR};

import * as serverIndex from './express/server';
const { server, normalizeBasePath } = serverIndex;
export { server, normalizeBasePath };
const {server, normalizeBasePath} = serverIndex;
export {server, normalizeBasePath};

// jsonwp-proxy exports
import * as proxyIndex from './jsonwp-proxy/proxy';
const { JWProxy } = proxyIndex;
export { JWProxy };
const {JWProxy} = proxyIndex;
export {JWProxy};

// jsonwp-status exports
import * as statusIndex from './jsonwp-status/status';
const { codes: statusCodes, getSummaryByCode } = statusIndex;
export { statusCodes, getSummaryByCode };
const {codes: statusCodes, getSummaryByCode} = statusIndex;
export {statusCodes, getSummaryByCode};

// W3C capabilities parser
import * as caps from './basedriver/capabilities';
const { processCapabilities, isStandardCap, validateCaps } = caps;
export { processCapabilities, isStandardCap, validateCaps };
const {processCapabilities, isStandardCap, validateCaps} = caps;
export {processCapabilities, isStandardCap, validateCaps};

// Web socket helpers
import * as ws from './express/websocket';
const { DEFAULT_WS_PATHNAME_PREFIX } = ws;
export { DEFAULT_WS_PATHNAME_PREFIX };
const {DEFAULT_WS_PATHNAME_PREFIX} = ws;
export {DEFAULT_WS_PATHNAME_PREFIX};

@@ -1,33 +1,29 @@
import _ from 'lodash';
import { logger, util } from '@appium/support';
import { duplicateKeys } from '../basedriver/helpers';
import {
MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY, PROTOCOLS
} from '../constants';
import {logger, util} from '@appium/support';
import {duplicateKeys} from '../basedriver/helpers';
import {MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY, PROTOCOLS} from '../constants';

export const COMMAND_URLS_CONFLICTS = [
{
commandNames: ['execute', 'executeAsync'],
jsonwpConverter: (url) => url.replace(/\/execute.*/,
url.includes('async') ? '/execute_async' : '/execute'),
w3cConverter: (url) => url.replace(/\/execute.*/,
url.includes('async') ? '/execute/async' : '/execute/sync'),
jsonwpConverter: (url) =>
url.replace(/\/execute.*/, url.includes('async') ? '/execute_async' : '/execute'),
w3cConverter: (url) =>
url.replace(/\/execute.*/, url.includes('async') ? '/execute/async' : '/execute/sync'),
},
{
commandNames: ['getElementScreenshot'],
jsonwpConverter: (url) => url.replace(/\/element\/([^/]+)\/screenshot$/,
'/screenshot/$1'),
w3cConverter: (url) => url.replace(/\/screenshot\/([^/]+)/,
'/element/$1/screenshot'),
jsonwpConverter: (url) => url.replace(/\/element\/([^/]+)\/screenshot$/, '/screenshot/$1'),
w3cConverter: (url) => url.replace(/\/screenshot\/([^/]+)/, '/element/$1/screenshot'),
},
{
commandNames: ['getWindowHandles', 'getWindowHandle'],
jsonwpConverter (url) {
jsonwpConverter(url) {
return /\/window$/.test(url)
? url.replace(/\/window$/, '/window_handle')
: url.replace(/\/window\/handle(s?)$/, '/window_handle$1');
},
w3cConverter (url) {
w3cConverter(url) {
return /\/window_handle$/.test(url)
? url.replace(/\/window_handle$/, '/window')
: url.replace(/\/window_handles$/, '/window/handles');
@@ -40,29 +36,28 @@ export const COMMAND_URLS_CONFLICTS = [
const jsonwpUrl = w3cUrl.replace(w3cPropertyRegex, '/element/$1/attribute/$2');
return jsonwpUrl;
},
w3cConverter: (jsonwpUrl) => jsonwpUrl // Don't convert JSONWP URL to W3C. W3C accepts /attribute and /property
}
w3cConverter: (jsonwpUrl) => jsonwpUrl, // Don't convert JSONWP URL to W3C. W3C accepts /attribute and /property
},
];
const {MJSONWP, W3C} = PROTOCOLS;
const DEFAULT_LOG = logger.getLogger('Protocol Converter');


class ProtocolConverter {
constructor (proxyFunc, log = null) {
constructor(proxyFunc, log = null) {
this.proxyFunc = proxyFunc;
this._downstreamProtocol = null;
this._log = log;
}

get log () {
get log() {
return this._log ?? DEFAULT_LOG;
}

set downstreamProtocol (value) {
set downstreamProtocol(value) {
this._downstreamProtocol = value;
}

get downstreamProtocol () {
get downstreamProtocol() {
return this._downstreamProtocol;
}

@@ -74,25 +69,29 @@ class ProtocolConverter {
* @param {Object} body Request body
* @return {Array} Array of W3C + MJSONWP compatible timeout objects
*/
getTimeoutRequestObjects (body) {
getTimeoutRequestObjects(body) {
if (this.downstreamProtocol === W3C && _.has(body, 'ms') && _.has(body, 'type')) {
const typeToW3C = (x) => x === 'page load' ? 'pageLoad' : x;
return [{
[typeToW3C(body.type)]: body.ms,
}];
const typeToW3C = (x) => (x === 'page load' ? 'pageLoad' : x);
return [
{
[typeToW3C(body.type)]: body.ms,
},
];
}

if (this.downstreamProtocol === MJSONWP && (!_.has(body, 'ms') || !_.has(body, 'type'))) {
const typeToJSONWP = (x) => x === 'pageLoad' ? 'page load' : x;
return _.toPairs(body)
// Only transform the entry if ms value is a valid positive float number
.filter((pair) => /^\d+(?:[.,]\d*?)?$/.test(`${pair[1]}`))
.map(function (pair) {
return {
type: typeToJSONWP(pair[0]),
ms: pair[1],
};
});
const typeToJSONWP = (x) => (x === 'pageLoad' ? 'page load' : x);
return (
_.toPairs(body)
// Only transform the entry if ms value is a valid positive float number
.filter((pair) => /^\d+(?:[.,]\d*?)?$/.test(`${pair[1]}`))
.map(function (pair) {
return {
type: typeToJSONWP(pair[0]),
ms: pair[1],
};
})
);
}

return [body];
@@ -104,11 +103,15 @@ class ProtocolConverter {
* @param {String} method Endpoint method
* @param {Object} body Request body
*/
async proxySetTimeouts (url, method, body) {
async proxySetTimeouts(url, method, body) {
let response, resBody;

const timeoutRequestObjects = this.getTimeoutRequestObjects(body);
this.log.debug(`Will send the following request bodies to /timeouts: ${JSON.stringify(timeoutRequestObjects)}`);
this.log.debug(
`Will send the following request bodies to /timeouts: ${JSON.stringify(
timeoutRequestObjects
)}`
);
for (const timeoutObj of timeoutRequestObjects) {
[response, resBody] = await this.proxyFunc(url, method, timeoutObj);

@@ -127,7 +130,7 @@ class ProtocolConverter {
return [response, resBody];
}

async proxySetWindow (url, method, body) {
async proxySetWindow(url, method, body) {
const bodyObj = util.safeJsonParse(body);
if (_.isPlainObject(bodyObj)) {
if (this.downstreamProtocol === W3C && _.has(bodyObj, 'name') && !_.has(bodyObj, 'handle')) {
@@ -137,7 +140,11 @@ class ProtocolConverter {
handle: bodyObj.name,
});
}
if (this.downstreamProtocol === MJSONWP && _.has(bodyObj, 'handle') && !_.has(bodyObj, 'name')) {
if (
this.downstreamProtocol === MJSONWP &&
_.has(bodyObj, 'handle') &&
!_.has(bodyObj, 'name')
) {
this.log.debug(`Copied 'handle' value '${bodyObj.handle}' to 'name' as per JSONWP spec`);
return await this.proxyFunc(url, method, {
...bodyObj,
@@ -149,48 +156,54 @@ class ProtocolConverter {
return await this.proxyFunc(url, method, body);
}

async proxySetValue (url, method, body) {
async proxySetValue(url, method, body) {
const bodyObj = util.safeJsonParse(body);
if (_.isPlainObject(bodyObj) && (util.hasValue(bodyObj.text) || util.hasValue(bodyObj.value))) {
let {text, value} = bodyObj;
if (util.hasValue(text) && !util.hasValue(value)) {
value = _.isString(text)
? [...text]
: (_.isArray(text) ? text : []);
this.log.debug(`Added 'value' property ${JSON.stringify(value)} to 'setValue' request body`);
value = _.isString(text) ? [...text] : _.isArray(text) ? text : [];
this.log.debug(
`Added 'value' property ${JSON.stringify(value)} to 'setValue' request body`
);
} else if (!util.hasValue(text) && util.hasValue(value)) {
text = _.isArray(value)
? value.join('')
: (_.isString(value) ? value : '');
text = _.isArray(value) ? value.join('') : _.isString(value) ? value : '';
this.log.debug(`Added 'text' property ${JSON.stringify(text)} to 'setValue' request body`);
}
return await this.proxyFunc(url, method, Object.assign({}, bodyObj, {
text,
value,
}));
return await this.proxyFunc(
url,
method,
Object.assign({}, bodyObj, {
text,
value,
})
);
}

return await this.proxyFunc(url, method, body);
}

async proxySetFrame (url, method, body) {
async proxySetFrame(url, method, body) {
const bodyObj = util.safeJsonParse(body);
return _.has(bodyObj, 'id') && _.isPlainObject(bodyObj.id)
? await this.proxyFunc(url, method, {
...bodyObj,
id: duplicateKeys(bodyObj.id, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY),
})
...bodyObj,
id: duplicateKeys(bodyObj.id, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY),
})
: await this.proxyFunc(url, method, body);
}

async proxyPerformActions (url, method, body) {
async proxyPerformActions(url, method, body) {
const bodyObj = util.safeJsonParse(body);
return _.isPlainObject(bodyObj)
? await this.proxyFunc(url, method, duplicateKeys(bodyObj, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY))
? await this.proxyFunc(
url,
method,
duplicateKeys(bodyObj, MJSONWP_ELEMENT_KEY, W3C_ELEMENT_KEY)
)
: await this.proxyFunc(url, method, body);
}

async proxyReleaseActions (url, method) {
async proxyReleaseActions(url, method) {
return await this.proxyFunc(url, method);
}

@@ -204,7 +217,7 @@ class ProtocolConverter {
* @param {?string|object} body
* @returns The proxyfying result as [response, responseBody] tuple
*/
async convertAndProxy (commandName, url, method, body) {
async convertAndProxy(commandName, url, method, body) {
if (!this.downstreamProtocol) {
return await this.proxyFunc(url, method, body);
}
@@ -233,16 +246,19 @@ class ProtocolConverter {
continue;
}

const rewrittenUrl = this.downstreamProtocol === MJSONWP
? jsonwpConverter(url)
: w3cConverter(url);
const rewrittenUrl =
this.downstreamProtocol === MJSONWP ? jsonwpConverter(url) : w3cConverter(url);
if (rewrittenUrl === url) {
this.log.debug(`Did not know how to rewrite the original URL '${url}' ` +
`for ${this.downstreamProtocol} protocol`);
this.log.debug(
`Did not know how to rewrite the original URL '${url}' ` +
`for ${this.downstreamProtocol} protocol`
);
break;
}
this.log.info(`Rewrote the original URL '${url}' to '${rewrittenUrl}' ` +
`for ${this.downstreamProtocol} protocol`);
this.log.info(
`Rewrote the original URL '${url}' to '${rewrittenUrl}' ` +
`for ${this.downstreamProtocol} protocol`
);
return await this.proxyFunc(rewrittenUrl, method, body);
}

@@ -1,29 +1,29 @@
import _ from 'lodash';
import { logger, util } from '@appium/support';
import {logger, util} from '@appium/support';
import axios from 'axios';
import { getSummaryByCode } from '../jsonwp-status/status';
import {getSummaryByCode} from '../jsonwp-status/status';
import {
errors, isErrorType, errorFromMJSONWPStatusCode, errorFromW3CJsonCode,
errors,
isErrorType,
errorFromMJSONWPStatusCode,
errorFromW3CJsonCode,
getResponseForW3CError,
} from '../protocol/errors';
import { routeToCommandName } from '../protocol';
import { MAX_LOG_BODY_LENGTH, DEFAULT_BASE_PATH, PROTOCOLS } from '../constants';
import {routeToCommandName} from '../protocol';
import {MAX_LOG_BODY_LENGTH, DEFAULT_BASE_PATH, PROTOCOLS} from '../constants';
import ProtocolConverter from './protocol-converter';
import { formatResponseValue, formatStatus } from '../protocol/helpers';
import {formatResponseValue, formatStatus} from '../protocol/helpers';
import http from 'http';
import https from 'https';

const DEFAULT_LOG = logger.getLogger('WD Proxy');
const DEFAULT_REQUEST_TIMEOUT = 240000;
const COMPACT_ERROR_PATTERNS = [
/\bECONNREFUSED\b/,
/socket hang up/,
];
const COMPACT_ERROR_PATTERNS = [/\bECONNREFUSED\b/, /socket hang up/];

const {MJSONWP, W3C} = PROTOCOLS;

class JWProxy {
constructor (opts = {}) {
constructor(opts = {}) {
_.defaults(this, opts, {
scheme: 'http',
server: 'localhost',
@@ -47,7 +47,7 @@ class JWProxy {
this._log = opts.log;
}

get log () {
get log() {
return this._log ?? DEFAULT_LOG;
}

@@ -60,7 +60,7 @@ class JWProxy {
* @param {AxiosRequestConfig} requestConfig
* @returns {AxiosResponse}
*/
async request (requestConfig) {
async request(requestConfig) {
const reqPromise = axios(requestConfig);
this._activeRequests.push(reqPromise);
try {
@@ -70,28 +70,28 @@ class JWProxy {
}
}

getActiveRequestsCount () {
getActiveRequestsCount() {
return this._activeRequests.length;
}

cancelActiveRequests () {
cancelActiveRequests() {
this._activeRequests = [];
}

endpointRequiresSessionId (endpoint) {
endpointRequiresSessionId(endpoint) {
return !_.includes(['/session', '/sessions', '/status'], endpoint);
}

set downstreamProtocol (value) {
set downstreamProtocol(value) {
this._downstreamProtocol = value;
this.protocolConverter.downstreamProtocol = value;
}

get downstreamProtocol () {
get downstreamProtocol() {
return this._downstreamProtocol;
}

getUrlForProxy (url) {
getUrlForProxy(url) {
if (url === '') {
url = '/';
}
@@ -99,12 +99,12 @@ class JWProxy {
const endpointRe = '(/(session|status))';
let remainingUrl = '';
if (/^http/.test(url)) {
const first = (new RegExp(`(https?://.+)${endpointRe}`)).exec(url);
const first = new RegExp(`(https?://.+)${endpointRe}`).exec(url);
if (!first) {
throw new Error('Got a complete url but could not extract JWP endpoint');
}
remainingUrl = url.replace(first[1], '');
} else if ((new RegExp('^/')).test(url)) {
} else if (new RegExp('^/').test(url)) {
remainingUrl = url;
} else {
throw new Error(`Did not know what to do with url '${url}'`);
@@ -115,7 +115,7 @@ class JWProxy {
remainingUrl = stripPrefixRe.exec(remainingUrl)[1];
}

if (!(new RegExp(endpointRe)).test(remainingUrl)) {
if (!new RegExp(endpointRe).test(remainingUrl)) {
remainingUrl = `/session/${this.sessionId}${remainingUrl}`;
}

@@ -139,12 +139,13 @@ class JWProxy {
return proxyBase + remainingUrl;
}

async proxy (url, method, body = null) {
async proxy(url, method, body = null) {
method = method.toUpperCase();
const newUrl = this.getUrlForProxy(url);
const truncateBody = (content) => _.truncate(
_.isString(content) ? content : JSON.stringify(content),
{ length: MAX_LOG_BODY_LENGTH });
const truncateBody = (content) =>
_.truncate(_.isString(content) ? content : JSON.stringify(content), {
length: MAX_LOG_BODY_LENGTH,
});
const reqOpts = {
url: newUrl,
method,
@@ -171,14 +172,16 @@ class JWProxy {
}
}

this.log.debug(`Proxying [${method} ${url || '/'}] to [${method} ${newUrl}] ` +
(reqOpts.data ? `with body: ${truncateBody(reqOpts.data)}` : 'with no body'));
this.log.debug(
`Proxying [${method} ${url || '/'}] to [${method} ${newUrl}] ` +
(reqOpts.data ? `with body: ${truncateBody(reqOpts.data)}` : 'with no body')
);

const throwProxyError = (error) => {
const err = new Error(`The request to ${url} has failed`);
err.response = {
data: error,
status: 500
status: 500,
};
throw err;
};
@@ -216,9 +219,11 @@ class JWProxy {
if (util.hasValue(e.response)) {
if (!isResponseLogged) {
const error = truncateBody(e.response.data);
this.log.info(util.hasValue(e.response.status)
? `Got response with status ${e.response.status}: ${error}`
: `Got response with unknown status: ${error}`);
this.log.info(
util.hasValue(e.response.status)
? `Got response with status ${e.response.status}: ${error}`
: `Got response with unknown status: ${error}`
);
}
} else {
proxyErrorMsg = `Could not proxy command to the remote server. Original error: ${e.message}`;
@@ -232,7 +237,7 @@ class JWProxy {
}
}

getProtocolFromResBody (resObj) {
getProtocolFromResBody(resObj) {
if (_.isInteger(resObj.status)) {
return MJSONWP;
}
@@ -241,14 +246,16 @@ class JWProxy {
}
}

requestToCommandName (url, method) {
requestToCommandName(url, method) {
const extractCommandName = (pattern) => {
const pathMatch = pattern.exec(url);
return pathMatch ? routeToCommandName(pathMatch[1], method, this.reqBasePath) : null;
};
let commandName = routeToCommandName(url, method, this.reqBasePath);
if (!commandName && _.includes(url, `${this.reqBasePath}/session/`)) {
commandName = extractCommandName(new RegExp(`${_.escapeRegExp(this.reqBasePath)}/session/[^/]+(.+)`));
commandName = extractCommandName(
new RegExp(`${_.escapeRegExp(this.reqBasePath)}/session/[^/]+(.+)`)
);
}
if (!commandName && _.includes(url, this.reqBasePath)) {
commandName = extractCommandName(new RegExp(`${_.escapeRegExp(this.reqBasePath)}(/.+)`));
@@ -256,7 +263,7 @@ class JWProxy {
return commandName;
}

async proxyCommand (url, method, body = null) {
async proxyCommand(url, method, body = null) {
const commandName = this.requestToCommandName(url, method);
if (!commandName) {
return await this.proxy(url, method, body);
@@ -266,7 +273,7 @@ class JWProxy {
return await this.protocolConverter.convertAndProxy(commandName, url, method, body);
}

async command (url, method, body = null) {
async command(url, method, body = null) {
let response;
let resBodyObj;
try {
@@ -289,7 +296,10 @@ class JWProxy {
if (_.has(message, 'message')) {
message = message.message;
}
throw errorFromMJSONWPStatusCode(status, _.isEmpty(message) ? getSummaryByCode(status) : message);
throw errorFromMJSONWPStatusCode(
status,
_.isEmpty(message) ? getSummaryByCode(status) : message
);
}
} else if (protocol === W3C) {
// Got response in W3C format
@@ -297,22 +307,30 @@ class JWProxy {
return resBodyObj.value;
}
if (_.isPlainObject(resBodyObj.value) && resBodyObj.value.error) {
throw errorFromW3CJsonCode(resBodyObj.value.error, resBodyObj.value.message, resBodyObj.value.stacktrace);
throw errorFromW3CJsonCode(
resBodyObj.value.error,
resBodyObj.value.message,
resBodyObj.value.stacktrace
);
}
} else if (response.statusCode === 200) {
// Unknown protocol. Keeping it because of the backward compatibility
return resBodyObj;
}
throw new errors.UnknownError(`Did not know what to do with response code '${response.statusCode}' ` +
`and response body '${_.truncate(JSON.stringify(resBodyObj), {length: 300})}'`);
throw new errors.UnknownError(
`Did not know what to do with response code '${response.statusCode}' ` +
`and response body '${_.truncate(JSON.stringify(resBodyObj), {
length: 300,
})}'`
);
}

getSessionIdFromUrl (url) {
getSessionIdFromUrl(url) {
const match = url.match(/\/session\/([^/]+)/);
return match ? match[1] : null;
}

async proxyReqRes (req, res) {
async proxyReqRes(req, res) {
// ! this method must not throw any exceptions
// ! make sure to call res.send before return
let statusCode;
@@ -331,7 +349,7 @@ class JWProxy {
if (!_.isPlainObject(resBodyObj)) {
const error = new errors.UnknownError(
`The downstream server response with the status code ${statusCode} is not a valid JSON object: ` +
_.truncate(`${resBodyObj}`, {length: 300})
_.truncate(`${resBodyObj}`, {length: 300})
);
[statusCode, resBodyObj] = getResponseForW3CError(error);
}
@@ -354,5 +372,5 @@ class JWProxy {
}
}

export { JWProxy };
export {JWProxy};
export default JWProxy;

@@ -3,111 +3,118 @@ import _ from 'lodash';
const codes = {
Success: {
code: 0,
summary: 'The command executed successfully.'
summary: 'The command executed successfully.',
},
NoSuchDriver: {
code: 6,
summary: 'A session is either terminated or not started'
summary: 'A session is either terminated or not started',
},
NoSuchElement: {
code: 7,
summary: 'An element could not be located on the page using the given search parameters.'
summary: 'An element could not be located on the page using the given search parameters.',
},
NoSuchFrame: {
code: 8,
summary: 'A request to switch to a frame could not be satisfied because the frame could not be found.'
summary:
'A request to switch to a frame could not be satisfied because the frame could not be found.',
},
UnknownCommand: {
code: 9,
summary: 'The requested resource could not be found, or a request was received using an HTTP method that is not supported by the mapped resource.'
summary:
'The requested resource could not be found, or a request was received using an HTTP method that is not supported by the mapped resource.',
},
StaleElementReference: {
code: 10,
summary: 'An element command failed because the referenced element is no longer attached to the DOM.'
summary:
'An element command failed because the referenced element is no longer attached to the DOM.',
},
ElementNotVisible: {
code: 11,
summary: 'An element command could not be completed because the element is not visible on the page.'
summary:
'An element command could not be completed because the element is not visible on the page.',
},
InvalidElementState: {
code: 12,
summary: 'An element command could not be completed because the element is in an invalid state (e.g. attempting to click a disabled element).'
summary:
'An element command could not be completed because the element is in an invalid state (e.g. attempting to click a disabled element).',
},
UnknownError: {
code: 13,
summary: 'An unknown server-side error occurred while processing the command.'
summary: 'An unknown server-side error occurred while processing the command.',
},
ElementIsNotSelectable: {
code: 15,
summary: 'An attempt was made to select an element that cannot be selected.'
summary: 'An attempt was made to select an element that cannot be selected.',
},
JavaScriptError: {
code: 17,
summary: 'An error occurred while executing user supplied JavaScript.'
summary: 'An error occurred while executing user supplied JavaScript.',
},
XPathLookupError: {
code: 19,
summary: 'An error occurred while searching for an element by XPath.'
summary: 'An error occurred while searching for an element by XPath.',
},
Timeout: {
code: 21,
summary: 'An operation did not complete before its timeout expired.'
summary: 'An operation did not complete before its timeout expired.',
},
NoSuchWindow: {
code: 23,
summary: 'A request to switch to a different window could not be satisfied because the window could not be found.'
summary:
'A request to switch to a different window could not be satisfied because the window could not be found.',
},
InvalidCookieDomain: {
code: 24,
summary: 'An illegal attempt was made to set a cookie under a different domain than the current page.'
summary:
'An illegal attempt was made to set a cookie under a different domain than the current page.',
},
UnableToSetCookie: {
code: 25,
summary: 'A request to set a cookie\'s value could not be satisfied.'
summary: "A request to set a cookie's value could not be satisfied.",
},
UnexpectedAlertOpen: {
code: 26,
summary: 'A modal dialog was open, blocking this operation'
summary: 'A modal dialog was open, blocking this operation',
},
NoAlertOpenError: {
code: 27,
summary: 'An attempt was made to operate on a modal dialog when one was not open.'
summary: 'An attempt was made to operate on a modal dialog when one was not open.',
},
ScriptTimeout: {
code: 28,
summary: 'A script did not complete before its timeout expired.'
summary: 'A script did not complete before its timeout expired.',
},
InvalidElementCoordinates: {
code: 29,
summary: 'The coordinates provided to an interactions operation are invalid.'
summary: 'The coordinates provided to an interactions operation are invalid.',
},
IMENotAvailable: {
code: 30,
summary: 'IME was not available.'
summary: 'IME was not available.',
},
IMEEngineActivationFailed: {
code: 31,
summary: 'An IME engine could not be started.'
summary: 'An IME engine could not be started.',
},
InvalidSelector: {
code: 32,
summary: 'Argument was an invalid selector (e.g. XPath/CSS).'
summary: 'Argument was an invalid selector (e.g. XPath/CSS).',
},
SessionNotCreatedException: {
code: 33,
summary: 'A new session could not be created.'
summary: 'A new session could not be created.',
},
MoveTargetOutOfBounds: {
code: 34,
summary: 'Target provided for a move action is out of bounds.'
summary: 'Target provided for a move action is out of bounds.',
},
NoSuchContext: {
code: 35,
summary: 'No such context found.'
}
summary: 'No such context found.',
},
};

function getSummaryByCode (code) {
function getSummaryByCode(code) {
code = parseInt(code, 10);
for (let obj of _.values(codes)) {
if (!_.isUndefined(obj.code) && obj.code === code) {
@@ -118,4 +125,4 @@ function getSummaryByCode (code) {
}

export default codes;
export { codes, getSummaryByCode };
export {codes, getSummaryByCode};

Some files were not shown because too many files have changed in this diff