Merge branch 'master' into ocis-1132

This commit is contained in:
A.Unger
2020-12-04 12:39:57 +01:00
40 changed files with 2093 additions and 438 deletions

View File

@@ -17,12 +17,12 @@ config = {
},
'apiTests': {
'coreBranch': 'master',
'coreCommit': 'ed326fd54c7f9142389e49aad87653a905c6ddb7',
'coreCommit': '027bc8b5fe8ad29967184eab200abd4211ef9006',
'numberOfParts': 10
},
'uiTests': {
'phoenixBranch': 'master',
'phoenixCommit': 'bed4effca2eb492d0ee20eee48575ea1eb92cd35',
'phoenixCommit': 'c548282ab652b78e8c077d8bdf32b25060414047',
'suites': {
'webUIBasic': [
'webUILogin',
@@ -137,9 +137,6 @@ def main(ctx):
releaseSubmodule(ctx),
]
purge = purgeBuildArtifactCache(ctx, 'ocis-binary-amd64')
purge['depends_on'] = getPipelineNames(testPipelines(ctx))
after = [
manifest(ctx),
changelog(ctx),
@@ -147,10 +144,16 @@ def main(ctx):
badges(ctx),
docs(ctx),
updateDeployment(ctx),
purge,
]
if ctx.build.event == "cron":
before.append(benchmark(ctx))
purge = purgeBuildArtifactCache(ctx, 'ocis-binary-amd64')
purge['depends_on'] = getPipelineNames(before)
before.append(purge)
notify_pipeline = notify(ctx)
notify_pipeline['depends_on'] = \
getPipelineNames(before)
@@ -174,7 +177,16 @@ def main(ctx):
pipelines = docs_pipelines + [ notify_pipeline ]
else:
pipelines = before + stages + after
purge_dependencies = testPipelines(ctx)
if '[with-benchmarks]' in (ctx.build.title + ctx.build.message):
before.append(benchmark(ctx))
purge_dependencies.append(benchmark(ctx))
purge = purgeBuildArtifactCache(ctx, 'ocis-binary-amd64')
purge['depends_on'] = getPipelineNames(purge_dependencies)
pipelines = before + stages + after + [purge]
notify_pipeline = notify(ctx)
notify_pipeline['depends_on'] = \
@@ -525,6 +537,52 @@ def coreApiTests(ctx, coreBranch = 'master', coreCommit = '', part_number = 1, n
},
}
# benchmark defines the Drone pipeline that runs the k6 performance test
# suite against a locally started ocis server.
# The pipeline reuses the cached ocis binary built by an earlier pipeline
# (see depends_on below), builds the k6 test bundles with yarn, then runs
# every bundle in ./dist matching test-* with the k6 runner.
def benchmark(ctx):
return {
'kind': 'pipeline',
'type': 'docker',
'name': 'benchmark',
# Benchmark results are informational: a failing run must not fail the build.
'failure': 'ignore',
'platform': {
'os': 'linux',
'arch': 'amd64',
},
# Restore the previously built ocis binary from the build-artifact cache
# and start an ocis server before the benchmark steps run.
'steps':
restoreBuildArtifactCache(ctx, 'ocis-binary-amd64', 'ocis/bin/ocis') +
ocisServer('ocis') + [
{
# Compile the TypeScript k6 tests into plain JS bundles under dist/.
'name': 'build benchmarks',
'image': 'node',
'pull': 'always',
'commands': [
'cd tests/k6',
'yarn',
'yarn build',
],
},
{
# Execute each built test file with k6 against the ocis server
# started above (service hostname ocis-server).
'name': 'run benchmarks',
'image': 'loadimpact/k6',
'pull': 'always',
'environment': {
'OC_HOST': 'https://ocis-server:9200',
},
'commands': [
'cd tests/k6',
'for f in ./dist/test-* ; do k6 run "$f" -q; done',
],
},
],
# The cached binary is produced by the buildOcisBinaryForTesting pipeline.
'depends_on': getPipelineNames([buildOcisBinaryForTesting(ctx)]),
'trigger': {
'ref': [
'refs/heads/master',
'refs/tags/v*',
'refs/pull/**',
],
},
}
def uiTests(ctx, phoenixBranch, phoenixCommit):
suiteNames = config['uiTests']['suites'].keys()
return [uiTestPipeline(ctx, suiteName, phoenixBranch, phoenixCommit) for suiteName in suiteNames]
@@ -1609,7 +1667,7 @@ def genericCache(name, action, mounts, cache_key):
'from_secret': 'cache_s3_endpoint'
},
'bucket': 'cache',
'region': 'us-east-1', # not used at all, but failes if not given!
'region': 'us-east-1', # not used at all, but fails if not given!
'path_style': 'true',
'cache_key': cache_key,
'rebuild': rebuild,

2
.gitignore vendored
View File

@@ -11,7 +11,7 @@ node_modules/
.idea
*/yarn-error.log
yarn-error.log
# Konnectd
konnectd/assets/identifier

View File

@@ -12,6 +12,7 @@
* Bugfix - Fix choose account dialogue: [#846](https://github.com/owncloud/ocis/pull/846)
* Bugfix - Fix id or username query handling: [#745](https://github.com/owncloud/ocis/pull/745)
* Bugfix - Fix konnectd build: [#809](https://github.com/owncloud/ocis/pull/809)
* Bugfix - Fix path of files shared with me in ocs api: [#204](https://github.com/owncloud/product/issues/204)
* Bugfix - Use micro default client: [#718](https://github.com/owncloud/ocis/pull/718)
* Bugfix - Allow consent-prompt with switch-account: [#788](https://github.com/owncloud/ocis/pull/788)
* Bugfix - Mint token with uid and gid: [#737](https://github.com/owncloud/ocis/pull/737)
@@ -23,7 +24,6 @@
* Change - Cache password validation: [#958](https://github.com/owncloud/ocis/pull/958)
* Change - Filesystem based index: [#709](https://github.com/owncloud/ocis/pull/709)
* Change - Rebuild index command for accounts: [#748](https://github.com/owncloud/ocis/pull/748)
* Change - Add k6: [#941](https://github.com/owncloud/ocis/pull/941)
* Change - Add the thumbnails command: [#156](https://github.com/owncloud/ocis/issues/156)
* Change - Use bcrypt to hash the user passwords: [#510](https://github.com/owncloud/ocis/issues/510)
* Change - Replace the library which scales the images: [#910](https://github.com/owncloud/ocis/pull/910)
@@ -42,6 +42,7 @@
* Change - Account management permissions for Admin role: [#124](https://github.com/owncloud/product/issues/124)
* Change - Update phoenix to v0.18.0: [#651](https://github.com/owncloud/ocis/pull/651)
* Change - Default apps in ownCloud Web: [#688](https://github.com/owncloud/ocis/pull/688)
* Change - Proxy allow insecure upstreams: [#1007](https://github.com/owncloud/ocis/pull/1007)
* Change - Make ocis-settings available: [#287](https://github.com/owncloud/ocis/pull/287)
* Change - Start ocis-proxy with the ocis server command: [#119](https://github.com/owncloud/ocis/issues/119)
* Change - Theme welcome and choose account pages: [#887](https://github.com/owncloud/ocis/pull/887)
@@ -63,6 +64,7 @@
* Enhancement - Add basic auth option: [#627](https://github.com/owncloud/ocis/pull/627)
* Enhancement - Document how to run OCIS on top of EOS: [#172](https://github.com/owncloud/ocis/pull/172)
* Enhancement - Add the glauth service: [#244](https://github.com/owncloud/product/issues/244)
* Enhancement - Add k6: [#941](https://github.com/owncloud/ocis/pull/941)
* Enhancement - Add the konnectd service: [#244](https://github.com/owncloud/product/issues/244)
* Enhancement - Add the ocis-phoenix service: [#244](https://github.com/owncloud/product/issues/244)
* Enhancement - Add the ocis-pkg package: [#244](https://github.com/owncloud/product/issues/244)
@@ -184,6 +186,13 @@
https://github.com/owncloud/ocis/pull/809
* Bugfix - Fix path of files shared with me in ocs api: [#204](https://github.com/owncloud/product/issues/204)
The path of files shared with me using the ocs api was pointing to an incorrect location.
https://github.com/owncloud/product/issues/204
https://github.com/owncloud/ocis/pull/994
* Bugfix - Use micro default client: [#718](https://github.com/owncloud/ocis/pull/718)
Tags: glauth
@@ -292,12 +301,6 @@
https://github.com/owncloud/ocis/pull/748
* Change - Add k6: [#941](https://github.com/owncloud/ocis/pull/941)
Add k6 as a load testing framework
https://github.com/owncloud/ocis/pull/941
* Change - Add the thumbnails command: [#156](https://github.com/owncloud/ocis/issues/156)
Tags: thumbnails
@@ -480,6 +483,15 @@
https://github.com/owncloud/ocis/pull/688
* Change - Proxy allow insecure upstreams: [#1007](https://github.com/owncloud/ocis/pull/1007)
Tags: proxy
We can now configure the proxy if insecure upstream servers are allowed. This was added since
you need to disable certificate checks for some situations like testing.
https://github.com/owncloud/ocis/pull/1007
* Change - Make ocis-settings available: [#287](https://github.com/owncloud/ocis/pull/287)
Tags: settings
@@ -732,6 +744,15 @@
https://github.com/owncloud/product/issues/244
* Enhancement - Add k6: [#941](https://github.com/owncloud/ocis/pull/941)
Tags: tests
Add k6 as a performance testing framework
https://github.com/owncloud/ocis/pull/941
https://github.com/owncloud/ocis/pull/983
* Enhancement - Add the konnectd service: [#244](https://github.com/owncloud/product/issues/244)
Tags: konnectd

View File

@@ -1,5 +1,8 @@
Change: Add k6
Enhancement: Add k6
add k6 as a load testing framework
Tags: tests
add k6 as a performance testing framework
https://github.com/owncloud/ocis/pull/941
https://github.com/owncloud/ocis/pull/983

View File

@@ -0,0 +1,8 @@
Change: Proxy allow insecure upstreams
Tags: proxy
We can now configure the proxy if insecure upstream servers are allowed.
This was added since you need to disable certificate checks for some situations like testing.
https://github.com/owncloud/ocis/pull/1007

View File

@@ -37,6 +37,8 @@ You may add flags to your commit message or PR title in order to speed up pipeli
- `[tests-only]`: please add this flag, if you only changed tests or test-related tooling. You do not need to add a changelog for tests-only changes.
- `[with-benchmarks]`: please add this flag, if you want benchmarks to be run in CI.
### Knowledge base
- My pipeline fails because some CI related files or commands are missing.

View File

@@ -117,6 +117,7 @@ type Config struct {
PreSignedURL PreSignedURL
AutoprovisionAccounts bool
EnableBasicAuth bool
InsecureBackends bool
}
// OIDC is the config for the OpenID-Connect middleware. If set the proxy will try to authenticate every request

View File

@@ -185,6 +185,13 @@ func ServerWithConfig(cfg *config.Config) []cli.Flag {
EnvVars: []string{"PROXY_REVA_GATEWAY_ADDR"},
Destination: &cfg.Reva.Address,
},
&cli.BoolFlag{
Name: "insecure",
Value: false,
Usage: "allow insecure communication to upstream servers",
EnvVars: []string{"PROXY_INSECURE_BACKENDS"},
Destination: &cfg.InsecureBackends,
},
// OIDC

View File

@@ -2,11 +2,14 @@ package proxy
import (
"context"
"crypto/tls"
"net"
"net/http"
"net/http/httputil"
"net/url"
"regexp"
"strings"
"time"
"github.com/owncloud/ocis/proxy/pkg/proxy/policy"
"go.opencensus.io/plugin/ochttp/propagation/tracecontext"
@@ -37,6 +40,24 @@ func NewMultiHostReverseProxy(opts ...Option) *MultiHostReverseProxy {
}
rp.Director = rp.directorSelectionDirector
// equals http.DefaultTransport except TLSClientConfig
rp.Transport = &http.Transport{
Proxy: http.ProxyFromEnvironment,
DialContext: (&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
DualStack: true,
}).DialContext,
ForceAttemptHTTP2: true,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ExpectContinueTimeout: 1 * time.Second,
TLSClientConfig: &tls.Config{
InsecureSkipVerify: options.Config.InsecureBackends,
},
}
if options.Config.Policies == nil {
rp.logger.Info().Str("source", "runtime").Msg("Policies")
options.Config.Policies = defaultPolicies()

2
tests/k6/.eslintignore Normal file
View File

@@ -0,0 +1,2 @@
node_modules
dist

23
tests/k6/.eslintrc Normal file
View File

@@ -0,0 +1,23 @@
{
"plugins": [
"simple-import-sort",
"import"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 2020,
"sourceType": "module"
},
"extends": [
"plugin:@typescript-eslint/recommended",
"prettier/@typescript-eslint",
"plugin:prettier/recommended"
],
"rules": {
"simple-import-sort/imports": "error",
"simple-import-sort/exports": "error",
"import/first": "error",
"import/newline-after-import": "error",
"import/no-duplicates": "error",
}
}

7
tests/k6/.prettierrc Normal file
View File

@@ -0,0 +1,7 @@
{
"semi": true,
"trailingComma": "all",
"singleQuote": true,
"printWidth": 120,
"tabWidth": 4
}

View File

@@ -11,5 +11,13 @@ $ yarn build
## How to run
```console
k6 run ./dist/TESTNAME.js
k6 run ./dist/NAME_OF_TEST.js
```
## Environment variables
```console
$ OC_LOGIN=USERNAME OC_PASSWORD=PASSWORD k6 run ...
$ OC_HOST=URL k6 run ...
$ OC_OIDC_HOST=URL k6 run ...
$ OC_OIDC_ENABLED=BOOL k6 run ...
```

View File

@@ -4,9 +4,19 @@
"main": "index.js",
"scripts": {
"clean": "rm -rf ./dist",
"lint": "eslint './src/**/*.ts' --fix",
"build": "rollup -c",
"build:w": "rollup -c -w",
"postinstall": "node ./scripts/postinstall.js"
"build:w": "rollup -c -w"
},
"husky": {
"hooks": {
"pre-commit": "lint-staged"
}
},
"lint-staged": {
"*.{ts}": [
"eslint --fix"
]
},
"devDependencies": {
"@babel/core": "^7.9.0",
@@ -19,31 +29,31 @@
"@rollup/plugin-json": "^4.0.3",
"@rollup/plugin-node-resolve": "^7.1.3",
"@rollup/pluginutils": "^4.1.0",
"@types/faker": "^5.1.4",
"@types/jest": "^25.2.1",
"@types/k6": "^0.28.2",
"@types/lodash": "^4.14.165",
"@typescript-eslint/eslint-plugin": "^2.29.0",
"@typescript-eslint/parser": "^2.29.0",
"axios": "^0.21.0",
"@typescript-eslint/eslint-plugin": "^4.9.0",
"@typescript-eslint/parser": "^4.9.0",
"babel-plugin-lodash": "^3.3.4",
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.11.0",
"eslint": "^7.14.0",
"eslint-config-prettier": "^6.15.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-jest": "^23.8.2",
"eslint-plugin-prettier": "^3.1.3",
"eslint-plugin-prettier": "^3.2.0",
"eslint-plugin-simple-import-sort": "^6.0.1",
"husky": "^4.3.0",
"jest": "^25.4.0",
"k6": "^0.0.0",
"lint-staged": "^10.1.7",
"ora": "^5.1.0",
"prettier": "^2.0.5",
"prettier": "^2.2.1",
"prettier-eslint": "^12.0.0",
"rollup": "^2.7.2",
"rollup-plugin-babel": "^4.4.0",
"rollup-plugin-multi-input": "^1.1.1",
"rollup-plugin-terser": "^5.3.0",
"shelljs": "^0.8.4",
"typescript": "^3.8.3"
"typescript": "^4.1.2"
},
"dependencies": {
"lodash": "^4.17.20"
"lodash": "^4.17.20",
"query-string": "^6.13.7"
}
}

View File

@@ -1,46 +1,40 @@
import commonjs from '@rollup/plugin-commonjs'
import json from '@rollup/plugin-json'
import resolve from '@rollup/plugin-node-resolve'
import babel from 'rollup-plugin-babel'
import { terser } from 'rollup-plugin-terser'
import commonjs from '@rollup/plugin-commonjs';
import json from '@rollup/plugin-json';
import resolve from '@rollup/plugin-node-resolve';
import babel from 'rollup-plugin-babel';
import { terser } from 'rollup-plugin-terser';
import multiInput from 'rollup-plugin-multi-input';
import path from 'path';
import utils from '@rollup/pluginutils';
import pkg from './package.json';
const extensions = ['.js', '.ts'];
export default [
{
input: ['src/test-*.ts'],
external: utils.createFilter([
'k6/**',
...Object.keys(pkg.devDependencies),
], null, { resolve: false }),
output: [
{
dir: 'dist',
format: 'cjs',
exports: 'named',
chunkFileNames: '_chunks/[name]-[hash].js'
},
],
plugins: [
multiInput({
transformOutputPath: (output, input) => path.basename(output),
}),
json(),
resolve(
{
extensions,
}
),
commonjs(),
babel({
extensions,
include: ['src/**/*'],
}),
terser(),
],
}
]
{
input: ['src/test/**/*.ts', '!src/test/**/*.lib.ts', '!src/test/**/index.ts', '!src/test/**/_*.ts'],
external: utils.createFilter(['k6/**', ...Object.keys(pkg.devDependencies)], null, { resolve: false }),
output: [
{
dir: 'dist',
format: 'cjs',
exports: 'named',
chunkFileNames: '_chunks/[name]-[hash].js',
},
],
plugins: [
multiInput({
transformOutputPath: (output, input) => `${output.split('/').join('-')}`,
}),
json(),
resolve({
extensions,
}),
commonjs(),
babel({
extensions,
include: ['src/**/*'],
}),
terser(),
],
},
];

View File

@@ -1,53 +0,0 @@
const shell = require('shelljs')
const path = require("path")
const fs = require('fs');
const ora = require('ora');
const axios = require('axios');
const downloadFile = async (url, name) => {
const parsedPath = path.parse(url)
const destDir = './dist/_files/'
const destFile = path.join(destDir, name || parsedPath.base)
if (!fs.existsSync(destDir)) {
shell.mkdir('-p', destDir)
}
if(fs.existsSync(destFile)){
return
}
const spinner = ora(`downloading: ${ url }`).start();
const { data } = await axios({
method: "get",
url: url,
responseType: "stream"
});
const stream = fs.createWriteStream(destFile);
data.pipe(stream);
return new Promise((resolve, reject) => {
data.on('error', err => {
console.error(err);
spinner.stop();
reject(err);
});
data.on('end', () => {
stream.end();
spinner.stop();
resolve();
});
});
}
(async () => {
await downloadFile('https://www.sample-videos.com/img/Sample-jpg-image-50kb.jpg', 'kb_50.jpg')
await downloadFile('http://ipv4.download.thinkbroadband.com/5MB.zip', 'mb_5.zip')
await downloadFile('http://ipv4.download.thinkbroadband.com/10MB.zip', 'mb_10.zip')
await downloadFile('http://ipv4.download.thinkbroadband.com/20MB.zip', 'mb_20.zip')
await downloadFile('http://ipv4.download.thinkbroadband.com/50MB.zip', 'mb_50.zip')
await downloadFile('http://ipv4.download.thinkbroadband.com/100MB.zip', 'mb_100.zip')
await downloadFile('http://ipv4.download.thinkbroadband.com/200MB.zip', 'mb_200.zip')
})()

View File

@@ -1,28 +0,0 @@
import encoding from 'k6/encoding';
import {bytes} from "k6";
import http, {RefinedResponse, ResponseType} from "k6/http";
import * as defaults from "./defaults";
import * as types from "./types";
export const uploadFile = <RT extends ResponseType | undefined>(account: types.Account, data: bytes, name: string): RefinedResponse<RT> => {
return http.put(
`https://${defaults.host.name}/remote.php/dav/files/${account.login}/${name}`,
data as any,
{
headers: {
Authorization: `Basic ${encoding.b64encode(`${account.login}:${account.password}`)}`,
}
}
);
}
export const userInfo = <RT extends ResponseType | undefined>(account: any): RefinedResponse<RT> => {
return http.get(
`https://${defaults.host.name}/ocs/v1.php/cloud/users/${account.login}`,
{
headers: {
Authorization: `Basic ${encoding.b64encode(`${account.login}:${account.password}`)}`,
},
}
);
}

View File

@@ -0,0 +1,53 @@
import { bytes } from 'k6';
import encoding from 'k6/encoding';
import http, { RefinedParams, RefinedResponse, RequestBody, ResponseType } from 'k6/http';
import { merge } from 'lodash';
import * as defaults from '../defaults';
import * as types from '../types';
/**
 * Builds the HTTP auth headers for a request.
 *
 * The credential is either an OIDC token (`types.Token`, recognized by the
 * presence of its `tokenType` field) or a basic-auth account
 * (`types.Account` with `login`/`password`). Returns:
 * - Token:   `Authorization: <tokenType> <accessToken>`
 * - Account: `Authorization: Basic <base64(login:password)>`
 * - no credential: an empty object.
 */
export const buildHeaders = ({ credential }: { credential?: types.Credential }): { [key: string]: string } => {
    // Bug fix: the previous implementation evaluated
    // `(credential as types.Token).tokenType` before the `credential &&`
    // guard, so calling with an undefined credential (allowed by the
    // optional signature) threw a TypeError. Guard first instead.
    if (!credential) {
        return {};
    }
    // A Token is distinguished from an Account by having a `tokenType`.
    const isOIDCGuard = (credential as types.Token).tokenType !== undefined;
    const authOIDC = credential as types.Token;
    const authBasic = credential as types.Account;
    return {
        Authorization: isOIDCGuard
            ? `${authOIDC.tokenType} ${authOIDC.accessToken}`
            : `Basic ${encoding.b64encode(`${authBasic.login}:${authBasic.password}`)}`,
    };
};
/**
 * Joins `path` onto the configured host, dropping empty path segments so
 * leading/trailing/duplicate slashes in `path` do not produce `//` in the URL.
 */
export const buildURL = ({ path }: { path: string }): string => {
    const segments = path.split('/').filter((segment) => segment.length > 0);
    return [defaults.ENV.HOST].concat(segments).join('/');
};
/**
 * Thin wrapper around k6's `http.request`: builds the full URL from the
 * configured host, attaches auth headers for `credential`, and deep-merges
 * any caller-supplied k6 request params (so caller params can extend or
 * override the generated headers).
 *
 * @param method     HTTP/WebDAV verb to use.
 * @param path       Path relative to the configured host.
 * @param body       Request body; defaults to an empty object.
 * @param params     Extra k6 request params (tags, headers, ...).
 * @param credential Account or OIDC token used for the Authorization header.
 * @returns the raw k6 response.
 */
export const request = ({
method,
path,
body = {},
params = {},
credential,
}: {
method: 'PROPFIND' | 'PUT' | 'GET' | 'POST' | 'DELETE' | 'MKCOL';
path: string;
credential: types.Credential;
body?: RequestBody | bytes | null;
params?: RefinedParams<ResponseType> | null;
}): RefinedResponse<ResponseType> => {
return http.request(
method,
buildURL({ path }),
// `as never` silences the k6 typing mismatch between our accepted body
// union and http.request's body parameter; the value is passed through
// unchanged at runtime.
body as never,
// lodash merge: deep-merges caller params over the generated headers.
merge(
{
headers: {
...buildHeaders({ credential }),
},
},
params,
),
);
};

112
tests/k6/src/lib/api/dav.ts Normal file
View File

@@ -0,0 +1,112 @@
import { RefinedResponse, ResponseType } from 'k6/http';
import * as types from '../types';
import * as api from './api';
/**
 * Uploads an asset via WebDAV (PUT to remote.php/dav/files/<user>/...).
 * `path` is an optional folder prefix inside the user's files.
 * NOTE(review): with the default `path = ''` the URL contains a double
 * slash (`.../<user>//<name>`) — presumably tolerated by the server, but
 * worth confirming.
 */
export class Upload {
public static exec({
credential,
userName,
path = '',
asset,
tags,
}: {
credential: types.Credential;
userName: string;
asset: types.Asset;
path?: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'PUT',
credential,
path: `/remote.php/dav/files/${userName}/${path}/${asset.name}`,
params: { tags },
body: asset.bytes,
});
}
}
/**
 * Downloads a file via WebDAV (GET on remote.php/dav/files/<user>/<path>),
 * forwarding the optional metric `tags` as k6 request params.
 */
export class Download {
    public static exec({
        credential,
        userName,
        path,
        tags,
    }: {
        credential: types.Credential;
        userName: string;
        path: string;
        tags?: types.Tags;
    }): RefinedResponse<ResponseType> {
        const davPath = `/remote.php/dav/files/${userName}/${path}`;
        return api.request({
            method: 'GET',
            credential,
            path: davPath,
            params: { tags },
        });
    }
}
/** Deletes a WebDAV resource (DELETE on remote.php/dav/files/<user>/<path>). */
export class Delete {
public static exec({
credential,
userName,
path,
tags,
}: {
credential: types.Credential;
userName: string;
path: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'DELETE',
credential,
path: `/remote.php/dav/files/${userName}/${path}`,
params: { tags },
});
}
}
/** Creates a WebDAV collection/folder (MKCOL on remote.php/dav/files/<user>/<path>). */
export class Create {
public static exec({
credential,
userName,
path,
tags,
}: {
credential: types.Credential;
userName: string;
path: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'MKCOL',
credential,
path: `/remote.php/dav/files/${userName}/${path}`,
params: { tags },
});
}
}
/**
 * Lists a WebDAV resource (PROPFIND). `path` defaults to '' which targets
 * the user's root folder (note the resulting trailing slash in the URL).
 */
export class Propfind {
public static exec({
credential,
userName,
path = '',
tags,
}: {
credential: types.Credential;
userName: string;
path?: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'PROPFIND',
credential,
path: `/remote.php/dav/files/${userName}/${path}`,
params: { tags },
});
}
}

View File

@@ -0,0 +1,3 @@
export * as api from './api';
export * as dav from './dav';
export * as users from './users';

View File

@@ -0,0 +1,47 @@
import { RefinedResponse, ResponseType } from 'k6/http';
import * as types from '../types';
import * as api from './api';
/**
 * Provisions a user through the OCS API (POST /ocs/v1.php/cloud/users).
 * `credential` must belong to an account allowed to create users
 * (presumably an admin — verify against callers).
 */
export class Create {
public static exec({
userName,
password,
email,
credential,
tags,
}: {
credential: types.Credential;
userName: string;
password: string;
email: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'POST',
credential,
path: `/ocs/v1.php/cloud/users`,
params: { tags },
body: { userid: userName, password, email },
});
}
}
/** Removes a user through the OCS API (DELETE /ocs/v1.php/cloud/users/<user>). */
export class Delete {
public static exec({
userName,
credential,
tags,
}: {
credential: types.Credential;
userName: string;
tags?: types.Tags;
}): RefinedResponse<ResponseType> {
return api.request({
method: 'DELETE',
credential,
path: `/ocs/v1.php/cloud/users/${userName}`,
params: { tags },
});
}
}

156
tests/k6/src/lib/auth.ts Normal file
View File

@@ -0,0 +1,156 @@
import { fail } from 'k6';
import http from 'k6/http';
import { get } from 'lodash';
import queryString from 'query-string';
import * as defaults from './defaults';
import * as types from './types';
/**
 * Credential factory: picks the auth strategy for an account at
 * construction time. When OC_OIDC_ENABLED is set (defaults.ENV.OIDC_ENABLED)
 * credentials are OIDC tokens obtained via OIDCProvider; otherwise the
 * raw login/password account is used as a basic-auth credential.
 */
export default class Factory {
private provider!: types.AuthProvider;
public account!: types.Account;
constructor(account: types.Account) {
this.account = account;
if (defaults.ENV.OIDC_ENABLED) {
this.provider = new OIDCProvider(account);
return;
}
this.provider = new AccountProvider(account);
}
// Delegates to the selected provider; for OIDC this may perform the
// token round-trip lazily (see OIDCProvider.credential).
public get credential(): types.Credential {
return this.provider.credential;
}
}
/**
 * Basic-auth provider: the credential is simply the account's
 * login/password pair, returned unchanged.
 */
class AccountProvider implements types.AuthProvider {
    constructor(private readonly account: types.Account) {}

    public get credential(): types.Account {
        return this.account;
    }
}
/**
 * OIDC provider for the konnectd identity service: performs the
 * authorization-code flow (logon -> authorize -> token) and caches the
 * resulting token until shortly before it expires.
 */
class OIDCProvider implements types.AuthProvider {
private account: types.Account;
private redirectUri = `${defaults.ENV.OIDC_HOST}/oidc-callback.html`;
private logonUri = `${defaults.ENV.OIDC_HOST}/signin/v1/identifier/_/logon`;
private tokenUrl = `${defaults.ENV.OIDC_HOST}/konnect/v1/token`;
// Token cache: `validTo` is expiry minus a small safety offset (see below).
private cache!: {
validTo: Date;
token: types.Token;
};
constructor(account: types.Account) {
this.account = account;
}
// Returns the cached token, refreshing it via the full three-step flow
// when absent or expired.
public get credential(): types.Token {
if (!this.cache || this.cache.validTo <= new Date()) {
const continueURI = this.getContinueURI();
const code = this.getCode(continueURI);
const token = this.getToken(code);
this.cache = {
validTo: ((): Date => {
// Refresh 5 seconds early so a token never expires mid-request.
const offset = 5;
const d = new Date();
d.setSeconds(d.getSeconds() + token.expiresIn - offset);
return d;
})(),
token,
};
}
return this.cache.token;
}
// Step 1: logon against konnectd's identifier endpoint with the account's
// username/password; returns the `continue_uri` to the authorize endpoint.
private getContinueURI(): string {
const logonResponse = http.post(
this.logonUri,
JSON.stringify({
params: [this.account.login, this.account.password, '1'],
hello: {
scope: 'openid profile email',
client_id: 'phoenix',
redirect_uri: this.redirectUri,
flow: 'oidc',
},
// NOTE(review): fixed state value — fine for load testing, not a
// CSRF protection.
state: 'vp42cf',
}),
{
headers: {
'Kopano-Konnect-XSRF': '1',
Referer: defaults.ENV.OIDC_HOST,
'Content-Type': 'application/json',
},
},
);
const continueURI = get(logonResponse.json(), 'hello.continue_uri');
if (logonResponse.status != 200 || !continueURI) {
// k6 fail() aborts the iteration with the given message.
fail(this.logonUri);
}
return continueURI;
}
// Step 2: hit the authorize endpoint without following redirects and pull
// the authorization code out of the Location header's query string.
private getCode(continueURI: string): string {
const authorizeUri = `${continueURI}?${queryString.stringify({
client_id: 'phoenix',
prompt: 'none',
redirect_uri: this.redirectUri,
response_mode: 'query',
response_type: 'code',
scope: 'openid profile email',
})}`;
const authorizeResponse = http.get(authorizeUri, {
redirects: 0,
});
const code = get(queryString.parseUrl(authorizeResponse.headers.Location), 'query.code');
if (authorizeResponse.status != 302 || !code) {
fail(continueURI);
}
return code;
}
// Step 3: exchange the authorization code for access/id tokens.
private getToken(code: string): types.Token {
const tokenResponse = http.post(this.tokenUrl, {
client_id: 'phoenix',
code,
redirect_uri: this.redirectUri,
grant_type: 'authorization_code',
});
const token = {
accessToken: get(tokenResponse.json(), 'access_token'),
tokenType: get(tokenResponse.json(), 'token_type'),
idToken: get(tokenResponse.json(), 'id_token'),
expiresIn: get(tokenResponse.json(), 'expires_in'),
};
// All four fields are required downstream; treat any missing one as failure.
if (
tokenResponse.status != 200 ||
!token.accessToken ||
!token.tokenType ||
!token.idToken ||
!token.expiresIn
) {
fail(this.tokenUrl);
}
return token;
}
}

View File

@@ -1,16 +1,29 @@
import * as types from './types';
export const host = {
name: __ENV.OC_HOST_NAME || 'localhost:9200'
export class ENV {
public static readonly HOST = __ENV.OC_HOST || 'https://localhost:9200';
public static readonly LOGIN = __ENV.OC_LOGIN;
public static readonly PASSWORD = __ENV.OC_PASSWORD;
public static readonly OIDC_HOST = __ENV.OC_OIDC_HOST || ENV.HOST;
public static readonly OIDC_ENABLED = __ENV.OC_OIDC_ENABLED === 'true' || false;
}
export const accounts: { [key: string]: types.Account; } = {
einstein: {
login: 'einstein',
password: 'relativity',
},
richard: {
login: 'richard',
password: 'superfluidity',
},
}
export class ACCOUNTS {
public static readonly ADMIN = 'admin';
public static readonly EINSTEIN = 'einstein';
public static readonly RICHARD = 'richard';
public static readonly ALL: { [key: string]: types.Account } = {
admin: {
login: 'admin',
password: 'admin',
},
einstein: {
login: 'einstein',
password: 'relativity',
},
richard: {
login: 'richard',
password: 'superfluidity',
},
};
}

View File

@@ -1,3 +1,6 @@
export * as defaults from './defaults'
export * as api from './api'
export * as utils from './utils'
export * as api from './api';
export { default as auth } from './auth';
export * as defaults from './defaults';
export * as playbook from './playbook';
export * as types from './types';
export * as utils from './utils';

View File

@@ -0,0 +1,178 @@
import { check } from 'k6';
import { RefinedResponse, ResponseType } from 'k6/http';
import * as api from '../api';
import * as types from '../types';
import { Play } from './playbook';
/**
 * Play wrapping dav upload: executes the request, checks for HTTP 201,
 * bumps the error-rate gauge on failure, and always records the request
 * duration in the play's trend metric.
 */
export class Upload extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_dav_upload` });
}
public exec({
credential,
userName,
path,
asset,
tags,
}: {
credential: types.Credential;
path?: string;
userName: string;
asset: types.Asset;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
// Merge caller tags over the play's default tags; the merged set is
// attached to the request, the metrics, and the return value.
tags = { ...this.tags, ...tags };
const response = api.dav.Upload.exec({ credential: credential, asset, userName, tags, path });
// check() returns false when the assertion fails; `||` then records an error.
check(
response,
{
'dav upload status is 201': () => response.status === 201,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}
/**
 * Play wrapping dav delete: executes the request, checks for HTTP 204,
 * bumps the error-rate gauge on failure, and always records the request
 * duration in the play's trend metric.
 */
export class Delete extends Play {
    constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
        super({ name: name || `oc_${metricID}_play_dav_delete` });
    }

    public exec({
        credential,
        userName,
        path,
        tags,
    }: {
        credential: types.Credential;
        path: string;
        userName: string;
        tags?: types.Tags;
    }): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
        // Caller tags take precedence over the play's default tags.
        const mergedTags = { ...this.tags, ...tags };
        const response = api.dav.Delete.exec({ credential, userName, tags: mergedTags, path });
        const passed = check(
            response,
            {
                'dav delete status is 204': () => response.status === 204,
            },
            mergedTags,
        );
        if (!passed) {
            this.metricErrorRate.add(1, mergedTags);
        }
        this.metricTrend.add(response.timings.duration, mergedTags);
        return { response, tags: mergedTags };
    }
}
/** Play wrapping dav download: checks for HTTP 200 and records metrics. */
export class Download extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_dav_download` });
}
public exec({
credential,
userName,
path,
tags,
}: {
credential: types.Credential;
path: string;
userName: string;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
tags = { ...this.tags, ...tags };
const response = api.dav.Download.exec({ credential: credential, userName, tags, path });
check(
response,
{
'dav download status is 200': () => response.status === 200,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}
/** Play wrapping dav folder creation (MKCOL): checks for HTTP 201 and records metrics. */
export class Create extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_dav_create` });
}
public exec({
credential,
userName,
path,
tags,
}: {
credential: types.Credential;
path: string;
userName: string;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
tags = { ...this.tags, ...tags };
const response = api.dav.Create.exec({ credential: credential, userName, tags, path });
check(
response,
{
'dav create status is 201': () => response.status === 201,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}
/** Play wrapping dav PROPFIND: checks for HTTP 207 (multi-status) and records metrics. */
export class Propfind extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_dav_propfind` });
}
public exec({
credential,
userName,
path,
tags,
}: {
credential: types.Credential;
path?: string;
userName: string;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
tags = { ...this.tags, ...tags };
const response = api.dav.Propfind.exec({ credential: credential, userName, tags, path });
check(
response,
{
'dav propfind status is 207': () => response.status === 207,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}

View File

@@ -0,0 +1,2 @@
export * as dav from './dav';
export * as users from './users';

View File

@@ -0,0 +1,19 @@
import { Gauge, Trend } from 'k6/metrics';
/**
 * Base class for playbook steps ("plays"). Each play owns a pair of k6
 * metrics named after the play — a duration Trend (`<name>_trend`) and an
 * error Gauge (`<name>_error_rate`) — plus a default tag set `{ play: name }`.
 */
export class Play {
    public readonly name: string;
    public readonly metricTrendName: string;
    public readonly metricErrorRateName: string;
    public readonly metricTrend: Trend;
    public readonly metricErrorRate: Gauge;
    protected tags: { [key: string]: string };

    constructor({ name }: { name: string }) {
        const trendName = `${name}_trend`;
        const errorRateName = `${name}_error_rate`;
        this.name = name;
        this.metricTrendName = trendName;
        this.metricErrorRateName = errorRateName;
        // Second argument `true` marks the Trend as time-based.
        this.metricTrend = new Trend(trendName, true);
        this.metricErrorRate = new Gauge(errorRateName);
        this.tags = { play: name };
    }
}

View File

@@ -0,0 +1,74 @@
import { check } from 'k6';
import { RefinedResponse, ResponseType } from 'k6/http';
import * as api from '../api';
import * as types from '../types';
import { Play } from './playbook';
/**
 * Play wrapping OCS user creation: executes the request, checks for
 * HTTP 200, bumps the error-rate gauge on failure, and records the
 * request duration in the play's trend metric.
 */
export class Create extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_users_create` });
}
public exec({
credential,
userName,
password,
email,
tags,
}: {
credential: types.Credential;
userName: string;
password: string;
email: string;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
// Merge caller tags over the play's default tags.
tags = { ...this.tags, ...tags };
const response = api.users.Create.exec({ credential: credential, userName, password, tags, email });
check(
response,
{
'users create status is 200': () => response.status === 200,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}
/** Play wrapping OCS user deletion: checks for HTTP 200 and records metrics. */
export class Delete extends Play {
constructor({ name, metricID = 'default' }: { name?: string; metricID?: string } = {}) {
super({ name: name || `oc_${metricID}_play_users_delete` });
}
public exec({
credential,
userName,
tags,
}: {
credential: types.Credential;
userName: string;
tags?: types.Tags;
}): { response: RefinedResponse<ResponseType>; tags: types.Tags } {
tags = { ...this.tags, ...tags };
const response = api.users.Delete.exec({ credential: credential, userName, tags });
check(
response,
{
'users delete status is 200': () => response.status === 200,
},
tags,
) || this.metricErrorRate.add(1, tags);
this.metricTrend.add(response.timings.duration, tags);
return { response, tags };
}
}

View File

@@ -1,4 +1,28 @@
import { bytes } from 'k6';
// A named binary payload used as upload content by the benchmark plays.
export interface Asset {
  bytes: bytes; // raw content as a k6 byte array
  name: string; // target file name, including extension
}
// Token credential. NOTE(review): field names mirror an OAuth2/OIDC token
// response (access_token, token_type, id_token, expires_in) — confirm against
// the auth provider that fills this in.
export interface Token {
  accessToken: string;
  tokenType: string;
  idToken: string;
  expiresIn: number; // presumably lifetime in seconds — TODO confirm
}
export interface Account {
login: string
password: string
}
login: string;
password: string;
}
// Either a token-based or a basic-auth credential authenticates API calls.
export type Credential = Token | Account;
// Anything that can hand out a Credential (e.g. an auth factory).
export interface AuthProvider {
  credential: Credential;
}
// Free-form key/value pairs attached to k6 checks and metrics.
export type Tags = { [key: string]: string };
// Size units accepted by asset builders.
// NOTE(review): 'declare' is redundant in an implementation file — could be a plain 'export type'.
export declare type AssetUnit = 'KB' | 'MB' | 'GB';

View File

@@ -1,3 +1,61 @@
import { bytes } from 'k6';
import { randomBytes as k6_randomBytes } from 'k6/crypto';
import * as defaults from './defaults';
import * as types from './types';

// (Fixed: a stale pre-merge copy of `randomString` preceded the imports and
// redeclared the export below; it has been removed. The parameterized version
// keeps zero-argument calls working via its defaults.)

/** Returns a pseudo-random float in the half-open interval [min, max). */
export const randomNumber = ({ min, max }: { min: number; max: number }): number => {
  return Math.random() * (max - min) + min;
};

/** Returns a random string of ASCII letters (default length 10). */
export const randomString = ({ length = 10 }: { length?: number } = {}): string => {
  const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz';
  let str = '';
  for (let i = 0; i < length; i++) {
    str += chars.charAt(Math.floor(Math.random() * chars.length));
  }
  return str;
};
/**
 * Resolves the account the benchmark runs as.
 *
 * Environment-provided LOGIN/PASSWORD take precedence; otherwise the seeded
 * account matching `login` is looked up in defaults.ACCOUNTS.ALL.
 *
 * (Fixed: `login` was typed as required, which made its default value
 * unreachable; it is now optional and the whole argument may be omitted.)
 */
export const buildAccount = ({ login = defaults.ACCOUNTS.EINSTEIN }: { login?: string } = {}): types.Account => {
  if (defaults.ENV.LOGIN && defaults.ENV.PASSWORD) {
    return {
      login: defaults.ENV.LOGIN,
      password: defaults.ENV.PASSWORD,
    };
  }
  return defaults.ACCOUNTS.ALL[login];
};
/**
 * Builds an in-memory asset of `size` `unit`s of random bytes.
 *
 * The returned name embeds VU, iteration, size and a random suffix so
 * concurrent uploads never collide.
 */
export const buildAsset = ({
  name = 'dummy.zip',
  size = 50,
  unit = 'KB',
}: {
  name?: string;
  size?: number;
  unit?: types.AssetUnit;
}): types.Asset => {
  // Each generator delegates to the next-smaller unit (GB -> MB -> KB -> bytes).
  const gen = {
    KB: (s: number): bytes => {
      return k6_randomBytes(s * 1024);
    },
    MB: (s: number): bytes => {
      return gen.KB(s * 1024);
    },
    GB: (s: number): bytes => {
      return gen.MB(s * 1024);
    },
  };
  const fileBaseName = name.split('/').reverse()[0];
  // Split on the LAST dot: multi-dot names keep their full stem and names
  // without any extension fall back to 'zip'. (Fixed: previously a dotless
  // basename leaked wholesale into the extension, and 'a.b.zip' lost '.b'.)
  const dotIndex = fileBaseName.lastIndexOf('.');
  const fileName = dotIndex > 0 ? fileBaseName.slice(0, dotIndex) : fileBaseName;
  const fileExtension = dotIndex > 0 ? fileBaseName.slice(dotIndex + 1) : 'zip';
  return {
    name: `${fileName}-${__VU}-${__ITER}-${unit}-${size}-${randomString()}.${fileExtension}`,
    bytes: gen[unit](size),
  };
};

View File

@@ -1,27 +0,0 @@
import {sleep, check} from 'k6';
import {Options} from "k6/options";
import {defaults, api} from "./lib";
// Binary fixture loaded once at init time ('b' = open in binary mode).
const files = {
  'kb_50.jpg': open('./_files/kb_50.jpg', 'b'),
}
// 100 virtual users, 100 iterations total; self-signed TLS certs accepted.
export let options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 100,
  vus: 100,
};
// One iteration: upload the 50 KB fixture under a VU/iteration-unique name,
// verify the upload succeeded, then pause for a second.
export default () => {
  const targetName = `kb_50-${__VU}-${__ITER}.jpg`;
  const res = api.uploadFile(defaults.accounts.einstein, files['kb_50.jpg'], targetName);
  check(res, {
    'status is 204': () => res.status === 204,
  });
  sleep(1);
};

View File

@@ -0,0 +1,90 @@
import { Options, Threshold } from 'k6/options';
import { times } from 'lodash';
import { playbook, types, utils } from '../../../../../../lib';
// Size spec of one asset to generate and upload.
interface File {
  size: number;
  unit: types.AssetUnit;
}
// Plays this scenario needs; instantiated once per script by the caller.
interface Plays {
  davUpload: playbook.dav.Upload;
  davPropfind: playbook.dav.Propfind;
  davCreate: playbook.dav.Create;
  davDelete: playbook.dav.Delete;
}
/**
 * Builds empty k6 threshold entries so each play's trend metric is broken out
 * per asset-size sub-metric (tag 'asset', e.g. 'KB1') in the summary.
 */
export const options = ({ files, plays }: { files: File[]; plays: Plays }): Options => {
  const thresholds: { [name: string]: Threshold[] } = {};
  for (const file of files) {
    const assetTag = `${file.unit}${file.size}`;
    thresholds[`${plays.davUpload.metricTrendName}{asset:${assetTag}}`] = [];
    thresholds[`${plays.davCreate.metricTrendName}{asset:${assetTag}}`] = [];
    thresholds[`${plays.davDelete.metricTrendName}{asset:${assetTag}}`] = [];
  }
  return { thresholds };
};
/**
 * Deep-PROPFIND scenario: for every file spec, create a randomly nested
 * folder chain (one MKCOL per level), upload the asset into the deepest
 * folder, then PROPFIND once and finally delete each top-level folder.
 */
export default ({
  files,
  account,
  credential,
  plays,
}: {
  plays: Plays;
  files: File[];
  account: types.Account;
  credential: types.Credential;
}): void => {
  const uploaded: { id: string; name: string; folder: string }[] = [];

  for (const file of files) {
    const assetId = file.unit + file.size.toString();
    const asset = utils.buildAsset({
      name: `${account.login}-dummy.zip`,
      unit: file.unit,
      size: file.size,
    });

    // Build a 1-10 level deep random folder chain, creating each level as we go.
    const segments: string[] = [];
    for (const segment of times(utils.randomNumber({ min: 1, max: 10 }), () => utils.randomString())) {
      segments.push(segment);
      plays.davCreate.exec({
        credential,
        path: segments.join('/'),
        userName: account.login,
        tags: { asset: assetId },
      });
    }
    const folder = segments.join('/');

    plays.davUpload.exec({
      credential,
      asset,
      path: folder,
      userName: account.login,
      tags: { asset: assetId },
    });
    uploaded.push({ id: assetId, name: asset.name, folder });
  }

  // One listing over everything that was created.
  plays.davPropfind.exec({
    credential,
    userName: account.login,
  });

  // Deleting the top-level folder removes the whole nested chain beneath it.
  for (const file of uploaded) {
    plays.davDelete.exec({
      credential,
      userName: account.login,
      path: file.folder.split('/')[0],
      tags: { asset: file.id },
    });
  }
};

View File

@@ -0,0 +1,27 @@
import { Options } from 'k6/options';
import { times } from 'lodash';
import { auth, defaults, playbook, types, utils } from '../../../../../../lib';
import { default as propfind, options as propfindOptions } from './deep.lib';
// Scenario: upload 1000 x 1 KB files spread over randomly nested folders,
// then run one PROPFIND over them through the WebDAV API.
const files: {
  size: number;
  unit: types.AssetUnit;
}[] = times(1000, () => ({ size: 1, unit: 'KB' }));
// Authenticates as the seeded 'einstein' account (or an ENV-provided login).
const authFactory = new auth(utils.buildAccount({ login: defaults.ACCOUNTS.EINSTEIN }));
// One play instance per WebDAV verb so each records its own metrics.
const plays = {
  davUpload: new playbook.dav.Upload(),
  davCreate: new playbook.dav.Create(),
  davPropfind: new playbook.dav.Propfind(),
  davDelete: new playbook.dav.Delete(),
};
export const options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 3,
  vus: 1,
  ...propfindOptions({ plays, files }), // per-asset threshold entries
};
export default (): void => propfind({ files, plays, credential: authFactory.credential, account: authFactory.account });

View File

@@ -0,0 +1,71 @@
import { Options, Threshold } from 'k6/options';
import { playbook, types, utils } from '../../../../../../lib';
// Size spec of one asset to generate and upload.
interface File {
  size: number;
  unit: types.AssetUnit;
}
// Plays this scenario needs; instantiated once per script by the caller.
interface Plays {
  davUpload: playbook.dav.Upload;
  davPropfind: playbook.dav.Propfind;
  davDelete: playbook.dav.Delete;
}
/**
 * Builds empty k6 threshold entries so each play's trend metric is broken out
 * per asset-size sub-metric (tag 'asset', e.g. 'KB1') in the summary.
 */
export const options = ({ files, plays }: { files: File[]; plays: Plays }): Options => {
  const thresholds: { [name: string]: Threshold[] } = {};
  for (const file of files) {
    const assetTag = `${file.unit}${file.size}`;
    thresholds[`${plays.davUpload.metricTrendName}{asset:${assetTag}}`] = [];
    thresholds[`${plays.davDelete.metricTrendName}{asset:${assetTag}}`] = [];
  }
  return { thresholds };
};
/**
 * Flat-PROPFIND scenario: upload every asset into the root collection,
 * PROPFIND once over the listing, then delete each uploaded file.
 */
export default ({
  files,
  account,
  credential,
  plays,
}: {
  plays: Plays;
  files: File[];
  account: types.Account;
  credential: types.Credential;
}): void => {
  const uploaded: { id: string; name: string }[] = [];

  for (const file of files) {
    const assetId = file.unit + file.size.toString();
    const asset = utils.buildAsset({
      name: `${account.login}-dummy.zip`,
      unit: file.unit,
      size: file.size,
    });
    plays.davUpload.exec({
      credential,
      asset,
      userName: account.login,
      tags: { asset: assetId },
    });
    uploaded.push({ id: assetId, name: asset.name });
  }

  // One listing over everything that was uploaded.
  plays.davPropfind.exec({
    credential,
    userName: account.login,
  });

  for (const file of uploaded) {
    plays.davDelete.exec({
      credential,
      userName: account.login,
      path: file.name,
      tags: { asset: file.id },
    });
  }
};

View File

@@ -0,0 +1,26 @@
import { Options } from 'k6/options';
import { times } from 'lodash';
import { auth, defaults, playbook, types, utils } from '../../../../../../lib';
import { default as propfind, options as propfindOptions } from './flat.lib';
// Scenario: upload 1000 x 1 KB files into a single (flat) directory, then run
// one PROPFIND over the listing through the WebDAV API.
const files: {
  size: number;
  unit: types.AssetUnit;
}[] = times(1000, () => ({ size: 1, unit: 'KB' }));
// Authenticates as the seeded 'einstein' account (or an ENV-provided login).
const authFactory = new auth(utils.buildAccount({ login: defaults.ACCOUNTS.EINSTEIN }));
// One play instance per WebDAV verb so each records its own metrics.
const plays = {
  davUpload: new playbook.dav.Upload(),
  davPropfind: new playbook.dav.Propfind(),
  davDelete: new playbook.dav.Delete(),
};
export const options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 3,
  vus: 1,
  ...propfindOptions({ plays, files }), // per-asset threshold entries
};
export default (): void => propfind({ files, plays, credential: authFactory.credential, account: authFactory.account });

View File

@@ -0,0 +1,33 @@
import { Options } from 'k6/options';
import { auth, defaults, playbook, types, utils } from '../../../../../../lib';
import { default as upDownDelete, options as upDownDeleteOptions } from './shared.lib';
// Scenario: upload, download and delete files of several sizes in one
// directory. NOTE(review): the sizes below sum to roughly 1.55 GB per
// iteration, not the 500 MB the original comment claimed — confirm the
// intended workload.
const files: {
  size: number;
  unit: types.AssetUnit;
}[] = [
  { size: 50, unit: 'KB' },
  { size: 500, unit: 'KB' },
  { size: 5, unit: 'MB' },
  { size: 50, unit: 'MB' },
  { size: 500, unit: 'MB' },
  { size: 1, unit: 'GB' },
];
// Authenticates as the seeded 'einstein' account (or an ENV-provided login).
const authFactory = new auth(utils.buildAccount({ login: defaults.ACCOUNTS.EINSTEIN }));
const plays = {
  davUpload: new playbook.dav.Upload(),
  davDownload: new playbook.dav.Download(),
  davDelete: new playbook.dav.Delete(),
};
export const options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 3,
  vus: 1,
  ...upDownDeleteOptions({ plays, files }), // per-asset threshold entries
};
export default (): void =>
  upDownDelete({ files, plays, credential: authFactory.credential, account: authFactory.account });

View File

@@ -0,0 +1,31 @@
import { Options } from 'k6/options';
import { times } from 'lodash';
import { auth, defaults, playbook, types, utils } from '../../../../../../lib';
import { default as upDownDelete, options as upDownDeleteOptions } from './shared.lib';
// Scenario: upload, download and delete many files in one directory
// (100 x 500 KB + 50 x 5 MB + 10 x 25 MB — roughly 550 MB per iteration).
const files: {
  size: number;
  unit: types.AssetUnit;
}[] = [
  ...times(100, () => ({ size: 500, unit: 'KB' as types.AssetUnit })),
  ...times(50, () => ({ size: 5, unit: 'MB' as types.AssetUnit })),
  ...times(10, () => ({ size: 25, unit: 'MB' as types.AssetUnit })),
];
// Authenticates as the seeded 'einstein' account (or an ENV-provided login).
const authFactory = new auth(utils.buildAccount({ login: defaults.ACCOUNTS.EINSTEIN }));
const plays = {
  davUpload: new playbook.dav.Upload(),
  davDownload: new playbook.dav.Download(),
  davDelete: new playbook.dav.Delete(),
};
export const options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 3,
  vus: 1,
  ...upDownDeleteOptions({ plays, files }), // per-asset threshold entries
};
export default (): void =>
  upDownDelete({ files, plays, credential: authFactory.credential, account: authFactory.account });

View File

@@ -0,0 +1,76 @@
import { Options, Threshold } from 'k6/options';
import { playbook, types, utils } from '../../../../../../lib';
// Size spec of one asset to generate and upload.
interface File {
  size: number;
  unit: types.AssetUnit;
}
// Plays this scenario needs; instantiated once per script by the caller.
interface Plays {
  davUpload: playbook.dav.Upload;
  davDownload: playbook.dav.Download;
  davDelete: playbook.dav.Delete;
}
/**
 * Builds empty k6 threshold entries so each play's trend metric is broken out
 * per asset-size sub-metric (tag 'asset', e.g. 'KB50') in the summary.
 */
export const options = ({ files, plays }: { files: File[]; plays: Plays }): Options => {
  const thresholds: { [name: string]: Threshold[] } = {};
  for (const file of files) {
    const assetTag = `${file.unit}${file.size}`;
    thresholds[`${plays.davUpload.metricTrendName}{asset:${assetTag}}`] = [];
    thresholds[`${plays.davDownload.metricTrendName}{asset:${assetTag}}`] = [];
    thresholds[`${plays.davDelete.metricTrendName}{asset:${assetTag}}`] = [];
  }
  return { thresholds };
};
/**
 * Up/down/delete scenario: upload every asset, then download each one, then
 * delete each one — all tagged with the asset's size id (e.g. 'KB50').
 */
export default ({
  files,
  account,
  credential,
  plays,
}: {
  plays: Plays;
  files: File[];
  account: types.Account;
  credential: types.Credential;
}): void => {
  const uploaded: { id: string; name: string }[] = [];

  // Phase 1: upload.
  for (const file of files) {
    const assetId = file.unit + file.size.toString();
    const asset = utils.buildAsset({
      name: `${account.login}-dummy.zip`,
      unit: file.unit,
      size: file.size,
    });
    plays.davUpload.exec({
      credential,
      asset,
      userName: account.login,
      tags: { asset: assetId },
    });
    uploaded.push({ id: assetId, name: asset.name });
  }

  // Phase 2: download everything that was uploaded.
  for (const file of uploaded) {
    plays.davDownload.exec({
      credential,
      userName: account.login,
      path: file.name,
      tags: { asset: file.id },
    });
  }

  // Phase 3: clean up.
  for (const file of uploaded) {
    plays.davDelete.exec({
      credential,
      userName: account.login,
      path: file.name,
      tags: { asset: file.id },
    });
  }
};

View File

@@ -0,0 +1,44 @@
import { Options } from 'k6/options';
import { times } from 'lodash';
import { auth, defaults, playbook, types, utils } from '../../../../../../lib';
import { default as upDownDelete, options as upDownDeleteOptions } from './shared.lib';
// Scenario: 10 VUs in parallel; each iteration provisions a fresh user (as
// admin), runs the simple upload/download/delete flow as that user, then
// deletes the user again.
const files: {
  size: number;
  unit: types.AssetUnit;
}[] = times(10, () => ({ size: 1, unit: 'KB' as types.AssetUnit }));
// Admin credential used for user provisioning (or an ENV-provided login).
const authFactory = new auth(utils.buildAccount({ login: defaults.ACCOUNTS.ADMIN }));
const plays = {
  davUpload: new playbook.dav.Upload(),
  davDownload: new playbook.dav.Download(),
  davDelete: new playbook.dav.Delete(),
  usersCreate: new playbook.users.Create(),
  usersDelete: new playbook.users.Delete(),
};
export const options: Options = {
  insecureSkipTLSVerify: true,
  iterations: 10,
  vus: 10,
  ...upDownDeleteOptions({ plays, files }), // per-asset threshold entries
};
// One iteration: create a throw-away user, run the up/down/delete flow as
// that user, then remove the user with the admin credential.
export default (): void => {
  const userName = utils.randomString();
  const password = utils.randomString();

  // Provision the user with the admin credential.
  plays.usersCreate.exec({
    userName,
    password,
    email: `${userName}@owncloud.com`,
    credential: authFactory.credential,
  });

  // Run the scenario authenticated as the new user.
  const userAuthFactory = new auth({ login: userName, password });
  upDownDelete({ files, plays, credential: userAuthFactory.credential, account: userAuthFactory.account });

  // Clean up the throw-away user.
  plays.usersDelete.exec({ userName, credential: authFactory.credential });
};

File diff suppressed because it is too large Load Diff