Compare commits


6 Commits
0.8.2 ... 0.4.2

Author  SHA1        Message                                                       Date
Oliver  b18f360daf  0.4.2                                                         2021-08-02 08:43:04 +10:00
Oliver  20cc952982  Merge pull request #1887 from matmair/settings-safety         2021-08-02 08:42:34 +10:00
                    settings fixes
                    (cherry picked from commit d154ca08ea)
Oliver  cd39fd1dc2  Merge pull request #1890 from matmair/fix-for-1888            2021-08-02 08:42:26 +10:00
                    catch connection errors in exchange update
                    (cherry picked from commit db57e9516b)
Oliver  0e59c15773  0.4.1                                                         2021-07-30 11:26:53 +10:00
Oliver  0a73032950  Merge pull request #1877 from eeintech/fix_search_js          2021-07-29 08:27:49 +10:00
                    Fixed missing comma propagating to translated JS files
                    (cherry picked from commit 2009773d9d)
Oliver  a7229b5b0b  Merge pull request #1874 from SchrodingersGat/docker-dev-fix  2021-07-28 22:50:31 +10:00
                    Copy static files when starting dev server
                    (cherry picked from commit 50eb70f538)
921 changed files with 145522 additions and 347331 deletions

9  .coveragerc  Normal file

@@ -0,0 +1,9 @@
[run]
source = ./InvenTree
omit =
InvenTree/manage.py
InvenTree/setup.py
InvenTree/InvenTree/middleware.py
InvenTree/InvenTree/utils.py
InvenTree/InvenTree/wsgi.py
InvenTree/users/apps.py
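
The `[run]` block above tells coverage.py to measure only code under `./InvenTree` and to skip the listed files. A minimal sketch of how the same settings are picked up programmatically, assuming the `coverage` package is installed and `.coveragerc` sits in the working directory (illustration only, not part of the diff):

```python
import coverage

# Coverage() reads .coveragerc from the current directory by default,
# so the [run] source/omit settings above apply automatically.
cov = coverage.Coverage()
cov.start()

# ... exercise the code under test here (e.g. run the Django test suite) ...

cov.stop()
cov.save()
cov.report()  # reports files under ./InvenTree, minus the omitted ones
```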

View File

@@ -1,25 +0,0 @@
env:
commonjs: false
browser: true
es2021: true
jquery: true
extends:
- google
parserOptions:
ecmaVersion: 12
rules:
no-var: off
guard-for-in: off
no-trailing-spaces: off
camelcase: off
padded-blocks: off
prefer-const: off
max-len: off
require-jsdoc: off
valid-jsdoc: off
no-multiple-empty-lines: off
comma-dangle: off
prefer-spread: off
indent:
- error
- 4

2  .gitattributes  vendored

@@ -8,4 +8,4 @@
*.yaml text
*.conf text
*.sh text eol=lf
*.js text
*.js text

6  .github/CODEOWNERS  vendored

@@ -1,6 +0,0 @@
# General owner is the maintainers team
* @SchrodingersGat
# plugins are co-owned
/InvenTree/plugin/ @SchrodingersGat @matmair
/InvenTree/plugins/ @SchrodingersGat @matmair

2  .github/FUNDING.yml  vendored

@@ -1,2 +0,0 @@
patreon: inventree
ko_fi: inventree

View File

@@ -1,47 +0,0 @@
---
name: Bug
about: Create a bug report to help us improve InvenTree!
title: "[BUG] Enter bug description"
labels: bug, question
assignees: ''
---
<!---
Everything inside these brackets is hidden - please remove them when you fill out the information.
--->
**Describe the bug**
<!---
A clear and concise description of what the bug is.
--->
**Steps to Reproduce**
Steps to reproduce the behavior:
<!---
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
--->
**Expected behavior**
<!---
A clear and concise description of what you expected to happen.
--->
<!---
**Screenshots**
If applicable, add screenshots to help explain your problem.
--->
**Deployment Method**
- [ ] Docker
- [ ] Bare Metal
**Version Information**
<!---
You can get this by going to the "About InvenTree" section in the upper right corner and clicking on "copy version information"
--->

View File

@@ -1,26 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FR]"
labels: enhancement
assignees: ''
---
**Is your feature request the result of a bug?**
Please link it here.
**Problem**
A clear and concise description of what the problem is. e.g. I'm always frustrated when [...]
**Suggested solution**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Examples of other systems**
Show how other software handles your FR if you have examples.
**Do you want to develop this?**
If so please describe briefly how you would like to implement it (so we can give advice) and if you have experience in the needed technology (you do not need to be a pro - this is just information for us).

View File

@@ -1,17 +0,0 @@
name: 'Migration test'
description: 'Run migration test sequence'
author: 'inventree'
runs:
using: 'composite'
steps:
- name: Data Import Export
shell: bash
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json

View File

@@ -1,90 +0,0 @@
name: 'Setup Environment'
description: 'Setup the environment for general InvenTree tests'
author: 'inventree'
inputs:
python:
required: false
description: 'Install python.'
default: 'true'
npm:
required: false
description: 'Install npm.'
default: 'false'
install:
required: false
description: 'Install the InvenTree requirements?'
default: 'false'
dev-install:
required: false
description: 'Install the InvenTree development requirements?'
default: 'false'
update:
required: false
description: 'Should a full update cycle be run?'
default: 'false'
apt-dependency:
required: false
description: 'Extra APT package for install.'
pip-dependency:
required: false
description: 'Extra python package for install.'
runs:
using: 'composite'
steps:
- name: Checkout Code
uses: actions/checkout@v2
# Python installs
- name: Set up Python ${{ env.python_version }}
if: ${{ inputs.python == 'true' }}
uses: actions/setup-python@v2
with:
python-version: ${{ env.python_version }}
cache: pip
- name: Install Base Python Dependencies
if: ${{ inputs.python == 'true' }}
shell: bash
run: |
python3 -m pip install -U pip
pip3 install invoke wheel
- name: Install Specific Python Dependencies
if: ${{ inputs.pip-dependency }}
shell: bash
run: pip3 install ${{ inputs.pip-dependency }}
# NPM installs
- name: Install node.js ${{ env.node_version }}
if: ${{ inputs.npm == 'true' }}
uses: actions/setup-node@v2
with:
node-version: ${{ env.node_version }}
cache: 'npm'
- name: Install npm packages
if: ${{ inputs.npm == 'true' }}
shell: bash
run: npm install
# OS installs
- name: Install OS Dependencies
if: ${{ inputs.apt-dependency }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install ${{ inputs.apt-dependency }}
# Invoke commands
- name: Install dev requirements
if: ${{ inputs.dev-install == 'true' || inputs.install == 'true' }}
shell: bash
run: pip install -r requirements-dev.txt
- name: Run invoke install
if: ${{ inputs.install == 'true' }}
shell: bash
run: invoke install
- name: Run invoke update
if: ${{ inputs.update == 'true' }}
shell: bash
run: invoke update

31  .github/release.yml  vendored

@@ -1,31 +0,0 @@
# .github/release.yml
changelog:
exclude:
labels:
- translation
categories:
- title: Breaking Changes
labels:
- Semver-Major
- breaking
- title: Security Patches
labels:
- security
- title: New Features
labels:
- Semver-Minor
- enhancement
- title: Bug Fixes
labels:
- Semver-Patch
- bug
- title: Devops / Setup Changes
labels:
- docker
- setup
- demo
- CI
- title: Other Changes
labels:
- "*"

View File

@@ -1,37 +0,0 @@
name: Check Translations
on:
push:
branches:
- l10
pull_request:
branches:
- l10
jobs:
check:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py

60  .github/workflows/coverage.yaml  vendored  Normal file

@@ -0,0 +1,60 @@
# Perform CI checks, and calculate code coverage
name: SQLite
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
# Run tests on SQLite database
# These tests are used for code coverage analysis
coverage:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
invoke static
- name: Coverage Tests
run: |
invoke coverage
- name: Data Import Export
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
rm test_db.sqlite
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
- name: Upload Coverage Report
run: coveralls

View File

@@ -1,113 +0,0 @@
# Build, test and push InvenTree docker image
# This workflow runs under any of the following conditions:
#
# - Push to the master branch
# - Publish release
#
# The following actions are performed:
#
# - Check that the version number matches the current branch or tag
# - Build the InvenTree docker image
# - Run suite of unit tests against the build image
# - Push the compiled, tested image to dockerhub
name: Docker
on:
release:
types: [published]
push:
branches:
- 'master'
jobs:
# Build the docker image
build:
runs-on: ubuntu-latest
permissions:
id-token: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Version Check
run: |
pip install requests
python3 ci/version_check.py
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
- name: Build Docker Image
# Build the development docker image (using docker-compose.yml)
run: |
docker-compose build
- name: Run Unit Tests
run: |
docker-compose run inventree-dev-server invoke update
docker-compose run inventree-dev-server invoke setup-dev
docker-compose up -d
docker-compose run inventree-dev-server invoke wait
docker-compose run inventree-dev-server invoke test
docker-compose down
- name: Check Data Directory
# The following file structure should have been created by the docker image
run: |
test -d data
test -d data/env
test -d data/pgdb
test -d data/media
test -d data/static
test -d data/plugins
test -f data/config.yaml
test -f data/plugins.txt
test -f data/secret_key.txt
- name: Set up QEMU
if: github.event_name != 'pull_request'
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
if: github.event_name != 'pull_request'
uses: docker/setup-buildx-action@v1
- name: Set up cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@48866aa521d8bf870604709cd43ec2f602d03ff2
- name: Login to Dockerhub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract Docker metadata
if: github.event_name != 'pull_request'
id: meta
uses: docker/metadata-action@69f6fc9d46f2f8bf0d5491e4aabe0bb8c6a4678a
with:
images: |
inventree/inventree
- name: Build and Push
id: build-and-push
if: github.event_name != 'pull_request'
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
tags: ${{ env.docker_tags }}
build-args: |
commit_hash=${{ env.git_commit_hash }}
commit_date=${{ env.git_commit_date }}
- name: Sign the published image
if: github.event_name != 'pull_request'
env:
COSIGN_EXPERIMENTAL: "true"
run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}
- name: Push to Stable Branch
uses: ad-m/github-push-action@master
if: env.stable_release == 'true' && github.event_name != 'pull_request'
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: stable
force: true

37  .github/workflows/docker_build.yaml  vendored  Normal file

@@ -0,0 +1,37 @@
# Build and push latest docker image on push to master branch
name: Docker Build
on:
push:
branches:
- 'master'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
repository: inventree/inventree
tags: inventree/inventree:latest
- name: Image Digest
run: echo ${{ steps.docker_build.outputs.digest }}

33  .github/workflows/docker_publish.yaml  vendored  Normal file

@@ -0,0 +1,33 @@
# Publish docker images to dockerhub
name: Docker Publish
on:
release:
types: [published]
jobs:
publish_image:
name: Push InvenTree web server image to dockerhub
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
repository: inventree/inventree
tags: inventree/inventree:${{ github.event.release.tag_name }}

28  .github/workflows/javascript.yaml  vendored  Normal file

@@ -0,0 +1,28 @@
# Check javascript template files
name: Javascript Templates
on:
push:
branches:
- master
pull_request:
branches-ignore:
- l10*
jobs:
javascript:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Check Files
run: |
cd ci
python check_js_templates.py

67  .github/workflows/mysql.yaml  vendored  Normal file

@@ -0,0 +1,67 @@
# MySQL Unit Testing
name: MySQL
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
test:
runs-on: ubuntu-latest
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.mysql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: root
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 3306
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
services:
mysql:
image: mysql:latest
env:
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_DATABASE: inventree
MYSQL_USER: inventree
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
ports:
- 3306:3306
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get install mysql-server libmysqlclient-dev
pip3 install invoke
pip3 install mysqlclient
invoke install
- name: Run Tests
run: invoke test
- name: Data Import Export
run: |
invoke migrate
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json

63  .github/workflows/postgresql.yaml  vendored  Normal file

@@ -0,0 +1,63 @@
# PostgreSQL Unit Testing
name: PostgreSQL
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
test:
runs-on: ubuntu-latest
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
services:
postgres:
image: postgres
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get install libpq-dev
pip3 install invoke
pip3 install psycopg2
invoke install
- name: Run Tests
run: invoke test
- name: Data Import Export
run: |
invoke migrate
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json

49  .github/workflows/python.yaml  vendored  Normal file

@@ -0,0 +1,49 @@
# Run python library tests whenever code is pushed to master
name: Python Bindings
on:
push:
branches:
- master
pull_request:
branches-ignore:
- l10*
jobs:
python:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: 'sqlite3'
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Install InvenTree
run: |
sudo apt-get update
sudo apt-get install python3-dev python3-pip python3-venv
pip3 install invoke
invoke install
invoke migrate
- name: Download Python Code
run: |
git clone --depth 1 https://github.com/inventree/inventree-python ./inventree-python
- name: Start Server
run: |
invoke import-records -f ./inventree-python/test/test_data.json
invoke server -a 127.0.0.1:8000 &
sleep 60
- name: Run Tests
run: |
cd inventree-python
invoke test

View File

@@ -1,272 +0,0 @@
# Checks for each PR / push
name: QC checks
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
env:
python_version: 3.9
node_version: 16
# The OS version must be set per job
server_start_sleep: 60
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: inventree
INVENTREE_MEDIA_ROOT: ../test_inventree_media
INVENTREE_STATIC_ROOT: ../test_inventree_static
jobs:
pep_style:
name: Style [Python]
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
dev-install: true
- name: Run flake8
run: flake8 InvenTree --extend-ignore=D
javascript:
name: Style [JS]
runs-on: ubuntu-20.04
needs: pep_style
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
install: true
- name: Check Templated JS Files
run: |
cd ci
python3 check_js_templates.py
- name: Lint Javascript Files
run: |
python InvenTree/manage.py prerender
npx eslint InvenTree/InvenTree/static_i18n/i18n/*.js
html:
name: Style [HTML]
runs-on: ubuntu-20.04
needs: pep_style
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
install: true
- name: Check HTML Files
run: npx markuplint **/templates/*.html
pre-commit:
name: Style [pre-commit]
runs-on: ubuntu-20.04
needs: pep_style
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@v2
with:
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Run pre-commit Checks
uses: pre-commit/action@v2.0.3
- name: Check Version
run: |
pip install requests
python3 ci/version_check.py
python:
name: Tests - inventree-python
runs-on: ubuntu-20.04
needs: pre-commit
env:
wrapper_name: inventree-python
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
INVENTREE_PYTHON_TEST_USERNAME: testuser
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Download Python Code For `${{ env.wrapper_name }}`
run: git clone --depth 1 https://github.com/inventree/${{ env.wrapper_name }} ./${{ env.wrapper_name }}
- name: Start InvenTree Server
run: |
invoke delete-data -f
invoke import-fixtures
invoke server -a 127.0.0.1:12345 &
invoke wait
- name: Run Tests For `${{ env.wrapper_name }}`
run: |
cd ${{ env.wrapper_name }}
invoke check-server
coverage run -m unittest discover -s test/
docstyle:
name: Style [Python Docstrings]
runs-on: ubuntu-20.04
needs: pre-commit
continue-on-error: true
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
- name: Run flake8
run: flake8 InvenTree --statistics
coverage:
name: Tests - DB [SQLite] + Coverage
runs-on: ubuntu-20.04
needs: ['javascript', 'html', 'pre-commit']
continue-on-error: true # continue if a step fails so that coverage gets pushed
env:
INVENTREE_DB_NAME: ./inventree.sqlite
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_PLUGINS_ENABLED: true
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Coverage Tests
run: invoke coverage
- name: Data Export Test
uses: ./.github/actions/migration
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
- name: Upload Coverage Report
run: coveralls
postgres:
name: Tests - DB [PostgreSQL]
runs-on: ubuntu-20.04
needs: ['javascript', 'html', 'pre-commit']
if: github.event_name == 'push'
env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_CACHE_HOST: localhost
INVENTREE_PLUGINS_ENABLED: true
services:
postgres:
image: postgres
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
redis:
image: redis
ports:
- 6379:6379
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg2 django-redis>=5.0.0
dev-install: true
update: true
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration
mysql:
name: Tests - DB [MySQL]
runs-on: ubuntu-20.04
needs: ['javascript', 'html', 'pre-commit']
if: github.event_name == 'push'
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.mysql
INVENTREE_DB_USER: root
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 3306
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: true
services:
mysql:
image: mysql:latest
env:
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_DATABASE: ${{ env.INVENTREE_DB_NAME }}
MYSQL_USER: inventree
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
ports:
- 3306:3306
steps:
- uses: actions/checkout@v1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libmysqlclient-dev
pip-dependency: mysqlclient
dev-install: true
update: true
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration

View File

@@ -1,32 +0,0 @@
# Runs on releases
name: Publish release notes
on:
release:
types: [published]
jobs:
tweet:
runs-on: ubuntu-latest
steps:
- uses: Eomm/why-don-t-you-tweet@v1
with:
tweet-message: "InvenTree release ${{ github.event.release.tag_name }} is out now! Release notes: ${{ github.event.release.html_url }} #opensource #inventree"
env:
TWITTER_CONSUMER_API_KEY: ${{ secrets.TWITTER_CONSUMER_API_KEY }}
TWITTER_CONSUMER_API_SECRET: ${{ secrets.TWITTER_CONSUMER_API_SECRET }}
TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
reddit:
runs-on: ubuntu-latest
steps:
- uses: bluwy/release-for-reddit-action@v1
with:
username: ${{ secrets.REDDIT_USERNAME }}
password: ${{ secrets.REDDIT_PASSWORD }}
app-id: ${{ secrets.REDDIT_APP_ID }}
app-secret: ${{ secrets.REDDIT_APP_SECRET }}
subreddit: InvenTree
title: "InvenTree version ${{ github.event.release.tag_name }} released"
comment: "${{ github.event.release.body }}"

View File

@@ -1,25 +0,0 @@
# Marks all issues that do not receive activity as stale, starting in 2022
name: Mark stale issues and pull requests
on:
schedule:
- cron: '24 11 * * *'
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v3
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue seems stale. Please react to show this is still important.'
stale-pr-message: 'This PR seems stale. Please react to show this is still important.'
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
start-date: '2022-01-01'
exempt-all-milestones: true

34  .github/workflows/style.yaml  vendored  Normal file

@@ -0,0 +1,34 @@
name: Style Checks
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
style:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: [3.7]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install deps
run: |
pip install flake8==3.8.3
pip install pep8-naming==0.11.1
- name: flake8
run: |
flake8 InvenTree

View File

@@ -19,31 +19,42 @@ jobs:
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v1
with:
python-version: 3.9
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
- name: Make Translations
run: |
invoke translate
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout -b l10_local
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10
force: true
- uses: actions/checkout@v2
- name: Get Current Translations
run: |
git fetch
git checkout origin/l10 -- `git ls-tree origin/l10 -r --name-only | grep ".po"`
git reset
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
- name: Make Translations
run: |
invoke translate
- name: stash changes
run: |
git stash
- name: Checkout Translation Branch
uses: actions/checkout@v2.3.4
with:
ref: l10
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout stash -- .
git reset
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10

View File

@@ -1,22 +0,0 @@
name: Update dependency files regularly
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup
run: pip install -r requirements-dev.txt
- name: Update requirements.txt
run: pip-compile --output-file=requirements.txt requirements.in -U
- name: Update requirements-dev.txt
run: pip-compile --generate-hashes --output-file=requirements-dev.txt requirements-dev.in -U
- uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: "[Bot] Updated dependency"
branch: dep-update

View File

@@ -1,25 +0,0 @@
# welcome new contributors
name: Welcome
on:
pull_request:
types: [opened]
issues:
types: [opened]
jobs:
run:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-message: |
Welcome to InvenTree! Please check the [contributing docs](https://inventree.readthedocs.io/en/latest/contribute/) on how to help.
If you experience setup / install issues please read all [install docs]( https://inventree.readthedocs.io/en/latest/start/intro/).
pr-message: |
This is your first PR, welcome!
Please check [Contributing](https://github.com/inventree/InvenTree/blob/master/CONTRIBUTING.md) to make sure your submission fits our general code-style and workflow.
Make sure to document why this PR is needed and to link connected issues so we can review it faster.

31  .gitignore  vendored

@@ -8,7 +8,6 @@ __pycache__/
env/
inventree-env/
./build/
.cache/
develop-eggs/
dist/
bin/
@@ -27,6 +26,7 @@ var/
.installed.cfg
*.egg
# Django stuff:
*.log
local_settings.py
@@ -36,12 +36,7 @@ local_settings.py
*.old
# Files used for testing
inventree-demo-dataset/
inventree-data/
dummy_image.*
_tmp.csv
inventree/label.pdf
inventree/label.png
# Sphinx files
docs/_build
@@ -53,7 +48,6 @@ static_i18n
# Local config file
config.yaml
plugins.txt
# Default data file
data.json
@@ -67,31 +61,10 @@ secret_key.txt
.idea/
*.code-workspace
.vscode/
.bash_history
# Coverage reports
.coverage
htmlcov/
# Temporary javascript files (used for testing)
js_tmp/
# Development files
dev/
data/
env/
# Locale stats file
locale_stats.json
# node.js
node_modules/
# maintenance locker
maintenance_mode_state.txt
# plugin dev directory
plugins/
# Compiled translation files
*.mo
dev/

View File

@@ -1,41 +0,0 @@
tasks:
- name: Setup django
before: |
export INVENTREE_DB_ENGINE='sqlite3'
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
export PIP_USER='no'
sudo apt install gettext
python3 -m venv venv
source venv/bin/activate
pip install invoke
mkdir dev
inv setup-test
gp sync-done start_server
- name: Start server
init: gp sync-await start_server
command: |
gp sync-await start_server
export INVENTREE_DB_ENGINE='sqlite3'
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
source venv/bin/activate
inv server
ports:
- port: 8000
onOpen: open-preview
github:
prebuilds:
master: true
pullRequests: false
pullRequestsFromForks: true
addBadge: true
addLabel: gitpod-ready
addCheck: false

View File

@@ -1,41 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
InvenTree/InvenTree/static/.*|
InvenTree/locale/.*
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: mixed-line-ending
- repo: https://github.com/pycqa/flake8
rev: '4.0.1'
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
'flake8-docstrings',
'flake8-string-format',
'pep8-naming ',
]
- repo: https://github.com/pycqa/isort
rev: '5.10.1'
hooks:
- id: isort
- repo: https://github.com/jazzband/pip-tools
rev: 6.6.2
hooks:
- id: pip-compile
name: pip-compile requirements-dev.in
args: [--generate-hashes, requirements-dev.in, -o, requirements-dev.txt]
files: ^requirements-dev\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [requirements.in, -o, requirements.txt]
files: ^requirements\.(in|txt)$

View File

@@ -1,200 +1,29 @@
Hi there, thank you for your interest in contributing!
Please read the contribution guidelines below, before submitting your first pull request to the InvenTree codebase.
Contributions to InvenTree are welcomed - please follow the guidelines below.
## Quickstart
## Feature Branches
The following commands will quickly configure and run a development server, complete with a demo dataset to work with:
No pushing to master! New features must be submitted in a separate branch (one branch per feature).
### Bare Metal
```bash
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
python3 -m venv env && source env/bin/activate
pip install invoke && invoke
pip install invoke && invoke setup-dev --tests
```
### Docker
```bash
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
docker compose run inventree-dev-server invoke install
docker compose run inventree-dev-server invoke setup-test
docker compose up -d
```
Read the [InvenTree setup documentation](https://inventree.readthedocs.io/en/latest/start/intro/) for a complete installation reference guide.
### Setup Devtools
Run the following command to set up all toolsets for development.
```bash
invoke setup-dev
```
*We recommend you run this command before starting to contribute. This will install and set up `pre-commit` to run some checks before each commit and help reduce the style errors.*
## Branches and Versioning
InvenTree roughly follows the [GitLab flow](https://docs.gitlab.com/ee/topics/gitlab_flow.html) branching style, to allow simple management of multiple tagged releases, short-lived branches, and development on the main branch.
### Version Numbering
InvenTree version numbering follows the [semantic versioning](https://semver.org/) specification.
### Master Branch
The HEAD of the "main" or "master" branch of InvenTree represents the current "latest" state of code development.
- All feature branches are merged into master
- All bug fixes are merged into master
**No pushing to master:** New features must be submitted as a pull request from a separate branch (one branch per feature).
### Feature Branches
Feature branches should be branched *from* the *master* branch.
- One major feature per branch / pull request
- Feature pull requests are merged back *into* the master branch
- Features *may* also be merged into a release candidate branch
### Stable Branch
The HEAD of the "stable" branch represents the latest stable release code.
- Versioned releases are merged into the "stable" branch
- Bug fix branches are made *from* the "stable" branch
#### Release Candidate Branches
- Release candidate branches are made from master, and merged into stable.
- RC branches are targeted at a major/minor version e.g. "0.5"
- When a release candidate branch is merged into *stable*, the release is tagged
#### Bugfix Branches
- If a bug is discovered in a tagged release version of InvenTree, a "bugfix" or "hotfix" branch should be made *from* that tagged release
- When approved, the branch is merged back *into* stable, with an incremented PATCH number (e.g. 0.4.1 -> 0.4.2)
- The bugfix *must* also be cherry picked into the *master* branch.
## Environment
### Target version
We are currently targeting:
| Name | Minimum version |
|---|---|
| Python | 3.9 |
| Django | 3.2 |
### Auto creating updates
The following tools can be used to auto-upgrade syntax that was deprecated in new versions:
```bash
pip install pyupgrade
pip install django-upgrade
```
To update the codebase run the following script.
```bash
pyupgrade `find . -name "*.py"`
django-upgrade --target-version 3.2 `find . -name "*.py"`
```
## Credits
If you add any new dependencies / libraries, they need to be added to [the docs](https://github.com/inventree/inventree-docs/blob/master/docs/credits.md). Please try to do so in a timely manner.
## Migration Files
## Include Migration Files
Any required migration files **must** be included in the commit, or the pull-request will be rejected. If you change the underlying database schema, make sure you run `invoke migrate` and commit the migration files before submitting the PR.
*Note: A github action checks for unstaged migration files and will reject the PR if it finds any!*
## Update Translation Files
## Unit Testing
Any PRs which update translatable strings (i.e. text strings that will appear in the web-front UI) must also update the translation (locale) files to include hooks for the translated strings.
Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage for any new features is insufficient, or the overall code coverage is decreased.
*This does not mean that all translations must be provided, but that the translation files must include locations for the translated strings to be written.*
The InvenTree code base makes use of [GitHub actions](https://github.com/features/actions) to run a suite of automated tests against the code base every time a new pull request is received. These actions include (but are not limited to):
To perform this step, simply run `invoke translate` from the top level directory before submitting the PR.
- Checking Python and Javascript code against standard style guides
- Running unit test suite
- Automated building and pushing of docker images
- Generating translation files
## Testing
The various github actions can be found in the `./github/workflows` directory
## Code Style
Submitted Python code is automatically checked against PEP style guidelines. Locally you can run `invoke style` to ensure the style checks will pass, before submitting the PR.
Please write docstrings for each function and class - we follow the [google doc-style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for python. Docstrings for general javascript code are encouraged! Docstyles are checked by `invoke style`.
Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage is decreased.
## Documentation
New features or updates to existing features should be accompanied by user documentation. A PR with associated documentation should link to the matching PR at https://github.com/inventree/inventree-docs/
## Translations
## Code Style
Any user-facing strings *must* be passed through the translation engine.
- InvenTree code is written in English
- User translatable strings are provided in English as the primary language
- Secondary language translations are provided [via Crowdin](https://crowdin.com/project/inventree)
*Note: Translation files are updated via GitHub actions - you do not need to compile translations files before submitting a pull request!*
### Python Code
For strings exposed via Python code, use the following format:
```python
from django.utils.translation import ugettext_lazy as _
user_facing_string = _('This string will be exposed to the translation engine!')
```
### Templated Strings
HTML and javascript files are passed through the django templating engine. Translatable strings are implemented as follows:
```html
{% load i18n %}
<span>{% trans "This string will be translated" %} - this string will not!</span>
```
## Github use
### Tags
The tags describe issues and PRs in multiple areas:
| Area | Name | Description |
|---|---|---|
| Type Labels | | |
| | breaking | Indicates a major update or change which breaks compatibility |
| | bug | Identifies a bug which needs to be addressed |
| | dependency | Relates to a project dependency |
| | duplicate | Duplicate of another issue or PR |
| | enhancement | This is a suggested enhancement or new feature |
| | help wanted | Assistance required |
| | invalid | This issue or PR is considered invalid |
| | inactive | Indicates lack of activity |
| | question | This is a question |
| | roadmap | This is a roadmap feature with no immediate plans for implementation |
| | security | Relates to a security issue |
| | starter | Good issue for a developer new to the project |
| | wontfix | No work will be done against this issue or PR |
| Feature Labels | | |
| | API | Relates to the API |
| | barcode | Barcode scanning and integration |
| | build | Build orders |
| | importer | Data importing and processing |
| | order | Purchase order and sales orders |
| | part | Parts |
| | plugin | Plugin ecosystem |
| | pricing | Pricing functionality |
| | report | Report generation |
| | stock | Stock item management |
| | user interface | User interface |
| Ecosystem Labels | | |
| | demo | Relates to the InvenTree demo server or dataset |
| | docker | Docker / docker-compose |
| | CI | CI / unit testing ecosystem |
| | setup | Relates to the InvenTree setup / installation process |
Submitted Python code is automatically checked against PEP style guidelines. Locally you can run `invoke style` to ensure the style checks will pass, before submitting the PR.

View File

@@ -1,140 +0,0 @@
# The InvenTree dockerfile provides two build targets:
#
# production:
# - Required files are copied into the image
# - Runs InvenTree web server under gunicorn
#
# dev:
# - Expects source directories to be loaded as a run-time volume
# - Runs InvenTree web server under django development server
# - Monitors source files for any changes, and live-reloads server
FROM python:3.9-slim as base
# Build arguments for this image
ARG commit_hash=""
ARG commit_date=""
ARG commit_tag=""
ENV PYTHONUNBUFFERED 1
# Ref: https://github.com/pyca/cryptography/issues/5776
ENV CRYPTOGRAPHY_DONT_BUILD_RUST 1
ENV INVENTREE_LOG_LEVEL="WARNING"
ENV INVENTREE_DOCKER="true"
# InvenTree paths
ENV INVENTREE_HOME="/home/inventree"
ENV INVENTREE_MNG_DIR="${INVENTREE_HOME}/InvenTree"
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/data"
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
ENV INVENTREE_PLUGIN_DIR="${INVENTREE_DATA_DIR}/plugins"
# InvenTree configuration files
ENV INVENTREE_CONFIG_FILE="${INVENTREE_DATA_DIR}/config.yaml"
ENV INVENTREE_SECRET_KEY_FILE="${INVENTREE_DATA_DIR}/secret_key.txt"
ENV INVENTREE_PLUGIN_FILE="${INVENTREE_DATA_DIR}/plugins.txt"
# Worker configuration (can be altered by user)
ENV INVENTREE_GUNICORN_WORKERS="4"
ENV INVENTREE_BACKGROUND_WORKERS="4"
# Default web server address:port
ENV INVENTREE_WEB_ADDR=0.0.0.0
ENV INVENTREE_WEB_PORT=8000
LABEL org.label-schema.schema-version="1.0" \
org.label-schema.build-date=${DATE} \
org.label-schema.vendor="inventree" \
org.label-schema.name="inventree/inventree" \
org.label-schema.url="https://hub.docker.com/r/inventree/inventree" \
org.label-schema.vcs-url="https://github.com/inventree/InvenTree.git" \
org.label-schema.vcs-ref=${commit_tag}
# RUN apt-get upgrade && apt-get update
RUN apt-get update
# Install required system packages
RUN apt-get install -y --no-install-recommends \
git gcc g++ gettext gnupg libffi-dev \
# Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#debian-11
poppler-utils libpango-1.0-0 libpangoft2-1.0-0 \
# Image format support
libjpeg-dev webp \
# SQLite support
sqlite3 \
# PostgreSQL support
libpq-dev \
# MySQL / MariaDB support
default-libmysqlclient-dev mariadb-client && \
apt-get autoclean && apt-get autoremove
# Update pip
RUN pip install --upgrade pip
# Install required base-level python packages
COPY ./docker/requirements.txt base_requirements.txt
RUN pip install --disable-pip-version-check -U -r base_requirements.txt
# InvenTree production image:
# - Copies required files from local directory
# - Installs required python packages from requirements.txt
# - Starts a gunicorn webserver
FROM base as production
ENV INVENTREE_DEBUG=False
# As .git directory is not available in production image, we pass the commit information via ENV
ENV INVENTREE_COMMIT_HASH="${commit_hash}"
ENV INVENTREE_COMMIT_DATE="${commit_date}"
# Copy source code
COPY InvenTree ${INVENTREE_HOME}/InvenTree
# Copy other key files
COPY requirements.txt ${INVENTREE_HOME}/requirements.txt
COPY tasks.py ${INVENTREE_HOME}/tasks.py
COPY docker/gunicorn.conf.py ${INVENTREE_HOME}/gunicorn.conf.py
COPY docker/init.sh ${INVENTREE_MNG_DIR}/init.sh
# Need to be running from within this directory
WORKDIR ${INVENTREE_MNG_DIR}
# Drop to the inventree user for the production image
#RUN adduser inventree
#RUN chown -R inventree:inventree ${INVENTREE_HOME}
#USER inventree
# Install InvenTree packages
RUN pip3 install --user --disable-pip-version-check -r ${INVENTREE_HOME}/requirements.txt
# Server init entrypoint
ENTRYPOINT ["/bin/bash", "./init.sh"]
# Launch the production server
# TODO: Work out why environment variables cannot be interpolated in this command
# TODO: e.g. -b ${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT} fails here
CMD gunicorn -c ./gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:8000 --chdir ./InvenTree
FROM base as dev
# The development image requires the source code to be mounted to /home/inventree/
# So from here, we don't actually "do" anything, apart from some file management
ENV INVENTREE_DEBUG=True
# Location for python virtual environment
# If the INVENTREE_PY_ENV variable is set, the entrypoint script will use it!
ENV INVENTREE_PY_ENV="${INVENTREE_DATA_DIR}/env"
WORKDIR ${INVENTREE_HOME}
# Entrypoint ensures that we are running in the python virtual environment
ENTRYPOINT ["/bin/bash", "./docker/init.sh"]
# Launch the development server
CMD ["invoke", "server", "-a", "${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT}"]

View File

@@ -1,4 +1,5 @@
"""The InvenTree module provides high-level management and functionality.
"""
The InvenTree module provides high-level management and functionality.
It provides a number of helper functions and generic classes which are used by InvenTree apps.
"""

View File

@@ -1,33 +0,0 @@
"""Admin classes"""
from import_export.resources import ModelResource
class InvenTreeResource(ModelResource):
"""Custom subclass of the ModelResource class provided by django-import-export"
Ensures that exported data are escaped to prevent malicious formula injection.
Ref: https://owasp.org/www-community/attacks/CSV_Injection
"""
def export_resource(self, obj):
"""Custom function to override default row export behaviour.
Specifically, strip illegal leading characters to prevent formula injection
"""
row = super().export_resource(obj)
illegal_start_vals = ['@', '=', '+', '-', '@', '\t', '\r', '\n']
for idx, val in enumerate(row):
if type(val) is str:
val = val.strip()
# If the value starts with certain 'suspicious' values, remove it!
while len(val) > 0 and val[0] in illegal_start_vals:
# Remove the first character
val = val[1:]
row[idx] = val
return row
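
For illustration only (not part of the diff), the same leading-character rule can be exercised standalone; the sample values and the `sanitise` helper below are hypothetical:

```python
# Hypothetical standalone demonstration of the stripping rule used above.
ILLEGAL_START_VALS = ['@', '=', '+', '-', '\t', '\r', '\n']

def sanitise(value: str) -> str:
    """Strip suspicious leading characters to defuse spreadsheet formulas."""
    value = value.strip()
    while value and value[0] in ILLEGAL_START_VALS:
        value = value[1:]
    return value

print([sanitise(v) for v in ['=SUM(A1:A10)', '+1234', 'normal part name']])
# -> ['SUM(A1:A10)', '1234', 'normal part name']
```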

View File

@@ -1,53 +1,65 @@
"""Main JSON interface views."""
"""
Main JSON interface views
"""
from django.conf import settings
from django.db import transaction
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.utils.translation import ugettext_lazy as _
from django.http import JsonResponse
from django.utils.translation import gettext_lazy as _
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters, permissions
from rest_framework import filters
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.serializers import ValidationError
from rest_framework.views import APIView
from InvenTree.mixins import ListCreateAPI
from InvenTree.permissions import RolePermission
from .status import is_worker_running
from .version import (inventreeApiVersion, inventreeInstanceName,
inventreeVersion)
from .views import AjaxView
from .version import inventreeVersion, inventreeApiVersion, inventreeInstanceName
from .status import is_worker_running
from plugins import plugins as inventree_plugins
logger = logging.getLogger("inventree")
logger.info("Loading action plugins...")
action_plugins = inventree_plugins.load_action_plugins()
class InfoView(AjaxView):
"""Simple JSON endpoint for InvenTree information.
""" Simple JSON endpoint for InvenTree information.
Use to confirm that the server is running, etc.
"""
permission_classes = [permissions.AllowAny]
def get(self, request, *args, **kwargs):
"""Serve current server information."""
data = {
'server': 'InvenTree',
'version': inventreeVersion(),
'instance': inventreeInstanceName(),
'apiVersion': inventreeApiVersion(),
'worker_running': is_worker_running(),
'plugins_enabled': settings.PLUGINS_ENABLED,
}
return JsonResponse(data)
class NotFoundView(AjaxView):
"""Simple JSON view when accessing an invalid API view."""
"""
Simple JSON view when accessing an invalid API view.
"""
permission_classes = [permissions.AllowAny]
def get(self, request, *args, **kwargs):
"""Proces an `not found` event on the API."""
data = {
'details': _('API endpoint not found'),
'url': request.build_absolute_uri(),
@@ -56,137 +68,13 @@ class NotFoundView(AjaxView):
return JsonResponse(data, status=404)
class BulkDeleteMixin:
"""Mixin class for enabling 'bulk delete' operations for various models.
Bulk delete allows for multiple items to be deleted in a single API query,
rather than using multiple API calls to the various detail endpoints.
This is implemented for two major reasons:
- Atomicity (guaranteed that either *all* items are deleted, or *none*)
- Speed (single API call and DB query)
"""
def filter_delete_queryset(self, queryset, request):
"""Provide custom filtering for the queryset *before* it is deleted"""
return queryset
def delete(self, request, *args, **kwargs):
"""Perform a DELETE operation against this list endpoint.
We expect a list of primary-key (ID) values to be supplied as a JSON object, e.g.
{
items: [4, 8, 15, 16, 23, 42]
}
"""
model = self.serializer_class.Meta.model
# Extract the items from the request body
try:
items = request.data.getlist('items', None)
except AttributeError:
items = request.data.get('items', None)
# Extract the filters from the request body
try:
filters = request.data.getlist('filters', None)
except AttributeError:
filters = request.data.get('filters', None)
if not items and not filters:
raise ValidationError({
"non_field_errors": ["List of items or filters must be provided for bulk deletion"],
})
if items and type(items) is not list:
raise ValidationError({
"items": ["'items' must be supplied as a list object"]
})
if filters and type(filters) is not dict:
raise ValidationError({
"filters": ["'filters' must be supplied as a dict object"]
})
# Keep track of how many items we deleted
n_deleted = 0
with transaction.atomic():
# Start with *all* models and perform basic filtering
queryset = model.objects.all()
queryset = self.filter_delete_queryset(queryset, request)
# Filter by provided item ID values
if items:
queryset = queryset.filter(id__in=items)
# Filter by provided filters
if filters:
queryset = queryset.filter(**filters)
n_deleted = queryset.count()
queryset.delete()
return Response(
{
'success': f"Deleted {n_deleted} items",
},
status=204
)
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create"""
...
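
As a side note, a client-side sketch of how an endpoint built on `BulkDeleteMixin` / `ListCreateDestroyAPIView` could be driven, following the request body documented in the docstring above; the URL, token and item IDs are hypothetical:

```python
import requests

# Hypothetical bulk-delete call; adjust the URL and authentication for a real server.
response = requests.delete(
    "http://localhost:8000/api/stock/",        # placeholder endpoint, not from the diff
    json={"items": [4, 8, 15, 16, 23, 42]},    # primary keys to delete atomically
    headers={"Authorization": "Token <api-token>"},
)
assert response.status_code == 204             # all items deleted, or none
```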
class APIDownloadMixin:
"""Mixin for enabling a LIST endpoint to be downloaded a file.
To download the data, add the ?export=<fmt> to the query string.
The implementing class must provided a download_queryset method,
e.g.
def download_queryset(self, queryset, export_format):
dataset = StockItemResource().export(queryset=queryset)
filedata = dataset.export(export_format)
filename = 'InvenTree_Stocktake_{date}.{fmt}'.format(
date=datetime.now().strftime("%d-%b-%Y"),
fmt=export_format
)
return DownloadFile(filedata, filename)
"""
def get(self, request, *args, **kwargs):
"""Generic handler for a download request."""
export_format = request.query_params.get('export', None)
if export_format and export_format in ['csv', 'tsv', 'xls', 'xlsx']:
queryset = self.filter_queryset(self.get_queryset())
return self.download_queryset(queryset, export_format)
else:
# Default to the parent class implementation
return super().get(request, *args, **kwargs)
def download_queryset(self, queryset, export_format):
"""This function must be implemented to provide a downloadFile request."""
raise NotImplementedError("download_queryset method not implemented!")
class AttachmentMixin:
"""Mixin for creating attachment objects, and ensuring the user information is saved correctly."""
"""
Mixin for creating attachment objects,
and ensuring the user information is saved correctly.
"""
permission_classes = [
permissions.IsAuthenticated,
RolePermission,
]
permission_classes = [permissions.IsAuthenticated]
filter_backends = [
DjangoFilterBackend,
@@ -195,7 +83,44 @@ class AttachmentMixin:
]
def perform_create(self, serializer):
"""Save the user information when a file is uploaded."""
""" Save the user information when a file is uploaded """
attachment = serializer.save()
attachment.user = self.request.user
attachment.save()
class ActionPluginView(APIView):
"""
Endpoint for running custom action plugins.
"""
permission_classes = [
permissions.IsAuthenticated,
]
def post(self, request, *args, **kwargs):
action = request.data.get('action', None)
data = request.data.get('data', None)
if action is None:
return Response({
'error': _("No action specified")
})
for plugin_class in action_plugins:
if plugin_class.action_name() == action:
plugin = plugin_class(request.user, data=data)
plugin.perform_action()
return Response(plugin.get_response())
# If we got to here, no matching action was found
return Response({
'error': _("No matching action found"),
"action": action,
})
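
Similarly, a hypothetical client call against `ActionPluginView` (the action name, URL and token below are placeholders, not taken from the diff):

```python
import requests

# The view looks up a loaded action plugin by its action_name() and runs it.
response = requests.post(
    "http://localhost:8000/api/action/",       # placeholder route
    json={"action": "example-action", "data": {"value": 42}},
    headers={"Authorization": "Token <api-token>"},
)
print(response.json())  # plugin response, or an 'error' key if no action matched
```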

View File

@@ -1,20 +1,15 @@
"""Helper functions for performing API unit tests."""
import csv
import io
import re
"""
Helper functions for performing API unit tests
"""
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.http.response import StreamingHttpResponse
from rest_framework.test import APITestCase
class UserMixin:
"""Mixin to setup a user and login for tests.
Use parameters to set username, password, email, roles and permissions.
class InvenTreeAPITestCase(APITestCase):
"""
Base class for running InvenTree API tests
"""
# User information
@@ -30,7 +25,7 @@ class UserMixin:
roles = []
def setUp(self):
"""Setup for all tests."""
super().setUp()
# Create a user to log in with
@@ -51,50 +46,44 @@ class UserMixin:
self.user.is_staff = True
self.user.save()
# Assign all roles if set
if self.roles == 'all':
self.assignRole(assign_all=True)
# else filter the roles
else:
for role in self.roles:
self.assignRole(role)
for role in self.roles:
self.assignRole(role)
if self.auto_login:
self.client.login(username=self.username, password=self.password)
def assignRole(self, role=None, assign_all: bool = False):
"""Set the user roles for the registered user."""
def assignRole(self, role):
"""
Set the user roles for the registered user
"""
# role is of the format 'rule.permission' e.g. 'part.add'
if not assign_all and role:
rule, perm = role.split('.')
rule, perm = role.split('.')
for ruleset in self.group.rule_sets.all():
if assign_all or ruleset.name == rule:
if ruleset.name == rule:
if assign_all or perm == 'view':
if perm == 'view':
ruleset.can_view = True
elif assign_all or perm == 'change':
elif perm == 'change':
ruleset.can_change = True
elif assign_all or perm == 'delete':
elif perm == 'delete':
ruleset.can_delete = True
elif assign_all or perm == 'add':
elif perm == 'add':
ruleset.can_add = True
ruleset.save()
break
class InvenTreeAPITestCase(UserMixin, APITestCase):
"""Base class for running InvenTree API tests."""
def getActions(self, url):
"""Return a dict of the 'actions' available at a given endpoint.
"""
Return a dict of the 'actions' available at a given endpoint.
Makes use of the HTTP 'OPTIONS' method to request this.
"""
response = self.client.options(url)
self.assertEqual(response.status_code, 200)
@@ -105,164 +94,50 @@ class InvenTreeAPITestCase(UserMixin, APITestCase):
return actions
def get(self, url, data=None, expected_code=200):
"""Issue a GET request."""
# Set default - see B006
if data is None:
data = {}
def get(self, url, data={}, expected_code=200):
"""
Issue a GET request
"""
response = self.client.get(url, data, format='json')
if expected_code is not None:
if response.status_code != expected_code:
print(f"Unexpected response at '{url}':")
print(response.data)
self.assertEqual(response.status_code, expected_code)
return response
def post(self, url, data=None, expected_code=None, format='json'):
"""Issue a POST request."""
response = self.client.post(url, data=data, format=format)
def post(self, url, data, expected_code=None):
"""
Issue a POST request
"""
if data is None:
data = {}
if expected_code is not None:
if response.status_code != expected_code:
print(f"Unexpected response at '{url}': status code = {response.status_code}")
if hasattr(response, 'data'):
print(response.data)
else:
print(f"(response object {type(response)} has no 'data' attribute")
self.assertEqual(response.status_code, expected_code)
return response
def delete(self, url, data=None, expected_code=None, format='json'):
"""Issue a DELETE request."""
if data is None:
data = {}
response = self.client.delete(url, data=data, format=format)
response = self.client.post(url, data=data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def patch(self, url, data, expected_code=None, format='json'):
"""Issue a PATCH request."""
response = self.client.patch(url, data=data, format=format)
def delete(self, url, expected_code=None):
"""
Issue a DELETE request
"""
response = self.client.delete(url)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def put(self, url, data, expected_code=None, format='json'):
"""Issue a PUT request."""
response = self.client.put(url, data=data, format=format)
def patch(self, url, data, files=None, expected_code=None):
"""
Issue a PATCH request
"""
if expected_code is not None:
if response.status_code != expected_code:
print(f"Unexpected response at '{url}':")
print(response.data)
self.assertEqual(response.status_code, expected_code)
return response
def options(self, url, expected_code=None):
"""Issue an OPTIONS request."""
response = self.client.options(url, format='json')
response = self.client.patch(url, data=data, files=files, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def download_file(self, url, data, expected_code=None, expected_fn=None, decode=True):
"""Download a file from the server, and return an in-memory file."""
response = self.client.get(url, data=data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
# Check that the response is of the correct type
if not isinstance(response, StreamingHttpResponse):
raise ValueError("Response is not a StreamingHttpResponse object as expected")
# Extract filename
disposition = response.headers['Content-Disposition']
result = re.search(r'attachment; filename="([\w.]+)"', disposition)
fn = result.groups()[0]
if expected_fn is not None:
self.assertEqual(expected_fn, fn)
if decode:
# Decode data and return as StringIO file object
fo = io.StringIO()
fo.name = fn
fo.write(response.getvalue().decode('UTF-8'))
else:
# Return a BytesIO file object
fo = io.BytesIO()
fo.name = fn
fo.write(response.getvalue())
fo.seek(0)
return fo
def process_csv(self, fo, delimiter=',', required_cols=None, excluded_cols=None, required_rows=None):
"""Helper function to process and validate a downloaded csv file."""
# Check that the correct object type has been passed
self.assertTrue(isinstance(fo, io.StringIO))
fo.seek(0)
reader = csv.reader(fo, delimiter=delimiter)
headers = []
rows = []
for idx, row in enumerate(reader):
if idx == 0:
headers = row
else:
rows.append(row)
if required_cols is not None:
for col in required_cols:
self.assertIn(col, headers)
if excluded_cols is not None:
for col in excluded_cols:
self.assertNotIn(col, headers)
if required_rows is not None:
self.assertEqual(len(rows), required_rows)
# Return the file data as a list of dict items, based on the headers
data = []
for row in rows:
entry = {}
for idx, col in enumerate(headers):
entry[col] = row[idx]
data.append(entry)
return data
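As a hedged illustration of how the download and CSV helpers above can be combined in a test, a short sketch follows; the endpoint, filename and column names are hypothetical:
def test_part_export(self):
    # Download an exported CSV file (decode=True returns a StringIO object)
    fo = self.download_file(
        '/api/part/?export=csv',
        {},
        expected_code=200,
        expected_fn='InvenTree_Parts.csv',
    )

    # Validate the headers, then get the rows back as a list of dicts
    data = self.process_csv(fo, required_cols=['Name', 'Description'])

    for row in data:
        self.assertTrue(len(row['Name']) > 0)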

View File

@@ -1,256 +0,0 @@
"""InvenTree API version information."""
# InvenTree API version
INVENTREE_API_VERSION = 69
"""
Increment this API version number whenever there is a significant change to the API that any clients need to know about
v69 -> 2022-08-01 : https://github.com/inventree/InvenTree/pull/3443
- Updates the PartCategory list API:
- Improve query efficiency: O(n) becomes O(1)
- Rename 'parts' field to 'part_count'
- Updates the StockLocation list API:
- Improve query efficiency: O(n) becomes O(1)
v68 -> 2022-07-27 : https://github.com/inventree/InvenTree/pull/3417
- Allows SupplierPart list to be filtered by SKU value
- Allows SupplierPart list to be filtered by MPN value
v67 -> 2022-07-25 : https://github.com/inventree/InvenTree/pull/3395
- Adds a 'requirements' endpoint for Part instance
- Provides information on outstanding order requirements for a given part
v66 -> 2022-07-24 : https://github.com/inventree/InvenTree/pull/3393
- Part images can now be downloaded from a remote URL via the API
- Company images can now be downloaded from a remote URL via the API
v65 -> 2022-07-15 : https://github.com/inventree/InvenTree/pull/3335
- Annotates 'in_stock' quantity to the SupplierPart API
v64 -> 2022-07-08 : https://github.com/inventree/InvenTree/pull/3310
- Annotate 'on_order' quantity to BOM list API
- Allow BOM List API endpoint to be filtered by "on_order" parameter
v63 -> 2022-07-06 : https://github.com/inventree/InvenTree/pull/3301
- Allow BOM List API endpoint to be filtered by "available_stock" paramater
v62 -> 2022-07-05 : https://github.com/inventree/InvenTree/pull/3296
- Allows search on BOM List API endpoint
- Allows ordering on BOM List API endpoint
v61 -> 2022-06-12 : https://github.com/inventree/InvenTree/pull/3183
- Migrate the "Convert Stock Item" form class to use the API
- There is now an API endpoint for converting a stock item to a valid variant
v60 -> 2022-06-08 : https://github.com/inventree/InvenTree/pull/3148
- Add availability data fields to the SupplierPart model
v59 -> 2022-06-07 : https://github.com/inventree/InvenTree/pull/3154
- Adds further improvements to BulkDelete mixin class
- Fixes multiple bugs in custom OPTIONS metadata implementation
- Adds 'bulk delete' for Notifications
v58 -> 2022-06-06 : https://github.com/inventree/InvenTree/pull/3146
- Adds a BulkDelete API mixin class for fast, safe deletion of multiple objects with a single API request
v57 -> 2022-06-05 : https://github.com/inventree/InvenTree/pull/3130
- Transfer PartCategoryTemplateParameter actions to the API
v56 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3123
- Expose the PartParameterTemplate model to use the API
v55 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3120
- Converts the 'StockItemReturn' functionality to make use of the API
v54 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3117
- Adds 'available_stock' annotation on the SalesOrderLineItem API
- Adds (well, fixes) 'overdue' annotation on the SalesOrderLineItem API
v53 -> 2022-06-01 : https://github.com/inventree/InvenTree/pull/3110
- Adds extra search fields to the BuildOrder list API endpoint
v52 -> 2022-05-31 : https://github.com/inventree/InvenTree/pull/3103
- Allow part list API to be searched by supplier SKU
v51 -> 2022-05-24 : https://github.com/inventree/InvenTree/pull/3058
- Adds new fields to the SalesOrderShipment model
v50 -> 2022-05-18 : https://github.com/inventree/InvenTree/pull/2912
- Implement Attachments for manufacturer parts
v49 -> 2022-05-09 : https://github.com/inventree/InvenTree/pull/2957
- Allows filtering of plugin list by 'active' status
- Allows filtering of plugin list by 'mixin' support
- Adds endpoint to "identify" or "locate" stock items and locations (using plugins)
v48 -> 2022-05-12 : https://github.com/inventree/InvenTree/pull/2977
- Adds "export to file" functionality for PurchaseOrder API endpoint
- Adds "export to file" functionality for SalesOrder API endpoint
- Adds "export to file" functionality for BuildOrder API endpoint
v47 -> 2022-05-10 : https://github.com/inventree/InvenTree/pull/2964
- Fixes barcode API error response when scanning a StockItem which does not exist
- Fixes barcode API error response when scanning a StockLocation which does not exist
v46 -> 2022-05-09
- Fixes read permissions on settings API
- Allows non-staff users to read global settings via the API
v45 -> 2022-05-08 : https://github.com/inventree/InvenTree/pull/2944
- Settings are now accessed via the API using their unique key, not their PK
- This allows the settings to be accessed without prior knowledge of the PK
v44 -> 2022-05-04 : https://github.com/inventree/InvenTree/pull/2931
- Converting more server-side rendered forms to the API
- Exposes more core functionality to API endpoints
v43 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2875
- Adds API detail endpoint for PartSalePrice model
- Adds API detail endpoint for PartInternalPrice model
v42 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2833
- Adds variant stock information to the Part and BomItem serializers
v41 -> 2022-04-26
- Fixes 'variant_of' filter for Part list endpoint
v40 -> 2022-04-19
- Adds ability to filter StockItem list by "tracked" parameter
- This checks the serial number or batch code fields
v39 -> 2022-04-18
- Adds ability to filter StockItem list by "has_batch" parameter
v38 -> 2022-04-14 : https://github.com/inventree/InvenTree/pull/2828
- Adds the ability to include stock test results for "installed items"
v37 -> 2022-04-07 : https://github.com/inventree/InvenTree/pull/2806
- Adds extra stock availability information to the BomItem serializer
v36 -> 2022-04-03
- Adds ability to filter part list endpoint by unallocated_stock argument
v35 -> 2022-04-01 : https://github.com/inventree/InvenTree/pull/2797
- Adds stock allocation information to the Part API
- Adds calculated field for "unallocated_quantity"
v34 -> 2022-03-25
- Change permissions for "plugin list" API endpoint (now allows any authenticated user)
v33 -> 2022-03-24
- Adds "plugins_enabled" information to root API endpoint
v32 -> 2022-03-19
- Adds "parameters" detail to Part API endpoint (use &parameters=true)
- Adds ability to filter PartParameterTemplate API by Part instance
- Adds ability to filter PartParameterTemplate API by PartCategory instance
v31 -> 2022-03-14
- Adds "updated" field to SupplierPriceBreakList and SupplierPriceBreakDetail API endpoints
v30 -> 2022-03-09
- Adds "exclude_location" field to BuildAutoAllocation API endpoint
- Allows BuildItem API endpoint to be filtered by BomItem relation
v29 -> 2022-03-08
- Adds "scheduling" endpoint for predicted stock scheduling information
v28 -> 2022-03-04
- Adds an API endpoint for auto allocation of stock items against a build order
- Ref: https://github.com/inventree/InvenTree/pull/2713
v27 -> 2022-02-28
- Adds target_date field to individual line items for purchase orders and sales orders
v26 -> 2022-02-17
- Adds API endpoint for uploading a BOM file and extracting data
v25 -> 2022-02-17
- Adds ability to filter "part" list endpoint by "in_bom_for" argument
v24 -> 2022-02-10
- Adds API endpoint for deleting (cancelling) build order outputs
v23 -> 2022-02-02
- Adds API endpoints for managing plugin classes
- Adds API endpoints for managing plugin settings
v22 -> 2021-12-20
- Adds API endpoint to "merge" multiple stock items
v21 -> 2021-12-04
- Adds support for multiple "Shipments" against a SalesOrder
- Refactors process for stock allocation against a SalesOrder
v20 -> 2021-12-03
- Adds ability to filter POLineItem endpoint by "base_part"
- Adds optional "order_detail" to POLineItem list endpoint
v19 -> 2021-12-02
- Adds the ability to filter the StockItem API by "part_tree"
- Returns only stock items which match a particular part.tree_id field
v18 -> 2021-11-15
- Adds the ability to filter BomItem API by "uses" field
- This returns a list of all BomItems which "use" the specified part
- Includes inherited BomItem objects
v17 -> 2021-11-09
- Adds API endpoints for GLOBAL and USER settings objects
- Ref: https://github.com/inventree/InvenTree/pull/2275
v16 -> 2021-10-17
- Adds API endpoint for completing build order outputs
v15 -> 2021-10-06
- Adds detail endpoint for SalesOrderAllocation model
- Allows use of the API forms interface for adjusting SalesOrderAllocation objects
v14 -> 2021-10-05
- Stock adjustment actions API is improved, using native DRF serializer support
- However adjustment actions now only support 'pk' as a lookup field
v13 -> 2021-10-05
- Adds API endpoint to allocate stock items against a BuildOrder
- Updates StockItem API with improved filtering against BomItem data
v12 -> 2021-09-07
- Adds API endpoint to receive stock items against a PurchaseOrder
v11 -> 2021-08-26
- Adds "units" field to PartBriefSerializer
- This allows units to be introspected from the "part_detail" field in the StockItem serializer
v10 -> 2021-08-23
- Adds "purchase_price_currency" to StockItem serializer
- Adds "purchase_price_string" to StockItem serializer
- Purchase price is now writable for StockItem serializer
v9 -> 2021-08-09
- Adds "price_string" to part pricing serializers
v8 -> 2021-07-19
- Refactors the API interface for SupplierPart and ManufacturerPart models
- ManufacturerPart objects can no longer be created via the SupplierPart API endpoint
v7 -> 2021-07-03
- Introduced the concept of "API forms" in https://github.com/inventree/InvenTree/pull/1716
- API OPTIONS endpoints provide comprehensive field metadata
- Multiple new API endpoints added for database models
v6 -> 2021-06-23
- Part and Company images can now be directly uploaded via the REST API
v5 -> 2021-06-21
- Adds API interface for manufacturer part parameters
v4 -> 2021-06-01
- BOM items can now accept "variant stock" to be assigned against them
- Many slight API tweaks were needed to get this to work properly!
v3 -> 2021-05-22:
- The updated StockItem "history tracking" now uses a different interface
"""

View File

@@ -1,135 +1,72 @@
"""AppConfig for inventree app."""
# -*- coding: utf-8 -*-
import logging
from django.apps import AppConfig
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.exceptions import AppRegistryNotReady
from django.db import transaction
from django.db.utils import IntegrityError
from InvenTree.ready import isInTestMode, canAppAccessDatabase
import InvenTree.tasks
from InvenTree.ready import canAppAccessDatabase, isInTestMode
from .config import get_setting
logger = logging.getLogger("inventree")
class InvenTreeConfig(AppConfig):
"""AppConfig for inventree app."""
name = 'InvenTree'
def ready(self):
"""Setup background tasks and update exchange rates."""
if canAppAccessDatabase():
self.remove_obsolete_tasks()
self.start_background_tasks()
if not isInTestMode(): # pragma: no cover
if not isInTestMode():
self.update_exchange_rates()
self.collect_notification_methods()
if canAppAccessDatabase() or settings.TESTING_ENV:
self.add_user_on_startup()
def remove_obsolete_tasks(self):
"""Delete any obsolete scheduled tasks in the database."""
obsolete = [
'InvenTree.tasks.delete_expired_sessions',
'stock.tasks.delete_old_stock_items',
]
try:
from django_q.models import Schedule
except AppRegistryNotReady: # pragma: no cover
return
# Remove any existing obsolete tasks
Schedule.objects.filter(func__in=obsolete).delete()
def start_background_tasks(self):
"""Start all background tests for InvenTree."""
try:
from django_q.models import Schedule
except AppRegistryNotReady: # pragma: no cover
logger.warning("Cannot start background tasks - app registry not ready")
except (AppRegistryNotReady):
return
logger.info("Starting background tasks...")
# Remove successful task results from the database
InvenTree.tasks.schedule_task(
'InvenTree.tasks.delete_successful_tasks',
schedule_type=Schedule.DAILY,
)
# Check for InvenTree updates
InvenTree.tasks.schedule_task(
'InvenTree.tasks.check_for_updates',
schedule_type=Schedule.DAILY
)
# Heartbeat to let the server know the background worker is running
InvenTree.tasks.schedule_task(
'InvenTree.tasks.heartbeat',
schedule_type=Schedule.MINUTES,
minutes=15
)
# Keep exchange rates up to date
InvenTree.tasks.schedule_task(
'InvenTree.tasks.update_exchange_rates',
schedule_type=Schedule.DAILY,
)
# Delete old error messages
InvenTree.tasks.schedule_task(
'InvenTree.tasks.delete_old_error_logs',
schedule_type=Schedule.DAILY,
)
def update_exchange_rates(self):
"""
Update exchange rates each time the server is started, *if*:
# Delete old notification records
InvenTree.tasks.schedule_task(
'common.tasks.delete_old_notifications',
schedule_type=Schedule.DAILY,
)
# Check for overdue purchase orders
InvenTree.tasks.schedule_task(
'order.tasks.check_overdue_purchase_orders',
schedule_type=Schedule.DAILY
)
# Check for overdue sales orders
InvenTree.tasks.schedule_task(
'order.tasks.check_overdue_sales_orders',
schedule_type=Schedule.DAILY,
)
# Check for overdue build orders
InvenTree.tasks.schedule_task(
'build.tasks.check_overdue_build_orders',
schedule_type=Schedule.DAILY
)
def update_exchange_rates(self): # pragma: no cover
"""Update exchange rates each time the server is started.
Only runs *if*:
a) Have not been updated recently (one day or less)
b) The base exchange rate has been altered
"""
try:
from djmoney.contrib.exchange.models import ExchangeBackend
from common.settings import currency_code_default
from datetime import datetime, timedelta
from InvenTree.tasks import update_exchange_rates
except AppRegistryNotReady: # pragma: no cover
from common.settings import currency_code_default
except AppRegistryNotReady:
pass
base_currency = currency_code_default()
@@ -141,76 +78,28 @@ class InvenTreeConfig(AppConfig):
last_update = backend.last_update
if last_update is None:
if last_update is not None:
delta = datetime.now().date() - last_update.date()
if delta > timedelta(days=1):
print(f"Last update was {last_update}")
update = True
else:
# Never been updated
logger.info("Exchange backend has never been updated")
print("Exchange backend has never been updated")
update = True
# Backend currency has changed?
if base_currency != backend.base_currency:
logger.info(f"Base currency changed from {backend.base_currency} to {base_currency}")
if not base_currency == backend.base_currency:
print(f"Base currency changed from {backend.base_currency} to {base_currency}")
update = True
except (ExchangeBackend.DoesNotExist):
logger.info("Exchange backend not found - updating")
print("Exchange backend not found - updating")
update = True
except Exception:
except:
# Some other error - potentially the tables are not ready yet
return
if update:
try:
update_exchange_rates()
except Exception as e:
logger.error(f"Error updating exchange rates: {e}")
def add_user_on_startup(self):
"""Add a user on startup."""
# stop if checks were already created
if hasattr(settings, 'USER_ADDED') and settings.USER_ADDED:
return
# get values
add_user = get_setting('INVENTREE_ADMIN_USER', 'admin_user')
add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
# check if all values are present
set_variables = 0
for tested_var in [add_user, add_email, add_password]:
if tested_var:
set_variables += 1
# no variable set -> do not try anything
if set_variables == 0:
settings.USER_ADDED = True
return
# not all needed variables set
if set_variables < 3:
logger.warn('Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD')
settings.USER_ADDED = True
return
# good to go -> create user
user = get_user_model()
try:
with transaction.atomic():
if user.objects.filter(username=add_user).exists():
logger.info(f"User {add_user} already exists - skipping creation")
else:
new_user = user.objects.create_superuser(add_user, add_email, add_password)
logger.info(f'User {str(new_user)} was created!')
except IntegrityError as _e:
logger.warning(f'The user "{add_user}" could not be created due to the following error:\n{str(_e)}')
# do not try again
settings.USER_ADDED = True
def collect_notification_methods(self):
"""Collect all notification methods."""
from common.notifications import storage
storage.collect()
update_exchange_rates()

View File

@@ -1,84 +0,0 @@
"""Pull rendered copies of the templated.
Only used for testing the js files! - This file is omited from coverage.
"""
import os # pragma: no cover
import pathlib # pragma: no cover
from InvenTree.helpers import InvenTreeTestCase # pragma: no cover
class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
"""A unit test to "render" javascript files.
The server renders templated javascript files,
we need the fully-rendered files for linting and static tests.
"""
def download_file(self, filename, prefix):
"""Function to `download`(copy) a file to a temporay firectory."""
url = os.path.join(prefix, filename)
response = self.client.get(url)
here = os.path.abspath(os.path.dirname(__file__))
output_dir = os.path.join(
here,
'..',
'..',
'js_tmp',
)
output_dir = os.path.abspath(output_dir)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
output_file = os.path.join(
output_dir,
filename,
)
with open(output_file, 'wb') as output:
output.write(response.content)
def download_files(self, subdir, prefix):
"""Download files in directory."""
here = os.path.abspath(os.path.dirname(__file__))
js_template_dir = os.path.join(
here,
'..',
'templates',
'js',
)
directory = os.path.join(js_template_dir, subdir)
directory = os.path.abspath(directory)
js_files = pathlib.Path(directory).rglob('*.js')
n = 0
for f in js_files:
js = os.path.basename(f)
self.download_file(js, prefix)
n += 1
return n
def test_render_files(self):
"""Look for all javascript files."""
n = 0
print("Rendering javascript files...")
n += self.download_files('translated', '/js/i18n')
n += self.download_files('dynamic', '/js/dynamic')
print(f"Rendered {n} javascript files.")

View File

@@ -1,205 +0,0 @@
"""Helper functions for loading InvenTree configuration options."""
import logging
import os
import random
import shutil
import string
from pathlib import Path
import yaml
logger = logging.getLogger('inventree')
def is_true(x):
"""Shortcut function to determine if a value "looks" like a boolean"""
return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on']
def get_base_dir() -> Path:
"""Returns the base (top-level) InvenTree directory."""
return Path(__file__).parent.parent.resolve()
def get_config_file(create=True) -> Path:
"""Returns the path of the InvenTree configuration file.
Note: It will be created if it does not already exist!
"""
base_dir = get_base_dir()
cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')
if cfg_filename:
cfg_filename = Path(cfg_filename.strip()).resolve()
else:
# Config file is *not* specified - use the default
cfg_filename = base_dir.joinpath('config.yaml').resolve()
if not cfg_filename.exists() and create:
print("InvenTree configuration file 'config.yaml' not found - creating default file")
cfg_template = base_dir.joinpath("config_template.yaml")
shutil.copyfile(cfg_template, cfg_filename)
print(f"Created config file {cfg_filename}")
return cfg_filename
def load_config_data() -> map:
"""Load configuration data from the config file."""
cfg_file = get_config_file()
with open(cfg_file, 'r') as cfg:
data = yaml.safe_load(cfg)
return data
def get_setting(env_var=None, config_key=None, default_value=None):
"""Helper function for retrieving a configuration setting value.
- First preference is to look for the environment variable
- Second preference is to look for the value of the settings file
- Third preference is the default value
Arguments:
env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
config_key: Key to lookup in the configuration file
default_value: Value to return if first two options are not provided
"""
# First, try to load from the environment variables
if env_var is not None:
val = os.getenv(env_var, None)
if val is not None:
return val
# Next, try to load from configuration file
if config_key is not None:
cfg_data = load_config_data()
result = None
# Hack to allow 'path traversal' in configuration file
for key in config_key.strip().split('.'):
if type(cfg_data) is not dict or key not in cfg_data:
result = None
break
result = cfg_data[key]
cfg_data = cfg_data[key]
if result is not None:
return result
# Finally, return the default value
return default_value
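A short illustration of the precedence described above; the setting names and values here are examples only, not taken from this diff:
# 1) An environment variable (if set) takes priority
# 2) Otherwise the config file is consulted; a '.' in the key traverses nested sections
# 3) Otherwise the supplied default value is returned

media_root = get_setting(
    env_var='INVENTREE_MEDIA_ROOT',
    config_key='media_root',
    default_value='/opt/inventree/media',
)

# Nested keys can be addressed with dotted paths, e.g. a hypothetical 'database' block:
db_name = get_setting('INVENTREE_DB_NAME', 'database.NAME', 'inventree')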
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
"""Helper function for retreiving a boolean configuration setting"""
return is_true(get_setting(env_var, config_key, default_value))
def get_media_dir(create=True):
"""Return the absolute path for the 'media' directory (where uploaded files are stored)"""
md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')
if not md:
raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified')
md = Path(md).resolve()
if create:
md.mkdir(parents=True, exist_ok=True)
return md
def get_static_dir(create=True):
"""Return the absolute path for the 'static' directory (where static files are stored)"""
sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root')
if not sd:
raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified')
sd = Path(sd).resolve()
if create:
sd.mkdir(parents=True, exist_ok=True)
return sd
def get_plugin_file():
"""Returns the path of the InvenTree plugins specification file.
Note: It will be created if it does not already exist!
"""
# Check if the plugin.txt file (specifying required plugins) is specified
plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')
if not plugin_file:
# If not specified, look in the same directory as the configuration file
config_dir = get_config_file().parent
plugin_file = config_dir.joinpath('plugins.txt')
else:
# Make sure we are using a modern Path object
plugin_file = Path(plugin_file)
if not plugin_file.exists():
logger.warning("Plugin configuration file does not exist - creating default file")
logger.info(f"Creating plugin file at '{plugin_file}'")
# If opening the file fails (no write permission, for example), then this will throw an error
plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n")
return plugin_file
def get_secret_key():
"""Return the secret key value which will be used by django.
Following options are tested, in descending order of preference:
A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
C) Look for default key file "secret_key.txt"
D) Create "secret_key.txt" if it does not exist
"""
# Look for environment variable
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover
return secret_key
# Look for secret key file
if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
secret_key_file = Path(secret_key_file).resolve()
else:
# Default location for secret key file
secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve()
if not secret_key_file.exists():
logger.info(f"Generating random key file at '{secret_key_file}'")
# Create a random key file
options = string.digits + string.ascii_letters + string.punctuation
key = ''.join([random.choice(options) for i in range(100)])
secret_key_file.write_text(key)
logger.info(f"Loading SECRET_KEY from '{secret_key_file}'")
key_data = secret_key_file.read_text().strip()
return key_data

View File

@@ -1,23 +1,29 @@
# -*- coding: utf-8 -*-
"""Provides extra global data to all templates."""
"""
Provides extra global data to all templates.
"""
from InvenTree.status_codes import SalesOrderStatus, PurchaseOrderStatus
from InvenTree.status_codes import BuildStatus, StockStatus
from InvenTree.status_codes import StockHistoryCode
import InvenTree.status
from InvenTree.status_codes import (BuildStatus, PurchaseOrderStatus,
SalesOrderStatus, StockHistoryCode,
StockStatus)
from users.models import RuleSet, check_user_role
from users.models import RuleSet
def health_status(request):
"""Provide system health status information to the global context.
"""
Provide system health status information to the global context.
- Not required for AJAX requests
- Do not provide if it is already provided to the context
"""
if request.path.endswith('.js'):
# Do not provide to script requests
return {} # pragma: no cover
return {}
if hasattr(request, '_inventree_health_status'):
# Do not duplicate efforts
@@ -30,14 +36,9 @@ def health_status(request):
'email_configured': InvenTree.status.is_email_configured(),
}
# The following keys are required to denote system health
health_keys = [
'django_q_running',
]
all_healthy = True
for k in health_keys:
for k in status.keys():
if status[k] is not True:
all_healthy = False
@@ -49,7 +50,10 @@ def health_status(request):
def status_codes(request):
"""Provide status code enumerations."""
"""
Provide status code enumerations.
"""
if hasattr(request, '_inventree_status_codes'):
# Do not duplicate efforts
return {}
@@ -67,7 +71,8 @@ def status_codes(request):
def user_roles(request):
"""Return a map of the current roles assigned to the user.
"""
Return a map of the current roles assigned to the user.
Roles are denoted by their simple names, and then the permission type.
@@ -78,18 +83,37 @@ def user_roles(request):
Each value will return a boolean True / False
"""
user = request.user
roles = {
}
for role in RuleSet.RULESET_MODELS.keys():
if user.is_superuser:
for ruleset in RuleSet.RULESET_MODELS.keys():
roles[ruleset] = {
'view': True,
'add': True,
'change': True,
'delete': True,
}
else:
for group in user.groups.all():
for rule in group.rule_sets.all():
permissions = {}
# Ensure the role name is in the dict
if rule.name not in roles:
roles[rule.name] = {
'view': user.is_superuser,
'add': user.is_superuser,
'change': user.is_superuser,
'delete': user.is_superuser
}
for perm in ['view', 'add', 'change', 'delete']:
permissions[perm] = user.is_superuser or check_user_role(user, role, perm)
roles[role] = permissions
# Roles are additive across groups
roles[rule.name]['view'] |= rule.can_view
roles[rule.name]['add'] |= rule.can_add
roles[rule.name]['change'] |= rule.can_change
roles[rule.name]['delete'] |= rule.can_delete
return {'roles': roles}

View File

@@ -1,88 +0,0 @@
"""Custom exception handling for the DRF API."""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import traceback
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.utils.translation import gettext_lazy as _
import rest_framework.views as drfviews
from error_report.models import Error
from rest_framework import serializers
from rest_framework.exceptions import ValidationError as DRFValidationError
from rest_framework.response import Response
def log_error(path):
"""Log an error to the database.
- Uses python exception handling to extract error details
Arguments:
path: The 'path' (most likely a URL) associated with this error (optional)
"""
kind, info, data = sys.exc_info()
# Check if the error is on the ignore list
if kind in settings.IGNORED_ERRORS:
return
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=path,
)
def exception_handler(exc, context):
"""Custom exception handler for DRF framework.
Ref: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling
Catches any errors not natively handled by DRF, and re-throws as an error DRF can handle
"""
response = None
# Catch any django validation error, and re-throw a DRF validation error
if isinstance(exc, DjangoValidationError):
exc = DRFValidationError(detail=serializers.as_serializer_error(exc))
# Default to the built-in DRF exception handler
response = drfviews.exception_handler(exc, context)
if response is None:
# DRF handler did not provide a default response for this exception
if settings.TESTING:
# If in TESTING mode, re-throw the exception for traceback
raise exc
elif settings.DEBUG:
# If in DEBUG mode, provide error information in the response
error_detail = str(exc)
else:
error_detail = _("Error details can be found in the admin panel")
response_data = {
'error': type(exc).__name__,
'error_class': str(type(exc)),
'detail': error_detail,
'path': context['request'].path,
'status_code': 500,
}
response = Response(response_data, status=500)
log_error(context['request'].path)
if response is not None:
# Convert errors returned under the label '__all__' to 'non_field_errors'
if '__all__' in response.data:
response.data['non_field_errors'] = response.data['__all__']
del response.data['__all__']
return response
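For reference, DRF only invokes a custom handler like this once it is registered via the EXCEPTION_HANDLER setting; a minimal sketch follows (the dotted module path is an assumption, not shown in this diff):
# settings.py (sketch)
REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler',
}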

View File

@@ -1,74 +1,34 @@
"""Exchangerate backend to use `exchangerate.host` to get rates."""
import ssl
from urllib.error import URLError
from urllib.request import urlopen
from django.db.utils import OperationalError
import certifi
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
from common.settings import currency_code_default, currency_codes
from urllib.error import HTTPError, URLError
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
class InvenTreeExchange(SimpleExchangeBackend):
"""Backend for automatically updating currency exchange rates.
"""
Backend for automatically updating currency exchange rates.
Uses the `exchangerate.host` service API
Uses the exchangerate.host service API
"""
name = "InvenTreeExchange"
def __init__(self):
"""Set API url."""
self.url = "https://api.exchangerate.host/latest"
super().__init__()
def get_params(self):
"""Placeholder to set API key. Currently not required by `exchangerate.host`."""
# No API key is required
return {
}
def get_response(self, **kwargs):
"""Custom code to get response from server.
Note: Adds a 5-second timeout
"""
url = self.get_url(**kwargs)
try:
context = ssl.create_default_context(cafile=certifi.where())
response = urlopen(url, timeout=5, context=context)
return response.read()
except Exception:
# Something has gone wrong, but we can just try again next time
# Raise a TypeError so the outer function can handle this
raise TypeError
def update_rates(self, base_currency=None):
"""Set the requested currency codes and get rates."""
# Set default - see B008
if base_currency is None:
base_currency = currency_code_default()
def update_rates(self, base_currency=currency_code_default()):
symbols = ','.join(currency_codes())
try:
super().update_rates(base=base_currency, symbols=symbols)
# catch connection errors
except URLError:
except (HTTPError, URLError):
print('Encountered connection error while updating')
except TypeError:
print('Exchange returned invalid response')
except OperationalError as e:
if 'SerializationFailure' in e.__cause__.__class__.__name__:
print('Serialization Failure while updating exchange rates')
# We are just going to swallow this exception because the
# exchange rates will be updated later by the scheduled task
else:
# Other operational errors probably are still show stoppers
# so reraise them so that the log contains the stacktrace
raise
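A minimal sketch of how this backend might be exercised, assuming django-money's convert_money helper is available; the currency codes are illustrative:
from djmoney.contrib.exchange.models import convert_money
from djmoney.money import Money

backend = InvenTreeExchange()

# Fetch the latest rates for the configured currencies
backend.update_rates(base_currency='USD')

# Once rates are stored, django-money can convert between currencies
price_eur = convert_money(Money(10, 'USD'), 'EUR')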

View File

@@ -1,54 +1,47 @@
"""Custom fields used in InvenTree."""
""" Custom fields used in InvenTree """
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from decimal import Decimal
from django import forms
from django.core import validators
from django.db import models as models
from django.utils.translation import gettext_lazy as _
from djmoney.forms.fields import MoneyField
from djmoney.models.fields import MoneyField as ModelMoneyField
from djmoney.models.validators import MinMoneyValidator
from rest_framework.fields import URLField as RestURLField
import InvenTree.helpers
from .validators import allowable_url_schemes
from django.utils.translation import ugettext_lazy as _
class InvenTreeRestURLField(RestURLField):
"""Custom field for DRF with custom scheme vaildators."""
def __init__(self, **kwargs):
"""Update schemes."""
from django.forms.fields import URLField as FormURLField
from django.db import models as models
from django.core import validators
from django import forms
# Enforce 'max length' parameter in form validation
if 'max_length' not in kwargs:
kwargs['max_length'] = 200
from decimal import Decimal
super().__init__(**kwargs)
self.validators[-1].schemes = allowable_url_schemes()
from djmoney.models.fields import MoneyField as ModelMoneyField
from djmoney.forms.fields import MoneyField
from djmoney.models.validators import MinMoneyValidator
import InvenTree.helpers
class InvenTreeURLField(models.URLField):
"""Custom URL field which has custom scheme validators."""
class InvenTreeURLFormField(FormURLField):
""" Custom URL form field with custom scheme validators """
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
def __init__(self, **kwargs):
"""Initialization method for InvenTreeURLField"""
# Max length for InvenTreeURLField defaults to 200
if 'max_length' not in kwargs:
kwargs['max_length'] = 200
class InvenTreeURLField(models.URLField):
""" Custom URL field which has custom scheme validators """
super().__init__(**kwargs)
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': InvenTreeURLFormField
})
def money_kwargs():
"""Returns the database settings for MoneyFields."""
from common.settings import currency_code_default, currency_code_mappings
""" returns the database settings for MoneyFields """
from common.settings import currency_code_mappings, currency_code_default
kwargs = {}
kwargs['currency_choices'] = currency_code_mappings()
@@ -57,10 +50,11 @@ def money_kwargs():
class InvenTreeModelMoneyField(ModelMoneyField):
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
"""
Custom MoneyField for clean migrations while using dynamic currency settings
"""
def __init__(self, **kwargs):
"""Overwrite default values and validators."""
# detect if creating migration
if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
# remove currency information for a clean migration
@@ -73,39 +67,36 @@ class InvenTreeModelMoneyField(ModelMoneyField):
# Set a minimum value validator
validators = kwargs.get('validators', [])
allow_negative = kwargs.pop('allow_negative', False)
# If no validators are provided, add some "standard" ones
if len(validators) == 0:
if not allow_negative:
validators.append(
MinMoneyValidator(0),
)
validators.append(
MinMoneyValidator(0),
)
kwargs['validators'] = validators
super().__init__(**kwargs)
def formfield(self, **kwargs):
"""Override form class to use own function."""
""" override form class to use own function """
kwargs['form_class'] = InvenTreeMoneyField
return super().formfield(**kwargs)
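A sketch of how the model money field is typically declared on a model; the model and field names here are hypothetical:
from django.db import models
from django.utils.translation import gettext_lazy as _


class ExamplePriceBreak(models.Model):
    """Hypothetical model demonstrating the money field."""

    price = InvenTreeModelMoneyField(
        max_digits=19,
        decimal_places=4,
        null=True,
        help_text=_('Unit price at the specified quantity'),
    )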
class InvenTreeMoneyField(MoneyField):
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
""" custom MoneyField for clean migrations while using dynamic currency settings """
def __init__(self, *args, **kwargs):
"""Override initial values with the real info from database."""
# override initial values with the real info from database
kwargs.update(money_kwargs())
super().__init__(*args, **kwargs)
class DatePickerFormField(forms.DateField):
"""Custom date-picker field."""
"""
Custom date-picker field
"""
def __init__(self, **kwargs):
"""Set up custom values."""
help_text = kwargs.get('help_text', _('Enter date'))
label = kwargs.get('label', None)
required = kwargs.get('required', False)
@@ -128,7 +119,10 @@ class DatePickerFormField(forms.DateField):
def round_decimal(value, places):
"""Round value to the specified number of places."""
"""
Round value to the specified number of places.
"""
if value is not None:
# see https://docs.python.org/2/library/decimal.html#decimal.Decimal.quantize for options
return value.quantize(Decimal(10) ** -places)
@@ -136,19 +130,17 @@ def round_decimal(value, places):
class RoundingDecimalFormField(forms.DecimalField):
"""Custom FormField that automatically rounds inputs."""
def to_python(self, value):
"""Convert value to python type."""
value = super().to_python(value)
value = super(RoundingDecimalFormField, self).to_python(value)
value = round_decimal(value, self.decimal_places)
return value
def prepare_value(self, value):
"""Override the 'prepare_value' method, to remove trailing zeros when displaying.
"""
Override the 'prepare_value' method, to remove trailing zeros when displaying.
Why? It looks nice!
"""
if type(value) == Decimal:
return InvenTree.helpers.normalize(value)
else:
@@ -156,15 +148,11 @@ class RoundingDecimalFormField(forms.DecimalField):
class RoundingDecimalField(models.DecimalField):
"""Custom Field that automatically rounds inputs."""
def to_python(self, value):
"""Convert value to python type."""
value = super().to_python(value)
value = super(RoundingDecimalField, self).to_python(value)
return round_decimal(value, self.decimal_places)
def formfield(self, **kwargs):
"""Return a Field instance for this field."""
defaults = {
'form_class': RoundingDecimalFormField
}
@@ -172,19 +160,3 @@ class RoundingDecimalField(models.DecimalField):
defaults.update(kwargs)
return super().formfield(**kwargs)
class InvenTreeNotesField(models.TextField):
"""Custom implementation of a 'notes' field"""
# Maximum character limit for the various 'notes' fields
NOTES_MAX_LENGTH = 50000
def __init__(self, **kwargs):
"""Configure default initial values for this field"""
kwargs['max_length'] = self.NOTES_MAX_LENGTH
kwargs['verbose_name'] = _('Notes')
kwargs['blank'] = True
kwargs['null'] = True
super().__init__(**kwargs)

View File

@@ -1,76 +0,0 @@
"""General filters for InvenTree."""
from rest_framework.filters import OrderingFilter
class InvenTreeOrderingFilter(OrderingFilter):
"""Custom OrderingFilter class which allows aliased filtering of related fields.
To use, simply specify this filter in the "filter_backends" section.
filter_backends = [
InvenTreeOrderingFilter,
]
Then, specify a ordering_field_aliases attribute:
ordering_field_alises = {
'name': 'part__part__name',
'SKU': 'part__SKU',
}
"""
def get_ordering(self, request, queryset, view):
"""Override ordering for supporting aliases."""
ordering = super().get_ordering(request, queryset, view)
aliases = getattr(view, 'ordering_field_aliases', None)
# Attempt to map ordering fields based on provided aliases
if ordering is not None and aliases is not None:
"""Ordering fields should be mapped to separate fields."""
ordering_initial = ordering
ordering = []
for field in ordering_initial:
reverse = field.startswith('-')
if reverse:
field = field[1:]
# Are aliases defined for this field?
if field in aliases:
alias = aliases[field]
else:
alias = field
"""
Potentially, a single field could be "aliased" to multiple field,
(For example to enforce a particular ordering sequence)
e.g. to filter first by the integer value...
ordering_field_aliases = {
"reference": ["integer_ref", "reference"]
}
"""
if type(alias) is str:
alias = [alias]
elif type(alias) in [list, tuple]:
pass
else:
# Unsupported alias type
continue
for a in alias:
if reverse:
a = '-' + a
ordering.append(a)
return ordering
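A sketch of a DRF list view wired up as described in the docstring above; the model and serializer names are hypothetical and assumed to be imported from the relevant app:
from rest_framework import generics


class SupplierPartList(generics.ListAPIView):
    """Hypothetical list endpoint using aliased ordering."""

    # SupplierPart / SupplierPartSerializer are assumed imports
    queryset = SupplierPart.objects.all()
    serializer_class = SupplierPartSerializer

    filter_backends = [
        InvenTreeOrderingFilter,
    ]

    # '?ordering=SKU' is transparently mapped to the related field
    ordering_field_aliases = {
        'name': 'part__part__name',
        'SKU': 'part__SKU',
    }

    ordering_fields = [
        'name',
        'SKU',
    ]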

View File

@@ -1,156 +0,0 @@
"""Custom string formatting functions and helpers"""
import re
import string
from django.utils.translation import gettext_lazy as _
def parse_format_string(fmt_string: str) -> dict:
"""Extract formatting information from the provided format string.
Returns a dict object which contains structured information about the format groups
"""
groups = string.Formatter().parse(fmt_string)
info = {}
for group in groups:
# Skip any group which does not have a named value
if not group[1]:
continue
info[group[1]] = {
'format': group[1],
'prefix': group[0],
}
return info
def construct_format_regex(fmt_string: str) -> str:
r"""Construct a regular expression based on a provided format string
This function turns a python format string into a regular expression,
which can be used for two purposes:
- Ensure that a particular string matches the specified format
- Extract named variables from a matching string
This function also provides support for wildcard characters:
- '?' provides single character matching; is converted to a '.' (period) for regex
- '#' provides single digit matching; is converted to '\d'
Args:
fmt_string: A typical format string e.g. "PO-???-{ref:04d}"
Returns:
str: A regular expression pattern e.g. ^PO\-...\-(?P<ref>.*)$
Raises:
ValueError: Format string is invalid
"""
pattern = "^"
for group in string.Formatter().parse(fmt_string):
prefix = group[0] # Prefix (literal text appearing before this group)
name = group[1] # Name of this format variable
format = group[2] # Format specifier e.g :04d
rep = [
'+', '-', '.',
'{', '}', '(', ')',
'^', '$', '~', '!', '@', ':', ';', '|', '\'', '"',
]
# Escape any special regex characters
for ch in rep:
prefix = prefix.replace(ch, '\\' + ch)
# Replace ? with single-character match
prefix = prefix.replace('?', '.')
# Replace # with single-digit match
prefix = prefix.replace('#', r'\d')
pattern += prefix
# Add a named capture group for the format entry
if name:
# Check if integer values are required
if format.endswith('d'):
chr = r'\d'
else:
chr = '.'
# Specify width
# TODO: Introspect required width
w = '+'
pattern += f"(?P<{name}>{chr}{w})"
pattern += "$"
return pattern
def validate_string(value: str, fmt_string: str) -> str:
"""Validate that the provided string matches the specified format.
Args:
value: The string to be tested e.g. 'SO-1234-ABC',
fmt_string: The required format e.g. 'SO-{ref}-???',
Returns:
bool: True if the value matches the required format, else False
Raises:
ValueError: The provided format string is invalid
"""
pattern = construct_format_regex(fmt_string)
result = re.match(pattern, value)
return result is not None
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
"""Extract a named value from the provided string, given the provided format string
Args:
name: Name of group to extract e.g. 'ref'
value: Raw string e.g. 'PO-ABC-1234'
fmt_string: Format pattern e.g. 'PO-???-{ref}'
Returns:
str: String value of the named group
Raises:
ValueError: format string is incorrectly specified, or provided value does not match format string
NameError: named value does not exist in the format string
IndexError: named value could not be found in the provided entry
"""
info = parse_format_string(fmt_string)
if name not in info.keys():
raise NameError(_(f"Value '{name}' does not appear in pattern format"))
# Construct a regular expression for matching against the provided format string
# Note: This will raise a ValueError if 'fmt_string' is incorrectly specified
pattern = construct_format_regex(fmt_string)
# Run the regex matcher against the raw string
result = re.match(pattern, value)
if not result:
raise ValueError(_("Provided value does not match required pattern: ") + fmt_string)
# And return the value we are interested in
# Note: This will raise an IndexError if the named group was not matched
return result.group(name)
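Using the example values already given in the docstrings above:
# True - the value matches the required pattern
validate_string('SO-1234-ABC', 'SO-{ref}-???')

# '1234' - the named group 'ref' is extracted from the raw value
extract_named_group('ref', 'SO-1234-ABC', 'SO-{ref}-???')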

View File

@@ -1,33 +1,23 @@
"""Helper forms which subclass Django forms to provide additional functionality."""
"""
Helper forms which subclass Django forms to provide additional functionality
"""
import logging
from urllib.parse import urlencode
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.contrib.auth.models import User
from allauth.account.adapter import DefaultAccountAdapter
from allauth.account.forms import SignupForm, set_form_field_order
from allauth.exceptions import ImmediateHttpResponse
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from allauth_2fa.adapter import OTPAdapter
from allauth_2fa.utils import user_has_valid_totp_device
from crispy_forms.bootstrap import (AppendedText, PrependedAppendedText,
PrependedText)
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Field, Layout
from crispy_forms.layout import Layout, Field
from crispy_forms.bootstrap import PrependedText, AppendedText, PrependedAppendedText, StrictButton, Div
from common.models import InvenTreeSetting
logger = logging.getLogger('inventree')
from part.models import PartCategory
class HelperForm(forms.ModelForm):
"""Provides simple integration of crispy_forms extension."""
""" Provides simple integration of crispy_forms extension. """
# Custom field decorations can be specified here, per form class
field_prefix = {}
@@ -35,7 +25,6 @@ class HelperForm(forms.ModelForm):
field_placeholder = {}
def __init__(self, *args, **kwargs):
"""Setup layout."""
super(forms.ModelForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
@@ -52,8 +41,14 @@ class HelperForm(forms.ModelForm):
self.rebuild_layout()
def is_valid(self):
valid = super(HelperForm, self).is_valid()
return valid
def rebuild_layout(self):
"""Build crispy layout out of current fields."""
layouts = []
for field in self.fields:
@@ -108,164 +103,104 @@ class HelperForm(forms.ModelForm):
self.helper.layout = Layout(*layouts)
class EditUserForm(HelperForm):
"""Form for editing user information."""
class ConfirmForm(forms.Form):
""" Generic confirmation form """
confirm = forms.BooleanField(
required=False, initial=False,
help_text=_("Confirm")
)
class Meta:
"""Metaclass options."""
fields = [
'confirm'
]
class DeleteForm(forms.Form):
""" Generic deletion form which provides simple user confirmation
"""
confirm_delete = forms.BooleanField(
required=False,
initial=False,
label=_('Confirm delete'),
help_text=_('Confirm item deletion')
)
class Meta:
fields = [
'confirm_delete'
]
class EditUserForm(HelperForm):
""" Form for editing user information
"""
class Meta:
model = User
fields = [
'username',
'first_name',
'last_name',
'email'
]
class SetPasswordForm(HelperForm):
"""Form for setting user password."""
""" Form for setting user password
"""
enter_password = forms.CharField(
max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Enter password'),
help_text=_('Enter new password')
)
enter_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Enter password'),
help_text=_('Enter new password'))
confirm_password = forms.CharField(
max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Confirm password'),
help_text=_('Confirm new password')
)
old_password = forms.CharField(
label=_("Old password"),
strip=False,
widget=forms.PasswordInput(attrs={'autocomplete': 'current-password', 'autofocus': True}),
)
confirm_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Confirm password'),
help_text=_('Confirm new password'))
class Meta:
"""Metaclass options."""
model = User
fields = [
'enter_password',
'confirm_password',
'old_password',
'confirm_password'
]
# override allauth
class CustomSignupForm(SignupForm):
"""Override to use dynamic settings."""
class SettingCategorySelectForm(forms.ModelForm):
""" Form for setting category settings """
category = forms.ModelChoiceField(queryset=PartCategory.objects.all())
class Meta:
model = PartCategory
fields = [
'category'
]
def __init__(self, *args, **kwargs):
"""Check settings to influence which fields are needed."""
kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')
super(SettingCategorySelectForm, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
self.fields["email2"] = forms.EmailField(
label=_("Email (again)"),
widget=forms.TextInput(
attrs={
"type": "email",
"placeholder": _("Email address confirmation"),
}
),
)
# check for two password fields
if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
self.fields.pop("password2")
# reorder fields
set_form_field_order(self, ["username", "email", "email2", "password1", "password2", ])
def clean(self):
"""Make sure the supllied emails match if enabled in settings."""
cleaned_data = super().clean()
# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
email = cleaned_data.get("email")
email2 = cleaned_data.get("email2")
if (email and email2) and email != email2:
self.add_error("email2", _("You must type the same email each time."))
return cleaned_data
class RegistratonMixin:
"""Mixin to check if registration should be enabled."""
def is_open_for_signup(self, request, *args, **kwargs):
"""Check if signup is enabled in settings."""
if settings.EMAIL_HOST and InvenTreeSetting.get_setting('LOGIN_ENABLE_REG', True):
return super().is_open_for_signup(request, *args, **kwargs)
return False
def save_user(self, request, user, form, commit=True):
"""Check if a default group is set in settings."""
user = super().save_user(request, user, form)
start_group = InvenTreeSetting.get_setting('SIGNUP_GROUP')
if start_group:
try:
group = Group.objects.get(id=start_group)
user.groups.add(group)
except Group.DoesNotExist:
logger.error('The setting `SIGNUP_GROUP` contains a non-existent group', start_group)
user.save()
return user
class CustomAccountAdapter(RegistratonMixin, OTPAdapter, DefaultAccountAdapter):
"""Override of adapter to use dynamic settings."""
def send_mail(self, template_prefix, email, context):
"""Only send mail if backend configured."""
if settings.EMAIL_HOST:
return super().send_mail(template_prefix, email, context)
return False
class CustomSocialAccountAdapter(RegistratonMixin, DefaultSocialAccountAdapter):
"""Override of adapter to use dynamic settings."""
def is_auto_signup_allowed(self, request, sociallogin):
"""Check if auto signup is enabled in settings."""
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO', True):
return super().is_auto_signup_allowed(request, sociallogin)
return False
# from OTPAdapter
def has_2fa_enabled(self, user):
"""Returns True if the user has 2FA configured."""
return user_has_valid_totp_device(user)
def login(self, request, user):
"""Ensure user is send to 2FA before login if enabled."""
# Require two-factor authentication if it has been configured.
if self.has_2fa_enabled(user):
# Cast to string for the case when this is not a JSON serializable
# object, e.g. a UUID.
request.session['allauth_2fa_user_id'] = str(user.id)
redirect_url = reverse('two-factor-authenticate')
# Add GET parameters to the URL if they exist.
if request.GET:
redirect_url += '?' + urlencode(request.GET)
raise ImmediateHttpResponse(
response=HttpResponseRedirect(redirect_url)
)
# Otherwise defer to the original allauth adapter.
return super().login(request, user)
self.helper = FormHelper()
# Form rendering
self.helper.form_show_labels = False
self.helper.layout = Layout(
Div(
Div(Field('category'),
css_class='col-sm-6',
style='width: 70%;'),
Div(StrictButton(_('Select Category'), css_class='btn btn-primary', type='submit'),
css_class='col-sm-6',
style='width: 30%; padding-left: 0;'),
css_class='row',
),
)

View File

@@ -1,51 +1,47 @@
"""Provides helper functions used throughout the InvenTree project."""
"""
Provides helper functions used throughout the InvenTree project
"""
import io
import json
import logging
import os
import os.path
import re
from decimal import Decimal, InvalidOperation
from pathlib import Path
from wsgiref.util import FileWrapper
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.exceptions import FieldError, ValidationError
from django.core.files.storage import default_storage
from django.core.validators import URLValidator
from django.http import StreamingHttpResponse
from django.test import TestCase
from django.utils.translation import gettext_lazy as _
import requests
from djmoney.money import Money
import json
import os.path
from PIL import Image
from decimal import Decimal, InvalidOperation
from wsgiref.util import FileWrapper
from django.http import StreamingHttpResponse
from django.core.exceptions import ValidationError, FieldError
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Permission
import InvenTree.version
from common.models import InvenTreeSetting
from common.notifications import (InvenTreeNotificationBodies,
NotificationBody, trigger_notification)
from .settings import MEDIA_URL, STATIC_URL
from common.settings import currency_code_default
from .api_tester import UserMixin
from .settings import MEDIA_URL, STATIC_URL
logger = logging.getLogger('inventree')
from djmoney.money import Money
def getSetting(key, backup_value=None):
"""Shortcut for reading a setting value from the database."""
"""
Shortcut for reading a setting value from the database
"""
return InvenTreeSetting.get_setting(key, backup_value=backup_value)
def generateTestKey(test_name):
"""Generate a test 'key' for a given test name. This must not have illegal chars as it will be used for dict lookup in a template.
"""
Generate a test 'key' for a given test name.
This must not have illegal chars as it will be used for dict lookup in a template.
Tests must be named such that they will have unique keys.
"""
key = test_name.strip().lower()
key = key.replace(" ", "")
@@ -55,223 +51,56 @@ def generateTestKey(test_name):
return key
def constructPathString(path, max_chars=250):
"""Construct a 'path string' for the given path.
Arguments:
path: A list of strings e.g. ['path', 'to', 'location']
max_chars: Maximum number of characters
def getMediaUrl(filename):
"""
Return the qualified access path for the given file,
under the media directory.
"""
pathstring = '/'.join(path)
idx = 0
# Replace middle elements to limit the pathstring
if len(pathstring) > max_chars:
mid = len(path) // 2
path_l = path[0:mid]
path_r = path[mid:]
# Ensure the pathstring length is limited
while len(pathstring) > max_chars:
# Remove an element from the list
if idx % 2 == 0:
path_l = path_l[:-1]
else:
path_r = path_r[1:]
subpath = path_l + ['...'] + path_r
pathstring = '/'.join(subpath)
idx += 1
return pathstring
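A quick illustration of the truncation behaviour described above (a hedged sketch: it assumes `InvenTree.helpers` is importable in a configured Django environment, and the sample path is made up):

```python
from InvenTree.helpers import constructPathString

path = ['Electronics', 'Passives', 'Resistors', 'SMD', '0603']

print(constructPathString(path))                # 'Electronics/Passives/Resistors/SMD/0603'
# Middle elements are replaced by '...' until the string fits within max_chars
print(constructPathString(path, max_chars=25))  # 'Electronics/.../SMD/0603'
```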
def getMediaUrl(filename):
"""Return the qualified access path for the given file, under the media directory."""
return os.path.join(MEDIA_URL, str(filename))
def getStaticUrl(filename):
"""Return the qualified access path for the given file, under the static media directory."""
"""
Return the qualified access path for the given file,
under the static media directory.
"""
return os.path.join(STATIC_URL, str(filename))
def construct_absolute_url(*arg):
"""Construct (or attempt to construct) an absolute URL from a relative URL.
This is useful when (for example) sending an email to a user with a link
to something in the InvenTree web framework.
This requires the BASE_URL configuration option to be set!
"""
base = str(InvenTreeSetting.get_setting('INVENTREE_BASE_URL'))
url = '/'.join(arg)
if not base:
return url
# Strip trailing slash from base url
if base.endswith('/'):
base = base[:-1]
if url.startswith('/'):
url = url[1:]
url = f"{base}/{url}"
return url
def download_image_from_url(remote_url, timeout=2.5):
"""Download an image file from a remote URL.
This is a potentially dangerous operation, so we must perform some checks:
- The remote URL is available
- The Content-Length is provided, and is not too large
- The file is a valid image file
Arguments:
remote_url: The remote URL to retrieve image
max_size: Maximum allowed image size, taken from the INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE setting
timeout: Connection timeout in seconds (default = 2.5)
Returns:
An in-memory PIL image file, if the download was successful
Raises:
requests.exceptions.ConnectionError: Connection could not be established
requests.exceptions.Timeout: Connection timed out
requests.exceptions.HTTPError: Server responded with invalid response code
ValueError: Server responded with invalid 'Content-Length' value
TypeError: Response is not a valid image
"""
# Check that the provided URL at least looks valid
validator = URLValidator()
validator(remote_url)
# Calculate maximum allowable image size (in bytes)
max_size = int(InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE')) * 1024 * 1024
try:
response = requests.get(
remote_url,
timeout=timeout,
allow_redirects=True,
stream=True,
)
# Throw an error if anything goes wrong
response.raise_for_status()
except requests.exceptions.ConnectionError as exc:
raise Exception(_("Connection error") + f": {str(exc)}")
except requests.exceptions.Timeout as exc:
raise exc
except requests.exceptions.HTTPError:
raise requests.exceptions.HTTPError(_("Server responded with invalid status code") + f": {response.status_code}")
except Exception as exc:
raise Exception(_("Exception occurred") + f": {str(exc)}")
if response.status_code != 200:
raise Exception(_("Server responded with invalid status code") + f": {response.status_code}")
try:
content_length = int(response.headers.get('Content-Length', 0))
except ValueError:
raise ValueError(_("Server responded with invalid Content-Length value"))
if content_length > max_size:
raise ValueError(_("Image size is too large"))
# Download the file, ensuring we do not exceed the reported size
fo = io.BytesIO()
dl_size = 0
chunk_size = 64 * 1024
for chunk in response.iter_content(chunk_size=chunk_size):
dl_size += len(chunk)
if dl_size > max_size:
raise ValueError(_("Image download exceeded maximum size"))
fo.write(chunk)
if dl_size == 0:
raise ValueError(_("Remote server returned empty response"))
# Now, attempt to convert the downloaded data to a valid image file
# img.verify() will throw an exception if the image is not valid
try:
img = Image.open(fo).convert()
img.verify()
except Exception:
raise TypeError(_("Supplied URL is not a valid image file"))
return img
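A hedged usage sketch for the download helper above. It assumes a configured Django/InvenTree environment, since the maximum download size is read from the `INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE` setting; the URL is a placeholder.

```python
from InvenTree.helpers import download_image_from_url

try:
    img = download_image_from_url("https://example.com/part.png", timeout=2.5)
    print(img)  # in-memory PIL image object
except Exception as exc:
    # Connection errors, oversize downloads and non-image data all raise here
    print(f"Image download failed: {exc}")
```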
def TestIfImage(img):
"""Test if an image file is indeed an image."""
try:
Image.open(img).verify()
return True
except Exception:
return False
def getBlankImage():
"""Return the qualified path for the 'blank image' placeholder."""
"""
Return the qualified path for the 'blank image' placeholder.
"""
return getStaticUrl("img/blank_image.png")
def getBlankThumbnail():
"""Return the qualified path for the 'blank image' thumbnail placeholder."""
"""
Return the qualified path for the 'blank image' thumbnail placeholder.
"""
return getStaticUrl("img/blank_image.thumbnail.png")
def getLogoImage(as_file=False, custom=True):
"""Return the InvenTree logo image, or a custom logo if available."""
"""Return the path to the logo-file."""
if custom and settings.CUSTOM_LOGO:
static_storage = StaticFilesStorage()
if static_storage.exists(settings.CUSTOM_LOGO):
storage = static_storage
elif default_storage.exists(settings.CUSTOM_LOGO):
storage = default_storage
else:
storage = None
if storage is not None:
if as_file:
return f"file://{storage.path(settings.CUSTOM_LOGO)}"
else:
return storage.url(settings.CUSTOM_LOGO)
# If we have got to this point, return the default logo
if as_file:
path = settings.STATIC_ROOT.joinpath('img/inventree.png')
return f"file://{path}"
else:
return getStaticUrl('img/inventree.png')
def TestIfImage(img):
""" Test if an image file is indeed an image """
try:
Image.open(img).verify()
return True
except:
return False
def TestIfImageURL(url):
"""Test if an image URL (or filename) looks like a valid image format.
""" Test if an image URL (or filename) looks like a valid image format.
Simply tests the extension against a set of allowed values
"""
return os.path.splitext(os.path.basename(url))[-1].lower() in [
'.jpg', '.jpeg', '.j2k',
'.jpg', '.jpeg',
'.png', '.bmp',
'.tif', '.tiff',
'.webp', '.gif',
@@ -279,7 +108,7 @@ def TestIfImageURL(url):
def str2bool(text, test=True):
"""Test if a string 'looks' like a boolean value.
""" Test if a string 'looks' like a boolean value.
Args:
text: Input text
@@ -295,7 +124,10 @@ def str2bool(text, test=True):
def is_bool(text):
"""Determine if a string value 'looks' like a boolean."""
"""
Determine if a string value 'looks' like a boolean.
"""
if str2bool(text, True):
return True
elif str2bool(text, False):
@@ -305,7 +137,9 @@ def is_bool(text):
def isNull(text):
"""Test if a string 'looks' like a null value. This is useful for querying the API against a null key.
"""
Test if a string 'looks' like a null value.
This is useful for querying the API against a null key.
Args:
text: Input text
@@ -313,11 +147,15 @@ def isNull(text):
Returns:
True if the text looks like a null value
"""
return str(text).strip().lower() in ['top', 'null', 'none', 'empty', 'false', '-1', '']
def normalize(d):
"""Normalize a decimal number, and remove exponential formatting."""
"""
Normalize a decimal number, and remove exponential formatting.
"""
if type(d) is not Decimal:
d = Decimal(d)
@@ -328,7 +166,8 @@ def normalize(d):
def increment(n):
"""Attempt to increment an integer (or a string that looks like an integer).
"""
Attempt to increment an integer (or a string that looks like an integer!)
e.g.
@@ -336,7 +175,9 @@ def increment(n):
2 -> 3
AB01 -> AB02
QQQ -> QQQ
"""
value = str(n).strip()
# Ignore empty strings
@@ -354,7 +195,7 @@ def increment(n):
groups = result.groups()
# If we cannot match the regex, then simply return the provided value
if len(groups) != 2:
if not len(groups) == 2:
return value
prefix, number = groups
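Expected behaviour of the `increment()` helper, based on the docstring examples above (illustrative only; output values are not taken from the diff itself):

```python
from InvenTree.helpers import increment

print(increment("AB01"))   # 'AB02' - trailing number is bumped, zero-padding kept
print(increment("100"))    # '101'
print(increment("QQQ"))    # 'QQQ'  - no trailing number, value returned unchanged
```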
@@ -378,7 +219,10 @@ def increment(n):
def decimal2string(d):
"""Format a Decimal number as a string, stripping out any trailing zeroes or decimal points. Essentially make it look like a whole number if it is one.
"""
Format a Decimal number as a string,
stripping out any trailing zeroes or decimal points.
Essentially make it look like a whole number if it is one.
Args:
d: A python Decimal object
@@ -386,6 +230,7 @@ def decimal2string(d):
Returns:
A string representation of the input number
"""
if type(d) is Decimal:
d = normalize(d)
@@ -406,7 +251,8 @@ def decimal2string(d):
def decimal2money(d, currency=None):
"""Format a Decimal number as Money.
"""
Format a Decimal number as Money
Args:
d: A python Decimal object
@@ -421,7 +267,7 @@ def decimal2money(d, currency=None):
def WrapWithQuotes(text, quote='"'):
"""Wrap the supplied text with quotes.
""" Wrap the supplied text with quotes
Args:
text: Input text to wrap
@@ -430,6 +276,7 @@ def WrapWithQuotes(text, quote='"'):
Returns:
Supplied text wrapped in quote char
"""
if not text.startswith(quote):
text = quote + text
@@ -439,8 +286,8 @@ def WrapWithQuotes(text, quote='"'):
return text
def MakeBarcode(object_name, object_pk, object_data=None, **kwargs):
"""Generate a string for a barcode. Adds some global InvenTree parameters.
def MakeBarcode(object_name, object_pk, object_data={}, **kwargs):
""" Generate a string for a barcode. Adds some global InvenTree parameters.
Args:
object_type: string describing the object type e.g. 'StockItem'
@@ -451,8 +298,6 @@ def MakeBarcode(object_name, object_pk, object_data=None, **kwargs):
Returns:
json string of the supplied data plus some other data
"""
if object_data is None:
object_data = {}
url = kwargs.get('url', False)
brief = kwargs.get('brief', True)
@@ -487,7 +332,8 @@ def MakeBarcode(object_name, object_pk, object_data=None, **kwargs):
def GetExportFormats():
"""Return a list of allowable file formats for exporting data."""
""" Return a list of allowable file formats for exporting data """
return [
'csv',
'tsv',
@@ -498,18 +344,18 @@ def GetExportFormats():
]
def DownloadFile(data, filename, content_type='application/text', inline=False) -> StreamingHttpResponse:
"""Create a dynamic file for the user to download.
def DownloadFile(data, filename, content_type='application/text'):
""" Create a dynamic file for the user to download.
Args:
data: Raw file data (string or bytes)
filename: Filename for the file download
content_type: Content type for the download
inline: Download "inline" or as attachment? (Default = attachment)
Return:
A StreamingHttpResponse object wrapping the supplied data
"""
filename = WrapWithQuotes(filename)
if type(data) == str:
@@ -519,55 +365,37 @@ def DownloadFile(data, filename, content_type='application/text', inline=False)
response = StreamingHttpResponse(wrapper, content_type=content_type)
response['Content-Length'] = len(data)
disposition = "inline" if inline else "attachment"
response['Content-Disposition'] = f'{disposition}; filename={filename}'
response['Content-Disposition'] = 'attachment; filename={f}'.format(f=filename)
return response
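A minimal sketch of calling `DownloadFile` from a Django view, assuming the surrounding InvenTree URL and authentication plumbing is already in place; the view name and CSV payload are illustrative:

```python
from InvenTree.helpers import DownloadFile

def export_csv(request):
    data = "id,name\n1,Widget\n"
    # Returns a StreamingHttpResponse; Content-Disposition defaults to 'attachment'
    return DownloadFile(data, "parts.csv", content_type="text/csv")
```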
def extract_serial_numbers(serials, expected_quantity, next_number: int):
"""Attempt to extract serial numbers from an input string.
Requirements:
- Serial numbers can be either strings, or integers
- Serial numbers can be split by whitespace / newline / comma chars
- Serial numbers can be supplied as an inclusive range using hyphen char e.g. 10-20
- Serial numbers can be defined as ~ for getting the next available serial number
- Serial numbers can be supplied as <start>+ for getting all expected numbers starting from <start>
- Serial numbers can be supplied as <start>+<length> for getting <length> numbers starting from <start>
def extract_serial_numbers(serials, expected_quantity):
""" Attempt to extract serial numbers from an input string.
- Serial numbers must be integer values
- Serial numbers must be positive
- Serial numbers can be split by whitespace / newline / comma chars
- Serial numbers can be supplied as an inclusive range using hyphen char e.g. 10-20
- Serial numbers can be supplied as <start>+ for getting all expected numbers starting from <start>
- Serial numbers can be supplied as <start>+<length> for getting <length> numbers starting from <start>
Args:
serials: input string with patterns
expected_quantity: The number of (unique) serial numbers we expect
next_number(int): the next possible serial number
"""
serials = serials.strip()
# fill in the next serial number into the serial
while '~' in serials:
serials = serials.replace('~', str(next_number), 1)
next_number += 1
# Split input string by whitespace or comma (,) characters
groups = re.split(r"[\s,]+", serials)
groups = re.split("[\s,]+", serials)
numbers = []
errors = []
# Helper function to check for duplicated numbers
def add_sn(sn):
# Attempt integer conversion first, so numerical strings are never stored
try:
sn = int(sn)
except ValueError:
pass
if sn in numbers:
errors.append(_('Duplicate serial: {sn}').format(sn=sn))
# helpers
def number_add(n):
if n in numbers:
errors.append(_('Duplicate serial: {n}').format(n=n))
else:
numbers.append(sn)
numbers.append(n)
try:
expected_quantity = int(expected_quantity)
@@ -577,25 +405,15 @@ def extract_serial_numbers(serials, expected_quantity, next_number: int):
if len(serials) == 0:
raise ValidationError([_("Empty serial number string")])
# If the user has supplied the correct number of serials, don't process them for groups
# just add them so any duplicates (or future validations) are checked
if len(groups) == expected_quantity:
for group in groups:
add_sn(group)
if len(errors) > 0:
raise ValidationError(errors)
return numbers
for group in groups:
group = group.strip()
# Hyphen indicates a range of numbers
if '-' in group:
items = group.split('-')
if len(items) == 2 and all([i.isnumeric() for i in items]):
if len(items) == 2:
a = items[0].strip()
b = items[1].strip()
@@ -605,16 +423,16 @@ def extract_serial_numbers(serials, expected_quantity, next_number: int):
if a < b:
for n in range(a, b + 1):
add_sn(n)
number_add(n)
else:
errors.append(_("Invalid group range: {g}").format(g=group))
errors.append(_("Invalid group: {g}").format(g=group))
except ValueError:
errors.append(_("Invalid group: {g}").format(g=group))
continue
else:
# More than 2 hyphens or non-numeric group so add without interpolating
add_sn(group)
errors.append(_("Invalid group: {g}").format(g=group))
continue
# plus signals either
# 1: 'start+': expected number of serials, starting at start
@@ -632,21 +450,20 @@ def extract_serial_numbers(serials, expected_quantity, next_number: int):
# case 1
else:
end = start + (expected_quantity - len(numbers))
end = start + expected_quantity
for n in range(start, end):
add_sn(n)
number_add(n)
# no case
else:
errors.append(_("Invalid group sequence: {g}").format(g=group))
errors.append(_("Invalid group: {g}").format(g=group))
continue
# At this point, we assume that the "group" is just a single serial value
elif group:
add_sn(group)
# No valid input group detected
else:
raise ValidationError(_(f"Invalid/no group {group}"))
if group in numbers:
errors.append(_("Duplicate serial: {g}".format(g=group)))
else:
numbers.append(group)
if len(errors) > 0:
raise ValidationError(errors)
@@ -655,14 +472,15 @@ def extract_serial_numbers(serials, expected_quantity, next_number: int):
raise ValidationError([_("No serial numbers found")])
# The number of extracted serial numbers must match the expected quantity
if expected_quantity != len(numbers):
raise ValidationError([_("Number of unique serial numbers ({s}) must match quantity ({q})").format(s=len(numbers), q=expected_quantity)])
if not expected_quantity == len(numbers):
raise ValidationError([_("Number of unique serial number ({s}) must match quantity ({q})").format(s=len(numbers), q=expected_quantity)])
return numbers
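An illustrative call showing the patterns supported by the new three-argument form of `extract_serial_numbers` (range, `~` placeholder, `<start>+<length>`); the values are made up:

```python
from InvenTree.helpers import extract_serial_numbers

# '1-3'   -> serials 1, 2, 3
# '~'     -> replaced by next_number (100)
# '200+2' -> two serials starting at 200
serials = extract_serial_numbers("1-3, ~, 200+2", expected_quantity=6, next_number=100)
print(serials)  # [1, 2, 3, 100, 200, 201]
```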
def validateFilterString(value, model=None):
"""Validate that a provided filter string looks like a list of comma-separated key=value pairs.
"""
Validate that a provided filter string looks like a list of comma-separated key=value pairs
These should nominally match to a valid database filter based on the model being filtered.
@@ -677,6 +495,7 @@ def validateFilterString(value, model=None):
Returns a map of key:value pairs
"""
# Empty results map
results = {}
@@ -692,7 +511,7 @@ def validateFilterString(value, model=None):
pair = group.split('=')
if len(pair) != 2:
if not len(pair) == 2:
raise ValidationError(
"Invalid group: {g}".format(g=group)
)
@@ -722,36 +541,51 @@ def validateFilterString(value, model=None):
def addUserPermission(user, permission):
"""Shortcut function for adding a certain permission to a user."""
"""
Shortcut function for adding a certain permission to a user.
"""
perm = Permission.objects.get(codename=permission)
user.user_permissions.add(perm)
def addUserPermissions(user, permissions):
"""Shortcut function for adding multiple permissions to a user."""
"""
Shortcut function for adding multiple permissions to a user.
"""
for permission in permissions:
addUserPermission(user, permission)
def getMigrationFileNames(app):
"""Return a list of all migration filenames for provided app."""
local_dir = Path(__file__).parent
files = local_dir.joinpath('..', app, 'migrations').iterdir()
"""
Return a list of all migration filenames for provided app
"""
local_dir = os.path.dirname(os.path.abspath(__file__))
migration_dir = os.path.join(local_dir, '..', app, 'migrations')
files = os.listdir(migration_dir)
# Regex pattern for migration files
regex = re.compile(r"^[\d]+_.*\.py$")
pattern = r"^[\d]+_.*\.py$"
migration_files = []
for f in files:
if regex.match(f.name):
migration_files.append(f.name)
if re.match(pattern, f):
migration_files.append(f)
return migration_files
def getOldestMigrationFile(app, exclude_extension=True, ignore_initial=True):
"""Return the filename associated with the oldest migration."""
"""
Return the filename associated with the oldest migration
"""
oldest_num = -1
oldest_file = None
@@ -773,7 +607,10 @@ def getOldestMigrationFile(app, exclude_extension=True, ignore_initial=True):
def getNewestMigrationFile(app, exclude_extension=True):
"""Return the filename associated with the newest migration."""
"""
Return the filename associated with the newest migration
"""
newest_file = None
newest_num = -1
@@ -791,7 +628,8 @@ def getNewestMigrationFile(app, exclude_extension=True):
def clean_decimal(number):
"""Clean-up decimal value."""
""" Clean-up decimal value """
# Check if empty
if number is None or number == '' or number == 0:
return Decimal(0)
@@ -824,110 +662,3 @@ def clean_decimal(number):
return Decimal(0)
return clean_number.quantize(Decimal(1)) if clean_number == clean_number.to_integral() else clean_number.normalize()
def get_objectreference(obj, type_ref: str = 'content_type', object_ref: str = 'object_id'):
"""Lookup method for the GenericForeignKey fields.
Attributes:
- obj: object that will be resolved
- type_ref: field name for the contenttype field in the model
- object_ref: field name for the object id in the model
Example implementation in the serializer:
```
target = serializers.SerializerMethodField()
def get_target(self, obj):
return get_objectreference(obj, 'target_content_type', 'target_object_id')
```
The method name must always be the name of the field prefixed by 'get_'
"""
model_cls = getattr(obj, type_ref)
obj_id = getattr(obj, object_ref)
# check if references are set -> return nothing if not
if model_cls is None or obj_id is None:
return None
# resolve referenced data into objects
model_cls = model_cls.model_class()
try:
item = model_cls.objects.get(id=obj_id)
except model_cls.DoesNotExist:
return None
url_fnc = getattr(item, 'get_absolute_url', None)
# create output
ret = {}
if url_fnc:
ret['link'] = url_fnc()
return {
'name': str(item),
'model': str(model_cls._meta.verbose_name),
**ret
}
def inheritors(cls):
"""Return all classes that are subclasses from the supplied cls."""
subcls = set()
work = [cls]
while work:
parent = work.pop()
for child in parent.__subclasses__():
if child not in subcls:
subcls.add(child)
work.append(child)
return subcls
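A quick illustration of what `inheritors()` returns (assumes `InvenTree.helpers` is importable; the example classes are local placeholders):

```python
from InvenTree.helpers import inheritors

class Base: ...
class A(Base): ...
class B(A): ...

# Returns both direct and indirect subclasses as a set
print(inheritors(Base))  # {<class 'A'>, <class 'B'>}
```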
class InvenTreeTestCase(UserMixin, TestCase):
"""Testcase with user setup buildin."""
pass
def notify_responsible(instance, sender, content: NotificationBody = InvenTreeNotificationBodies.NewOrder, exclude=None):
"""Notify all responsible parties of a change in an instance.
Parses the supplied content with the provided instance and sender and sends a notification to all responsible users,
excluding the optional excluded list.
Args:
instance: The newly created instance
sender: Sender model reference
content (NotificationBody, optional): Notification content to parse and send. Defaults to InvenTreeNotificationBodies.NewOrder.
exclude (User, optional): User instance that should be excluded. Defaults to None.
"""
if instance.responsible is not None:
# Setup context for notification parsing
content_context = {
'instance': str(instance),
'verbose_name': sender._meta.verbose_name,
'app_label': sender._meta.app_label,
'model_name': sender._meta.model_name,
}
# Setup notification context
context = {
'instance': instance,
'name': content.name.format(**content_context),
'message': content.message.format(**content_context),
'link': InvenTree.helpers.construct_absolute_url(instance.get_absolute_url()),
'template': {
'html': content.template.format(**content_context),
'subject': content.name.format(**content_context),
}
}
# Create notification
trigger_notification(
instance,
content.slug.format(**content_context),
targets=[instance.responsible],
target_exclude=[exclude],
context=context,
)
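A hedged sketch of how `notify_responsible` might be invoked when an order is created; the model and handler names are illustrative only and not taken from the diff:

```python
from InvenTree.helpers import notify_responsible
from common.notifications import InvenTreeNotificationBodies

def on_order_created(order, user):
    # 'order.responsible' must be set for a notification to be generated
    notify_responsible(
        instance=order,
        sender=order.__class__,
        content=InvenTreeNotificationBodies.NewOrder,
        exclude=user,  # do not notify the user who triggered the change
    )
```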

View File

@@ -1,38 +1,38 @@
"""Custom management command to cleanup old settings that are not defined anymore."""
import logging
"""
Custom management command to cleanup old settings that are not defined anymore
"""
from django.core.management.base import BaseCommand
logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Cleanup old (undefined) settings in the database."""
"""
Cleanup old (undefined) settings in the database
"""
def handle(self, *args, **kwargs):
"""Cleanup old (undefined) settings in the database."""
logger.info("Collecting settings")
print("Collecting settings")
from common.models import InvenTreeSetting, InvenTreeUserSetting
# general settings
db_settings = InvenTreeSetting.objects.all()
model_settings = InvenTreeSetting.SETTINGS
model_settings = InvenTreeSetting.GLOBAL_SETTINGS
# check if the key exists and delete it if not
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
logger.info(f"deleted setting '{setting.key}'")
print(f"deleted setting '{setting.key}'")
# user settings
db_settings = InvenTreeUserSetting.objects.all()
model_settings = InvenTreeUserSetting.SETTINGS
model_settings = InvenTreeUserSetting.GLOBAL_SETTINGS
# check if the key exists and delete it if not
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
logger.info(f"deleted user setting '{setting.key}'")
print(f"deleted user setting '{setting.key}'")
logger.info("checked all settings")
print("checked all settings")

View File

@@ -1,17 +1,19 @@
"""Custom management command to prerender files."""
"""
Custom management command to prerender files
"""
from django.core.management.base import BaseCommand
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.module_loading import import_string
from django.http.request import HttpRequest
from django.utils.translation import override as lang_over
import os
from django.conf import settings
from django.core.management.base import BaseCommand
from django.http.request import HttpRequest
from django.template.loader import render_to_string
from django.utils.module_loading import import_string
from django.utils.translation import override as lang_over
def render_file(file_name, source, target, locales, ctx):
"""Renders a file into all provided locales."""
""" renders a file into all provided locales """
for locale in locales:
target_file = os.path.join(target, locale + '.' + file_name)
with open(target_file, 'w') as localised_file:
@@ -21,10 +23,11 @@ def render_file(file_name, source, target, locales, ctx):
class Command(BaseCommand):
"""Django command to prerender files."""
"""
django command to prerender files
"""
def handle(self, *args, **kwargs):
"""Django command to prerender files."""
# static directories
LC_DIR = settings.LOCALE_PATHS[0]
SOURCE_DIR = settings.STATICFILES_I18_SRC

View File

@@ -1,4 +1,5 @@
"""Custom management command to rebuild all MPTT models.
"""
Custom management command to rebuild all MPTT models
- This is crucial after importing any fixtures, etc
"""
@@ -7,17 +8,19 @@ from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""Rebuild all database models which leverage the MPTT structure."""
"""
Rebuild all database models which leverage the MPTT structure.
"""
def handle(self, *args, **kwargs):
"""Rebuild all database models which leverage the MPTT structure."""
# Part model
try:
print("Rebuilding Part objects")
from part.models import Part
Part.objects.rebuild()
except Exception:
except:
print("Error rebuilding Part objects")
# Part category
@@ -26,7 +29,7 @@ class Command(BaseCommand):
from part.models import PartCategory
PartCategory.objects.rebuild()
except Exception:
except:
print("Error rebuilding PartCategory objects")
# StockItem model
@@ -35,7 +38,7 @@ class Command(BaseCommand):
from stock.models import StockItem
StockItem.objects.rebuild()
except Exception:
except:
print("Error rebuilding StockItem objects")
# StockLocation model
@@ -44,7 +47,7 @@ class Command(BaseCommand):
from stock.models import StockLocation
StockLocation.objects.rebuild()
except Exception:
except:
print("Error rebuilding StockLocation objects")
# Build model
@@ -53,5 +56,5 @@ class Command(BaseCommand):
from build.models import Build
Build.objects.rebuild()
except Exception:
except:
print("Error rebuilding Build objects")

View File

@@ -1,56 +0,0 @@
"""Custom management command to rebuild thumbnail images.
- May be required after importing a new dataset, for example
"""
import logging
from django.core.management.base import BaseCommand
from django.db.utils import OperationalError, ProgrammingError
from PIL import UnidentifiedImageError
from company.models import Company
from part.models import Part
logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Rebuild all thumbnail images."""
def rebuild_thumbnail(self, model):
"""Rebuild the thumbnail specified by the "image" field of the provided model."""
if not model.image:
return
img = model.image
logger.info(f"Generating thumbnail image for '{img}'")
try:
model.image.render_variations(replace=False)
except FileNotFoundError:
logger.warning(f"Warning: Image file '{img}' is missing")
except UnidentifiedImageError:
logger.warning(f"Warning: Image file '{img}' is not a valid image")
def handle(self, *args, **kwargs):
"""Rebuild all thumbnail images."""
logger.info("Rebuilding Part thumbnails")
for part in Part.objects.exclude(image=None):
try:
self.rebuild_thumbnail(part)
except (OperationalError, ProgrammingError):
logger.error("ERROR: Database read error.")
break
logger.info("Rebuilding Company thumbnails")
for company in Company.objects.exclude(image=None):
try:
self.rebuild_thumbnail(company)
except (OperationalError, ProgrammingError):
logger.error("ERROR: abase read error.")
break

View File

@@ -1,33 +0,0 @@
"""Custom management command to remove MFA for a user."""
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""Remove MFA for a user."""
def add_arguments(self, parser):
"""Add the arguments."""
parser.add_argument('mail', type=str)
def handle(self, *args, **kwargs):
"""Remove MFA for the supplied user (by mail)."""
# general settings
mail = kwargs.get('mail')
if not mail:
raise KeyError('A mail is required')
user = get_user_model()
mfa_user = [*set(user.objects.filter(email=mail) | user.objects.filter(emailaddress__email=mail))]
if len(mfa_user) == 0:
print('No user with this mail associated')
elif len(mfa_user) > 1:
print('More than one user found with this mail')
else:
# and clean out all MFA methods
# backup codes
mfa_user[0].staticdevice_set.all().delete()
# TOTP tokens
mfa_user[0].totpdevice_set.all().delete()
print(f'Removed all MFA methods for user {str(mfa_user[0])}')

View File

@@ -1,17 +1,22 @@
"""Custom management command, wait for the database to be ready!"""
"""
Custom management command, wait for the database to be ready!
"""
from django.core.management.base import BaseCommand
from django.db import connection
from django.db.utils import OperationalError, ImproperlyConfigured
import time
from django.core.management.base import BaseCommand
from django.db import connection
from django.db.utils import ImproperlyConfigured, OperationalError
class Command(BaseCommand):
"""Django command to pause execution until the database is ready."""
"""
django command to pause execution until the database is ready
"""
def handle(self, *args, **kwargs):
"""Wait till the database is ready."""
self.stdout.write("Waiting for database...")
connected = False

View File

@@ -1,20 +1,23 @@
"""Custom metadata for DRF."""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from rest_framework import serializers
from rest_framework.fields import empty
from rest_framework.metadata import SimpleMetadata
from rest_framework.utils import model_meta
from rest_framework.fields import empty
import users.models
from InvenTree.helpers import str2bool
logger = logging.getLogger('inventree')
class InvenTreeMetadata(SimpleMetadata):
"""Custom metadata class for the DRF API.
"""
Custom metadata class for the DRF API.
This custom metadata class imits the available "actions",
based on the user's role permissions.
@@ -24,31 +27,16 @@ class InvenTreeMetadata(SimpleMetadata):
Additionally, we include some extra information about database models,
so we can perform lookup for ForeignKey related fields.
"""
def determine_metadata(self, request, view):
"""Overwrite the metadata to adapt to hte request user."""
self.request = request
self.view = view
metadata = super().determine_metadata(request, view)
"""
Custom context information to pass through to the OPTIONS endpoint,
if the "context=True" is supplied to the OPTIONS requst
Serializer class can supply context data by defining a get_context_data() method (no arguments)
"""
context = {}
if str2bool(request.query_params.get('context', False)):
if hasattr(self, 'serializer') and hasattr(self.serializer, 'get_context_data'):
context = self.serializer.get_context_data()
metadata['context'] = context
user = request.user
if user is None:
@@ -70,36 +58,30 @@ class InvenTreeMetadata(SimpleMetadata):
actions = metadata.get('actions', None)
if actions is None:
actions = {}
if actions is not None:
check = users.models.RuleSet.check_table_permission
check = users.models.RuleSet.check_table_permission
# Map the request method to a permission type
rolemap = {
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
# Map the request method to a permission type
rolemap = {
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
# Remove any HTTP methods that the user does not have permission for
for method, permission in rolemap.items():
# Remove any HTTP methods that the user does not have permission for
for method, permission in rolemap.items():
if method in actions and not check(user, table, permission):
del actions[method]
result = check(user, table, permission)
# Add a 'DELETE' action if we are allowed to delete
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
actions['DELETE'] = True
if method in actions and not result:
del actions[method]
# Add a 'DELETE' action if we are allowed to delete
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
actions['DELETE'] = True
# Add a 'VIEW' action if we are allowed to view
if 'GET' in view.allowed_methods and check(user, table, 'view'):
actions['GET'] = True
metadata['actions'] = actions
# Add a 'VIEW' action if we are allowed to view
if 'GET' in view.allowed_methods and check(user, table, 'view'):
actions['GET'] = True
except AttributeError:
# We will assume that if the serializer class does *not* have a Meta
@@ -109,56 +91,32 @@ class InvenTreeMetadata(SimpleMetadata):
return metadata
def get_serializer_info(self, serializer):
"""Override get_serializer_info so that we can add 'default' values to any fields whose Meta.model specifies a default value."""
self.serializer = serializer
"""
Override get_serializer_info so that we can add 'default' values
to any fields whose Meta.model specifies a default value
"""
serializer_info = super().get_serializer_info(serializer)
model_class = None
# Extra attributes to copy from the model field to the serializer field (if they don't already exist)
extra_attributes = [
'help_text',
'max_length',
]
try:
model_class = serializer.Meta.model
ModelClass = serializer.Meta.model
model_fields = model_meta.get_field_info(model_class)
model_default_func = getattr(model_class, 'api_defaults', None)
if model_default_func:
model_default_values = model_class.api_defaults(self.request)
else:
model_default_values = {}
model_fields = model_meta.get_field_info(ModelClass)
# Iterate through simple fields
for name, field in model_fields.fields.items():
if name in serializer_info.keys():
if field.has_default() and name in serializer_info.keys():
if field.has_default():
default = field.default
default = field.default
if callable(default):
try:
default = default()
except:
continue
if callable(default):
try:
default = default()
except Exception:
continue
serializer_info[name]['default'] = default
elif name in model_default_values:
serializer_info[name]['default'] = model_default_values[name]
for attr in extra_attributes:
if attr not in serializer_info[name]:
if hasattr(field, attr):
serializer_info[name][attr] = getattr(field, attr)
serializer_info[name]['default'] = default
# Iterate through relations
for name, relation in model_fields.relations.items():
@@ -175,12 +133,8 @@ class InvenTreeMetadata(SimpleMetadata):
# This is used to automatically filter AJAX requests
serializer_info[name]['filters'] = relation.model_field.get_limit_choices_to()
for attr in extra_attributes:
if attr not in serializer_info[name] and hasattr(relation.model_field, attr):
serializer_info[name][attr] = getattr(relation.model_field, attr)
if name in model_default_values:
serializer_info[name]['default'] = model_default_values[name]
if 'help_text' not in serializer_info[name] and hasattr(relation.model_field, 'help_text'):
serializer_info[name]['help_text'] = relation.model_field.help_text
except AttributeError:
pass
@@ -191,27 +145,18 @@ class InvenTreeMetadata(SimpleMetadata):
# Extract extra information if an instance is available
if hasattr(serializer, 'instance'):
instance = serializer.instance
if instance is None and model_class is not None:
# Attempt to find the instance based on kwargs lookup
kwargs = getattr(self.view, 'kwargs', None)
if kwargs:
pk = None
for field in ['pk', 'id', 'PK', 'ID']:
if field in kwargs:
pk = kwargs[field]
break
if pk is not None:
try:
instance = model_class.objects.get(pk=pk)
except (ValueError, model_class.DoesNotExist):
pass
if instance is None:
try:
instance = self.view.get_object()
except:
pass
if instance is not None:
"""If there is an instance associated with this API View, introspect that instance to find any specific API info."""
"""
If there is an instance associated with this API View,
introspect that instance to find any specific API info.
"""
if hasattr(instance, 'api_instance_filters'):
@@ -233,15 +178,18 @@ class InvenTreeMetadata(SimpleMetadata):
return serializer_info
def get_field_info(self, field):
"""Given an instance of a serializer field, return a dictionary of metadata about it.
"""
Given an instance of a serializer field, return a dictionary
of metadata about it.
We take the regular DRF metadata and add our own unique flavor
"""
# Run super method first
field_info = super().get_field_info(field)
# If a default value is specified for the serializer field, add it!
if 'default' not in field_info and field.default != empty:
if 'default' not in field_info and not field.default == empty:
field_info['default'] = field.get_default()
# Force non-nullable fields to read as "required"
@@ -251,7 +199,7 @@ class InvenTreeMetadata(SimpleMetadata):
# Introspect writable related fields
if field_info['type'] == 'field' and not field_info['read_only']:
# If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
if isinstance(field, serializers.PrimaryKeyRelatedField):
model = field.queryset.model

View File

@@ -1,52 +1,48 @@
"""Middleware for InvenTree."""
import logging
import sys
from django.conf import settings
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
from django.http import HttpResponse
from django.shortcuts import HttpResponseRedirect
from django.urls import reverse_lazy
from django.db import connection
from django.shortcuts import redirect
from django.urls import Resolver404, include, re_path, reverse_lazy
import logging
import time
import operator
from allauth_2fa.middleware import (AllauthTwoFactorMiddleware,
BaseRequire2FAMiddleware)
from error_report.middleware import ExceptionProcessor
from rest_framework.authtoken.models import Token
from common.models import InvenTreeSetting
from InvenTree.urls import frontendpatterns
logger = logging.getLogger("inventree")
class AuthRequiredMiddleware(object):
"""Check for user to be authenticated."""
def __init__(self, get_response):
"""Save response object."""
self.get_response = get_response
def __call__(self, request):
"""Check if user needs to be authenticated and is.
Redirects to login if not authenticated.
"""
# Code to be executed for each request before
# the view (and later middleware) are called.
assert hasattr(request, 'user')
# API requests are handled by the DRF library
if request.path_info.startswith('/api/'):
return self.get_response(request)
response = self.get_response(request)
if not request.user.is_authenticated:
"""
Normally, a web-based session would use csrftoken based authentication.
However when running an external application (e.g. the InvenTree app),
we wish to use token-based auth to grab media files.
However when running an external application (e.g. the InvenTree app or Python library),
we must validate the user token manually.
So, we will allow token-based authentication but ONLY for the /media/ directory.
What problem is this solving?
- The InvenTree mobile app does not use csrf token auth
- Token auth is used by the Django REST framework, but that is under the /api/ endpoint
- Media files (e.g. Part images) are required to be served to the app
- We do not want to make /media/ files accessible without login!
There is PROBABLY a better way of going about this?
a) Allow token-based authentication against a user?
b) Serve /media/ files in a duplicate location e.g. /api/media/ ?
c) Is there a "standard" way of solving this problem?
My [google|stackoverflow]-fu has failed me. So this hack has been created.
"""
authorized = False
@@ -60,104 +56,104 @@ class AuthRequiredMiddleware(object):
elif request.path_info.startswith('/accounts/'):
authorized = True
elif 'Authorization' in request.headers.keys() or 'authorization' in request.headers.keys():
auth = request.headers.get('Authorization', request.headers.get('authorization')).strip()
elif 'Authorization' in request.headers.keys():
auth = request.headers['Authorization'].strip()
if auth.lower().startswith('token') and len(auth.split()) == 2:
token_key = auth.split()[1]
if auth.startswith('Token') and len(auth.split()) == 2:
token = auth.split()[1]
# Does the provided token match a valid user?
try:
token = Token.objects.get(key=token_key)
if Token.objects.filter(key=token).exists():
# Provide the user information to the request
request.user = token.user
authorized = True
allowed = ['/api/', '/media/']
except Token.DoesNotExist:
logger.warning(f"Access denied for unknown token {token_key}")
# Only allow token-auth for /media/ or /static/ dirs!
if any([request.path_info.startswith(a) for a in allowed]):
authorized = True
# No authorization was found for the request
if not authorized:
# A logout request will redirect the user to the login screen
if request.path_info == reverse_lazy('logout'):
return HttpResponseRedirect(reverse_lazy('login'))
path = request.path_info
# List of URL endpoints we *do not* want to redirect to
urls = [
reverse_lazy('account_login'),
reverse_lazy('account_logout'),
reverse_lazy('login'),
reverse_lazy('logout'),
reverse_lazy('admin:login'),
reverse_lazy('admin:logout'),
]
# Do not redirect requests to any of these paths
paths_ignore = [
'/api/',
'/js/',
'/media/',
'/static/',
]
if path not in urls and not any([path.startswith(p) for p in paths_ignore]):
if path not in urls and not path.startswith('/api/'):
# Save the 'next' parameter to pass through to the login view
return redirect(f'{reverse_lazy("account_login")}?next={request.path}')
return redirect('%s?next=%s' % (reverse_lazy('login'), request.path))
else:
# Return a 401 (Unauthorized) response code for this request
return HttpResponse('Unauthorized', status=401)
response = self.get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
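The docstring in the middleware above describes how an external client (e.g. the InvenTree app) can fetch media files using token authentication instead of a session cookie. A hedged sketch of that client-side behaviour, with placeholder host and token values:

```python
import requests

response = requests.get(
    "http://inventree.example.com/media/part_images/widget.png",
    headers={"Authorization": "Token 0123456789abcdef"},
)
response.raise_for_status()
open("widget.png", "wb").write(response.content)
```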
url_matcher = re_path('', include(frontendpatterns))
class QueryCountMiddleware(object):
"""
This middleware will log the number of queries run
and the total time taken for each request (with a
status code of 200). It does not currently support
multi-db setups.
To enable this middleware, set 'log_queries: True' in the local InvenTree config file.
class Check2FAMiddleware(BaseRequire2FAMiddleware):
"""Check if user is required to have MFA enabled."""
def require_2fa(self, request):
"""Use setting to check if MFA should be enforced for frontend page."""
try:
if url_matcher.resolve(request.path[1:]):
return InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA')
except Resolver404:
pass
return False
Reference: https://www.dabapps.com/blog/logging-sql-queries-django-13/
Note: 2020-08-15 - This is no longer used, instead we now rely on the django-debug-toolbar addon
"""
class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
"""This function ensures only frontend code triggers the MFA auth cycle."""
def process_request(self, request):
"""Check if requested url is forntend and enforce MFA check."""
try:
if not url_matcher.resolve(request.path[1:]):
super().process_request(request)
except Resolver404:
pass
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
class InvenTreeRemoteUserMiddleware(PersistentRemoteUserMiddleware):
"""Middleware to check if HTTP-header based auth is enabled and to set it up."""
header = settings.REMOTE_LOGIN_HEADER
t_start = time.time()
response = self.get_response(request)
t_stop = time.time()
def process_request(self, request):
"""Check if proxy login is enabled."""
if not settings.REMOTE_LOGIN:
return
if response.status_code == 200:
total_time = 0
return super().process_request(request)
if len(connection.queries) > 0:
queries = {}
class InvenTreeExceptionProcessor(ExceptionProcessor):
"""Custom exception processor that respects blocked errors."""
for query in connection.queries:
query_time = query.get('time')
def process_exception(self, request, exception):
"""Check if kind is ignored before procesing."""
kind, info, data = sys.exc_info()
sql = query.get('sql').split('.')[0]
# Check if the error is on the ignore list
if kind in settings.IGNORED_ERRORS:
return
if sql in queries:
queries[sql] += 1
else:
queries[sql] = 1
return super().process_exception(request, exception)
if query_time is None:
# django-debug-toolbar monkeypatches the connection
# cursor wrapper and adds extra information in each
# item in connection.queries. The query time is stored
# under the key "duration" rather than "time" and is
# in milliseconds, not seconds.
query_time = float(query.get('duration', 0))
total_time += float(query_time)
logger.debug('{n} queries run, {a:.3f}s / {b:.3f}s'.format(
n=len(connection.queries),
a=total_time,
b=(t_stop - t_start)))
for x in sorted(queries.items(), key=operator.itemgetter(1), reverse=True):
print(x[0], ':', x[1])
return response

View File

@@ -1,90 +0,0 @@
"""Mixins for (API) views in the whole project."""
from bleach import clean
from rest_framework import generics, status
from rest_framework.response import Response
class CleanMixin():
"""Model mixin class which cleans inputs."""
# Define a map of fields available for import
SAFE_FIELDS = {}
def create(self, request, *args, **kwargs):
"""Override to clean data before processing it."""
serializer = self.get_serializer(data=self.clean_data(request.data))
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
def update(self, request, *args, **kwargs):
"""Override to clean data before processing it."""
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer = self.get_serializer(instance, data=self.clean_data(request.data), partial=partial)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
if getattr(instance, '_prefetched_objects_cache', None):
# If 'prefetch_related' has been applied to a queryset, we need to
# forcibly invalidate the prefetch cache on the instance.
instance._prefetched_objects_cache = {}
return Response(serializer.data)
def clean_data(self, data: dict) -> dict:
"""Clean / sanitize data.
This uses Mozilla's bleach under the hood to disable certain HTML tags by
encoding them - this means that script tags etc. do not work.
The results can be longer than the input; it might make some character combinations
`ugly`. Prevents XSS at the server level.
Args:
data (dict): Data that should be sanitized.
Returns:
dict: Provided data, sanitized; still in the same order.
"""
clean_data = {}
for k, v in data.items():
if isinstance(v, str):
ret = clean(v)
elif isinstance(v, dict):
ret = self.clean_data(v)
else:
ret = v
clean_data[k] = ret
return clean_data
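A small illustration of the sanitisation behaviour described above: bleach encodes disallowed tags rather than executing them. The exact escaping may vary slightly with the installed bleach version.

```python
from bleach import clean

print(clean('<script>alert("hi")</script>'))
# -> '&lt;script&gt;alert("hi")&lt;/script&gt;'
```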
class ListAPI(generics.ListAPIView):
"""View for list API."""
class ListCreateAPI(CleanMixin, generics.ListCreateAPIView):
"""View for list and create API."""
class CreateAPI(CleanMixin, generics.CreateAPIView):
"""View for create API."""
class RetrieveAPI(generics.RetrieveAPIView):
"""View for retreive API."""
pass
class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
"""View for retrieve and update API."""
pass
class RetrieveUpdateDestroyAPI(CleanMixin, generics.RetrieveUpdateDestroyAPIView):
"""View for retrieve, update and destroy API."""
class UpdateAPI(CleanMixin, generics.UpdateAPIView):
"""View for update API."""

View File

@@ -1,364 +1,45 @@
"""Generic models which provide extra functionality over base Django model types."""
"""
Generic models which provide extra functionality over base Django model types.
"""
from __future__ import unicode_literals
import logging
import os
import re
from datetime import datetime
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import ValidationError
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from error_report.models import Error
from mptt.exceptions import InvalidMove
from mptt.models import MPTTModel, TreeForeignKey
from mptt.exceptions import InvalidMove
import InvenTree.format
import InvenTree.helpers
from common.models import InvenTreeSetting
from InvenTree.fields import InvenTreeURLField
logger = logging.getLogger('inventree')
from .validators import validate_tree_name
def rename_attachment(instance, filename):
"""Function for renaming an attachment file. The subdirectory for the uploaded file is determined by the implementing class.
"""
Function for renaming an attachment file.
The subdirectory for the uploaded file is determined by the implementing class.
Args:
Args:
instance: Instance of a PartAttachment object
filename: name of uploaded file
Returns:
path to store file, format: '<subdir>/<id>/filename'
"""
# Construct a path to store a file attachment for a given model type
return os.path.join(instance.getSubdir(), filename)
class DataImportMixin(object):
"""Model mixin class which provides support for 'data import' functionality.
Models which implement this mixin should provide information on the fields available for import
"""
# Define a map of fields avaialble for import
IMPORT_FIELDS = {}
@classmethod
def get_import_fields(cls):
"""Return all available import fields.
Where information on a particular field is not explicitly provided,
introspect the base model to (attempt to) find that information.
"""
fields = cls.IMPORT_FIELDS
for name, field in fields.items():
# Attempt to extract base field information from the model
base_field = None
for f in cls._meta.fields:
if f.name == name:
base_field = f
break
if base_field:
if 'label' not in field:
field['label'] = base_field.verbose_name
if 'help_text' not in field:
field['help_text'] = base_field.help_text
fields[name] = field
return fields
@classmethod
def get_required_import_fields(cls):
"""Return all *required* import fields."""
fields = {}
for name, field in cls.get_import_fields().items():
required = field.get('required', False)
if required:
fields[name] = field
return fields
class ReferenceIndexingMixin(models.Model):
"""A mixin for keeping track of numerical copies of the "reference" field.
Here, we attempt to convert a "reference" field value (char) to an integer,
for performing fast natural sorting.
This requires extra database space (due to the extra table column),
but is required as not all supported database backends provide equivalent casting.
This mixin adds a field named 'reference_int'.
- If the 'reference' field can be cast to an integer, it is stored here
- If the 'reference' field *starts* with an integer, it is stored here
- Otherwise, we store zero
"""
# Name of the global setting which defines the required reference pattern for this model
REFERENCE_PATTERN_SETTING = None
@classmethod
def get_reference_pattern(cls):
"""Returns the reference pattern associated with this model.
This is defined by a global setting object, specified by the REFERENCE_PATTERN_SETTING attribute
"""
# By default, we return an empty string
if cls.REFERENCE_PATTERN_SETTING is None:
return ''
return InvenTreeSetting.get_setting(cls.REFERENCE_PATTERN_SETTING, create=False).strip()
@classmethod
def get_reference_context(cls):
"""Generate context data for generating the 'reference' field for this class.
- Returns a python dict object which contains the context data for formatting the reference string.
- The default implementation provides some default context information
"""
return {
'ref': cls.get_next_reference(),
'date': datetime.now(),
}
@classmethod
def get_most_recent_item(cls):
"""Return the item which is 'most recent'
In practice, this means the item with the highest reference value
"""
query = cls.objects.all().order_by('-reference_int', '-pk')
if query.exists():
return query.first()
else:
return None
@classmethod
def get_next_reference(cls):
"""Return the next available reference value for this particular class."""
# Find the "most recent" item
latest = cls.get_most_recent_item()
if not latest:
# No existing items
return 1
reference = latest.reference.strip()
try:
reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
except Exception:
# If reference cannot be extracted using the pattern, try just the integer value
reference = str(latest.reference_int)
# Attempt to perform 'intelligent' incrementing of the reference field
incremented = InvenTree.helpers.increment(reference)
try:
incremented = int(incremented)
except ValueError:
pass
return incremented
@classmethod
def generate_reference(cls):
"""Generate the next 'reference' field based on specified pattern"""
fmt = cls.get_reference_pattern()
ctx = cls.get_reference_context()
reference = None
attempts = set()
while reference is None:
try:
ref = fmt.format(**ctx)
if ref in attempts:
# We are stuck in a loop!
reference = ref
break
else:
attempts.add(ref)
if cls.objects.filter(reference=ref).exists():
# Handle case where we have duplicated an existing reference
ctx['ref'] = InvenTree.helpers.increment(ctx['ref'])
else:
# We have found an 'unused' reference
reference = ref
break
except Exception:
# If anything goes wrong, return the most recent reference
recent = cls.get_most_recent_item()
if recent:
reference = recent.reference
else:
reference = ""
return reference
@classmethod
def validate_reference_pattern(cls, pattern):
"""Ensure that the provided pattern is valid"""
ctx = cls.get_reference_context()
try:
info = InvenTree.format.parse_format_string(pattern)
except Exception:
raise ValidationError({
"value": _("Improperly formatted pattern"),
})
# Check that only 'allowed' keys are provided
for key in info.keys():
if key not in ctx.keys():
raise ValidationError({
"value": _("Unknown format key specified") + f": '{key}'"
})
# Check that the 'ref' variable is specified
if 'ref' not in info.keys():
raise ValidationError({
'value': _("Missing required format key") + ": 'ref'"
})
@classmethod
def validate_reference_field(cls, value):
"""Check that the provided 'reference' value matches the requisite pattern"""
pattern = cls.get_reference_pattern()
value = str(value).strip()
if len(value) == 0:
raise ValidationError(_("Reference field cannot be empty"))
# An 'empty' pattern means no further validation is required
if not pattern:
return
if not InvenTree.format.validate_string(value, pattern):
raise ValidationError(_("Reference must match required pattern") + ": " + pattern)
# Check that the reference field can be rebuilt
cls.rebuild_reference_field(value, validate=True)
class Meta:
"""Metaclass options. Abstract ensures no database table is created."""
abstract = True
@classmethod
def rebuild_reference_field(cls, reference, validate=False):
"""Extract integer out of reference for sorting.
If the 'integer' portion is buried somewhere 'within' the reference,
we can first try to extract it using the pattern.
Example:
reference - BO-123-ABC
pattern - BO-{ref}-???
extracted - 123
If we cannot extract using the pattern for some reason, fallback to the entire reference
"""
try:
# Extract named group based on provided pattern
reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
except Exception:
pass
reference_int = extract_int(reference)
if validate:
if reference_int > models.BigIntegerField.MAX_BIGINT:
raise ValidationError({
"reference": _("Reference number is too large")
})
return reference_int
reference_int = models.BigIntegerField(default=0)
def extract_int(reference, clip=0x7fffffff, allow_negative=False):
"""Extract an integer out of reference."""
# Default value if we cannot convert to an integer
ref_int = 0
reference = str(reference).strip()
# Ignore empty string
if len(reference) == 0:
return 0
# Look at the start of the string - can it be "integerized"?
result = re.match(r"^(\d+)", reference)
if result and len(result.groups()) == 1:
ref = result.groups()[0]
try:
ref_int = int(ref)
except Exception:
ref_int = 0
else:
# Look at the "end" of the string
result = re.search(r'(\d+)$', reference)
if result and len(result.groups()) == 1:
ref = result.groups()[0]
try:
ref_int = int(ref)
except Exception:
ref_int = 0
# Ensure that the returned values are within the range that can be stored in an IntegerField
# Note: This will result in large values being "clipped"
if clip is not None:
if ref_int > clip:
ref_int = clip
elif ref_int < -clip:
ref_int = -clip
if not allow_negative and ref_int < 0:
ref_int = abs(ref_int)
return ref_int
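A behaviour sketch for `extract_int()` (illustrative values only; assumes a configured Django environment where the module above is importable as `InvenTree.models`):

```python
from InvenTree.models import extract_int

print(extract_int("123-ABC"))         # 123 - leading digits are used first
print(extract_int("BO-0042"))         # 42  - otherwise falls back to trailing digits
print(extract_int("99999999999999"))  # 2147483647 - clipped to 0x7fffffff by default
```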
class InvenTreeAttachment(models.Model):
"""Provides an abstracted class for managing file attachments.
An attachment can be either an uploaded file, or an external URL
""" Provides an abstracted class for managing file attachments.
Attributes:
attachment: File
@@ -366,42 +47,19 @@ class InvenTreeAttachment(models.Model):
user: User associated with file upload
upload_date: Date the file was uploaded
"""
def getSubdir(self):
"""Return the subdirectory under which attachments should be stored.
"""
Return the subdirectory under which attachments should be stored.
Note: Re-implement this for each subclass of InvenTreeAttachment
"""
return "attachments"
def save(self, *args, **kwargs):
"""Provide better validation error."""
# Either 'attachment' or 'link' must be specified!
if not self.attachment and not self.link:
raise ValidationError({
'attachment': _('Missing file'),
'link': _('Missing external link'),
})
super().save(*args, **kwargs)
def __str__(self):
"""Human name for attachment."""
if self.attachment is not None:
return os.path.basename(self.attachment.name)
else:
return str(self.link)
return os.path.basename(self.attachment.name)
attachment = models.FileField(upload_to=rename_attachment, verbose_name=_('Attachment'),
help_text=_('Select file to attach'),
blank=True, null=True
)
link = InvenTreeURLField(
blank=True, null=True,
verbose_name=_('Link'),
help_text=_('Link to external URL')
)
help_text=_('Select file to attach'))
comment = models.CharField(blank=True, max_length=100, verbose_name=_('Comment'), help_text=_('File comment'))
@@ -417,70 +75,14 @@ class InvenTreeAttachment(models.Model):
@property
def basename(self):
"""Base name/path for attachment."""
if self.attachment:
return os.path.basename(self.attachment.name)
else:
return None
@basename.setter
def basename(self, fn):
"""Function to rename the attachment file.
- Filename cannot be empty
- Filename cannot contain illegal characters
- Filename must specify an extension
- Filename cannot match an existing file
"""
fn = fn.strip()
if len(fn) == 0:
raise ValidationError(_('Filename must not be empty'))
attachment_dir = settings.MEDIA_ROOT.joinpath(self.getSubdir())
old_file = settings.MEDIA_ROOT.joinpath(self.attachment.name)
new_file = settings.MEDIA_ROOT.joinpath(self.getSubdir(), fn).resolve()
# Check that there are no directory tricks going on...
if new_file.parent != attachment_dir:
logger.error(f"Attempted to rename attachment outside valid directory: '{new_file}'")
raise ValidationError(_("Invalid attachment directory"))
# Ignore further checks if the filename is not actually being renamed
if new_file == old_file:
return
forbidden = ["'", '"', "#", "@", "!", "&", "^", "<", ">", ":", ";", "/", "\\", "|", "?", "*", "%", "~", "`"]
for c in forbidden:
if c in fn:
raise ValidationError(_(f"Filename contains illegal character '{c}'"))
if len(fn.split('.')) < 2:
raise ValidationError(_("Filename missing extension"))
if not old_file.exists():
logger.error(f"Trying to rename attachment '{old_file}' which does not exist")
return
if new_file.exists():
raise ValidationError(_("Attachment with this filename already exists"))
try:
os.rename(old_file, new_file)
self.attachment.name = os.path.join(self.getSubdir(), fn)
self.save()
except Exception:
raise ValidationError(_("Error renaming file"))
return os.path.basename(self.attachment.name)
class Meta:
"""Metaclass options. Abstract ensures no database table is created."""
abstract = True
class InvenTreeTree(MPTTModel):
"""Provides an abstracted self-referencing tree model for data categories.
""" Provides an abstracted self-referencing tree model for data categories.
- Each Category has one parent Category, which can be blank (for a top-level Category).
- Each Category can have zero-or-more child Categor(y/ies)
@@ -492,7 +94,10 @@ class InvenTreeTree(MPTTModel):
"""
def api_instance_filters(self):
"""Instance filters for InvenTreeTree models."""
"""
Instance filters for InvenTreeTree models
"""
return {
'parent': {
'exclude_tree': self.pk,
@@ -500,40 +105,27 @@ class InvenTreeTree(MPTTModel):
}
def save(self, *args, **kwargs):
"""Custom save method for InvenTreeTree abstract model"""
try:
super().save(*args, **kwargs)
except InvalidMove:
# Provide better error for parent selection
raise ValidationError({
'parent': _("Invalid choice"),
})
# Re-calculate the 'pathstring' field
pathstring = InvenTree.helpers.constructPathString(
[item.name for item in self.path]
)
if pathstring != self.pathstring:
self.pathstring = pathstring
super().save(force_update=True)
class Meta:
"""Metaclass defines extra model properties."""
abstract = True
# Names must be unique at any given level in the tree
unique_together = ('name', 'parent')
class MPTTMeta:
"""Set insert order."""
order_insertion_by = ['name']
name = models.CharField(
blank=False,
max_length=100,
validators=[validate_tree_name],
verbose_name=_("Name"),
help_text=_("Name"),
)
@@ -553,17 +145,9 @@ class InvenTreeTree(MPTTModel):
verbose_name=_("parent"),
related_name='children')
# The 'pathstring' field is calculated each time the model is saved
pathstring = models.CharField(
blank=True,
max_length=250,
verbose_name=_('Path'),
help_text=_('Path')
)
@property
def item_count(self):
"""Return the number of items which exist *under* this node in the tree.
""" Return the number of items which exist *under* this node in the tree.
Here an 'item' is considered to be the 'leaf' at the end of each branch,
and the exact nature here will depend on the class implementation.
@@ -573,29 +157,30 @@ class InvenTreeTree(MPTTModel):
return 0
def getUniqueParents(self):
"""Return a flat set of all parent items that exist above this node.
""" Return a flat set of all parent items that exist above this node.
If any parents are repeated (which would be very bad!), the process is halted
"""
return self.get_ancestors()
def getUniqueChildren(self, include_self=True):
"""Return a flat set of all child items that exist under this node.
""" Return a flat set of all child items that exist under this node.
If any child items are repeated, the repetitions are omitted.
"""
return self.get_descendants(include_self=include_self)
@property
def has_children(self):
"""True if there are any children under this item."""
""" True if there are any children under this item """
return self.getUniqueChildren(include_self=False).count() > 0
def getAcceptableParents(self):
"""Returns a list of acceptable parent items within this model Acceptable parents are ones which are not underneath this item.
""" Returns a list of acceptable parent items within this model
Acceptable parents are ones which are not underneath this item.
Setting the parent of an item to its own child results in recursion.
"""
contents = ContentType.objects.get_for_model(type(self))
available = contents.get_all_objects_for_this_type()
@@ -613,16 +198,17 @@ class InvenTreeTree(MPTTModel):
@property
def parentpath(self):
"""Get the parent path of this category.
""" Get the parent path of this category
Returns:
List of category names from the top level to the parent of this category
"""
return [a for a in self.get_ancestors()]
@property
def path(self):
"""Get the complete part of this category.
""" Get the complete part of this category.
e.g. ["Top", "Second", "Third", "This"]
@@ -631,55 +217,28 @@ class InvenTreeTree(MPTTModel):
"""
return self.parentpath + [self]
@property
def pathstring(self):
""" Get a string representation for the path of this item.
e.g. "Top/Second/Third/This"
"""
return '/'.join([item.name for item in self.path])
def __str__(self):
"""String representation of a category is the full path to that category."""
""" String representation of a category is the full path to that category """
return "{path} - {desc}".format(path=self.pathstring, desc=self.description)
@receiver(pre_delete, sender=InvenTreeTree, dispatch_uid='tree_pre_delete_log')
def before_delete_tree_item(sender, instance, using, **kwargs):
"""Receives pre_delete signal from InvenTreeTree object.
""" Receives pre_delete signal from InvenTreeTree object.
Before an item is deleted, update each child object to point to the parent of the object being deleted.
"""
# Update each tree item below this one
for child in instance.children.all():
child.parent = instance.parent
child.save()
@receiver(post_save, sender=Error, dispatch_uid='error_post_save_notification')
def after_error_logged(sender, instance: Error, created: bool, **kwargs):
"""Callback when a server error is logged.
- Send a UI notification to all users with staff status
"""
if created:
try:
import common.notifications
users = get_user_model().objects.filter(is_staff=True)
link = InvenTree.helpers.construct_absolute_url(
reverse('admin:error_report_error_change', kwargs={'object_id': instance.pk})
)
context = {
'error': instance,
'name': _('Server Error'),
'message': _('An error has been logged by the server.'),
'link': link
}
common.notifications.trigger_notification(
instance,
'inventree.error_log',
context=context,
targets=users,
delivery_methods=set([common.notifications.UIMessageNotification]),
)
except Exception as exc:
"""We do not want to throw an exception while reporting an exception"""
logger.error(exc)


@@ -1,4 +1,5 @@
"""Permission set for InvenTree."""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import permissions
@@ -6,7 +7,9 @@ import users.models
class RolePermission(permissions.BasePermission):
"""Role mixin for API endpoints, allowing us to specify the user "role" which is required for certain operations.
"""
Role mixin for API endpoints, allowing us to specify the user "role"
which is required for certain operations.
Each endpoint can have one or more of the following actions:
- GET
@@ -25,10 +28,14 @@ class RolePermission(permissions.BasePermission):
to perform the specified action.
For example, a DELETE action will be rejected unless the user has the "part.remove" permission
"""
def has_permission(self, request, view):
"""Determine if the current user has the specified permissions."""
"""
Determine if the current user has the specified permissions
"""
user = request.user
# Superuser can do it all
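To make the role check concrete: the pattern is a lookup from HTTP method to a "<role>.<action>" permission string. The sketch below is hypothetical (the table and the required_permission() helper are illustrative names, not attributes of the class); it only mirrors the "part.remove" example quoted in the docstring above.

# Hypothetical illustration of the method -> role-permission mapping idea.
HTTP_ACTION = {
    'GET': 'view',
    'OPTIONS': 'view',
    'POST': 'add',
    'PUT': 'change',
    'PATCH': 'change',
    'DELETE': 'remove',   # matches the "part.remove" example in the docstring
}

def required_permission(role, method):
    """Build the permission string checked for a request, e.g. 'part.remove'."""
    return f"{role}.{HTTP_ACTION.get(method.upper(), 'view')}"

assert required_permission('part', 'DELETE') == 'part.remove'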


@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
import inspect
import importlib
import pkgutil
def iter_namespace(pkg):
return pkgutil.iter_modules(pkg.__path__, pkg.__name__ + ".")
def get_modules(pkg):
# Return all modules in a given package
return [importlib.import_module(name) for finder, name, ispkg in iter_namespace(pkg)]
def get_classes(module):
# Return all classes in a given module
return inspect.getmembers(module, inspect.isclass)
def get_plugins(pkg, baseclass):
"""
Return a list of all modules under a given package.
- Modules must be a subclass of the provided 'baseclass'
- Modules must have a non-empty PLUGIN_NAME parameter
"""
plugins = []
modules = get_modules(pkg)
# Iterate through each module in the package
for mod in modules:
# Iterate through each class in the module
for item in get_classes(mod):
plugin = item[1]
if issubclass(plugin, baseclass) and plugin.PLUGIN_NAME:
plugins.append(plugin)
return plugins
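The discovery primitives above (pkgutil.iter_modules plus inspect.getmembers) can be exercised against any installed package; a self-contained sketch using the stdlib json package purely for illustration. get_plugins() additionally filters the discovered classes for subclasses of the given base class with a non-empty PLUGIN_NAME.

import importlib
import inspect
import pkgutil
import json as pkg   # any package with submodules will do for the demo

modules = [
    importlib.import_module(name)
    for _finder, name, _ispkg in pkgutil.iter_modules(pkg.__path__, pkg.__name__ + ".")
]
for mod in modules:
    classes = [name for name, _cls in inspect.getmembers(mod, inspect.isclass)]
    print(mod.__name__, classes[:3])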


@@ -1,24 +1,25 @@
"""Functions to check if certain parts of InvenTree are ready."""
import sys
def isInTestMode():
"""Returns True if the database is in testing mode."""
return 'test' in sys.argv
"""
Returns True if the database is in testing mode
"""
if 'test' in sys.argv:
return True
def isImportingData():
"""Returns True if the database is currently importing data, e.g. 'loaddata' command is performed."""
return 'loaddata' in sys.argv
return False
def canAppAccessDatabase(allow_test=False):
"""Returns True if the apps.py file can access database records.
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
@@ -32,8 +33,7 @@ def canAppAccessDatabase(allow_test=False):
'createsuperuser',
'wait_for_db',
'prerender',
'rebuild_models',
'rebuild_thumbnails',
'rebuild',
'collectstatic',
'makemessages',
'compilemessages',


@@ -1,46 +1,44 @@
"""Serializers used in various InvenTree apps."""
"""
Serializers used in various InvenTree apps
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from collections import OrderedDict
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError as DjangoValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ugettext_lazy as _
import tablib
from djmoney.contrib.django_rest_framework.fields import MoneyField
from djmoney.money import Money
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.fields import empty
from rest_framework.serializers import DecimalField
from rest_framework.utils import model_meta
from common.models import InvenTreeSetting
from InvenTree.fields import InvenTreeRestURLField
from InvenTree.helpers import download_image_from_url
from rest_framework import serializers
from rest_framework.utils import model_meta
from rest_framework.fields import empty
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import DecimalField
class InvenTreeMoneySerializer(MoneyField):
"""Custom serializer for 'MoneyField', which ensures that passed values are numerically valid.
"""
Custom serializer for 'MoneyField',
which ensures that passed values are numerically valid
Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
"""
def __init__(self, *args, **kwargs):
"""Overrite default values."""
kwargs["max_digits"] = kwargs.get("max_digits", 19)
kwargs["decimal_places"] = kwargs.get("decimal_places", 4)
kwargs["required"] = kwargs.get("required", False)
super().__init__(*args, **kwargs)
def get_value(self, data):
"""Test that the returned amount is a valid Decimal."""
"""
Test that the returned amount is a valid Decimal
"""
amount = super(DecimalField, self).get_value(data)
# Convert an empty string to None
@@ -48,42 +46,53 @@ class InvenTreeMoneySerializer(MoneyField):
amount = None
try:
if amount is not None and amount is not empty:
if amount is not None:
amount = Decimal(amount)
except Exception:
raise ValidationError({
self.field_name: [_("Must be a valid number")],
})
except:
raise ValidationError(_("Must be a valid number"))
currency = data.get(get_currency_field_name(self.field_name), self.default_currency)
if currency and amount is not None and not isinstance(amount, MONEY_CLASSES) and amount is not empty:
return Money(amount, currency)
return amount
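A simplified, dependency-free sketch of the coercion performed by get_value() above (the real method raises a DRF ValidationError and wraps the result in Money together with the matching currency field):

from decimal import Decimal, InvalidOperation

def to_amount(raw):
    # '' is treated as "no value"; anything else must parse as a Decimal
    if raw == "":
        return None
    try:
        return Decimal(raw)
    except InvalidOperation:
        raise ValueError("Must be a valid number")

assert to_amount("") is None
assert to_amount("12.5000") == Decimal("12.5")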
class InvenTreeModelSerializer(serializers.ModelSerializer):
"""Inherits the standard Django ModelSerializer class, but also ensures that the underlying model class data are checked on validation."""
class UserSerializer(serializers.ModelSerializer):
""" Serializer for User - provides all fields """
# Switch out URLField mapping
serializer_field_mapping = {
**serializers.ModelSerializer.serializer_field_mapping,
models.URLField: InvenTreeRestURLField,
}
class Meta:
model = User
fields = '__all__'
class UserSerializerBrief(serializers.ModelSerializer):
""" Serializer for User - provides limited information """
class Meta:
model = User
fields = [
'pk',
'username',
]
class InvenTreeModelSerializer(serializers.ModelSerializer):
"""
Inherits the standard Django ModelSerializer class,
but also ensures that the underlying model class data are checked on validation.
"""
def __init__(self, instance=None, data=empty, **kwargs):
"""Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
# self.instance = instance
# If instance is None, we are creating a new instance
if instance is None and data is not empty:
if data is None:
data = OrderedDict()
else:
new_data = OrderedDict()
new_data.update(data)
data = new_data
# Required to side-step immutability of a QueryDict
data = data.copy()
# Add missing fields which have default values
ModelClass = self.Meta.model
@@ -94,7 +103,6 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
"""
Update the field IF (and ONLY IF):
- The field has a specified default value
- The field does not already have a value set
"""
@@ -106,7 +114,7 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
if callable(value):
try:
value = value()
except Exception:
except:
continue
data[field_name] = value
@@ -114,10 +122,11 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
super().__init__(instance, data, **kwargs)
def get_initial(self):
"""Construct initial data for the serializer.
"""
Construct initial data for the serializer.
Use the 'default' values specified by the django model definition
"""
initials = super().get_initial().copy()
# Are we creating a new instance?
@@ -136,7 +145,7 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
if callable(value):
try:
value = value()
except Exception:
except:
continue
initials[field_name] = value
@@ -144,7 +153,11 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
return initials
def save(self, **kwargs):
"""Catch any django ValidationError thrown at the moment `save` is called, and re-throw as a DRF ValidationError."""
"""
Catch any django ValidationError thrown at the moment save() is called,
and re-throw as a DRF ValidationError
"""
try:
super().save(**kwargs)
except (ValidationError, DjangoValidationError) as exc:
@@ -152,21 +165,13 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
return self.instance
def update(self, instance, validated_data):
"""Catch any django ValidationError, and re-throw as a DRF ValidationError."""
try:
instance = super().update(instance, validated_data)
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=serializers.as_serializer_error(exc))
return instance
def run_validation(self, data=empty):
"""Perform serializer validation.
"""
Perform serializer validation.
In addition to running validators on the serializer fields,
this class ensures that the underlying model is also validated.
"""
# Run any native validation checks first (may raise a ValidationError)
data = super().run_validation(data)
@@ -181,46 +186,22 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
# Update instance fields
for attr, value in data.items():
try:
setattr(instance, attr, value)
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=serializers.as_serializer_error(exc))
setattr(instance, attr, value)
# Run a 'full_clean' on the model.
# Note that by default, DRF does *not* perform full model validation!
try:
instance.full_clean()
except (ValidationError, DjangoValidationError) as exc:
data = exc.message_dict
# Change '__all__' key (django style) to 'non_field_errors' (DRF style)
if '__all__' in data:
data['non_field_errors'] = data['__all__']
del data['__all__']
raise ValidationError(data)
raise ValidationError(detail=serializers.as_serializer_error(exc))
return data
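One detail worth calling out: the '__all__' key that Django's full_clean() uses for model-level errors is renamed to the 'non_field_errors' key that DRF clients expect. A standalone sketch of that remapping (the error messages here are made up):

data = {
    "__all__": ["Part cannot be its own parent"],   # Django-style model-level errors
    "name": ["This field is required"],
}
if "__all__" in data:
    data["non_field_errors"] = data.pop("__all__")  # DRF-style key expected by clients

print(data)   # {'name': [...], 'non_field_errors': [...]}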
class UserSerializer(InvenTreeModelSerializer):
"""Serializer for a User."""
class Meta:
"""Metaclass defines serializer fields."""
model = User
fields = [
'pk',
'username',
'first_name',
'last_name',
'email'
]
class InvenTreeAttachmentSerializerField(serializers.FileField):
"""Override the DRF native FileField serializer, to remove the leading server path.
"""
Override the DRF native FileField serializer,
to remove the leading server path.
For example, the FileField might supply something like:
@@ -230,396 +211,31 @@ class InvenTreeAttachmentSerializerField(serializers.FileField):
/media/foo/bar.jpg
If the server process is serving the data at 127.0.0.1,
Why? You can't handle the why!
Actually, if the server process is serving the data at 127.0.0.1,
but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
then an attachment which prefixes the "address" of the internal server
will not be accessible from the outside world.
"""
def to_representation(self, value):
"""To json-serializable type."""
if not value:
return None
return os.path.join(str(settings.MEDIA_URL), str(value))
class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
"""Special case of an InvenTreeModelSerializer, which handles an "attachment" model.
The only real addition here is that we support "renaming" of the attachment file.
"""
user_detail = UserSerializer(source='user', read_only=True, many=False)
attachment = InvenTreeAttachmentSerializerField(
required=False,
allow_null=False,
)
# The 'filename' field must be present in the serializer
filename = serializers.CharField(
label=_('Filename'),
required=False,
source='basename',
allow_blank=False,
)
class InvenTreeImageSerializerField(serializers.ImageField):
"""Custom image serializer.
"""
Custom image serializer.
On upload, validate that the file is a valid image file
"""
def to_representation(self, value):
"""To json-serializable type."""
if not value:
return None
return os.path.join(str(settings.MEDIA_URL), str(value))
class InvenTreeDecimalField(serializers.FloatField):
"""Custom serializer for decimal fields.
Solves the following issues:
- The normal DRF DecimalField renders values with trailing zeros
- Using a FloatField can result in rounding issues: https://code.djangoproject.com/ticket/30290
"""
def to_internal_value(self, data):
"""Convert to python type."""
# Convert the value to a string, and then a decimal
try:
return Decimal(str(data))
except Exception:
raise serializers.ValidationError(_("Invalid value"))
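The str() round-trip in to_internal_value() above is what avoids binary float artefacts; a two-line demonstration:

from decimal import Decimal

print(Decimal(0.1))        # 0.1000000000000000055511151231257827021181583404541015625
print(Decimal(str(0.1)))   # 0.1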
class DataFileUploadSerializer(serializers.Serializer):
"""Generic serializer for uploading a data file, and extracting a dataset.
- Validates uploaded file
- Extracts column names
- Extracts data rows
"""
# Implementing class should register a target model (database model) to be used for import
TARGET_MODEL = None
class Meta:
"""Metaclass options."""
fields = [
'data_file',
]
data_file = serializers.FileField(
label=_("Data File"),
help_text=_("Select data file for upload"),
required=True,
allow_empty_file=False,
)
def validate_data_file(self, data_file):
"""Perform validation checks on the uploaded data file."""
self.filename = data_file.name
name, ext = os.path.splitext(data_file.name)
# Remove the leading . from the extension
ext = ext[1:]
accepted_file_types = [
'xls', 'xlsx',
'csv', 'tsv',
'xml',
]
if ext not in accepted_file_types:
raise serializers.ValidationError(_("Unsupported file type"))
# Impose a 50MB limit on uploaded BOM files
max_upload_file_size = 50 * 1024 * 1024
if data_file.size > max_upload_file_size:
raise serializers.ValidationError(_("File is too large"))
# Read file data into memory (bytes object)
try:
data = data_file.read()
except Exception as e:
raise serializers.ValidationError(str(e))
if ext in ['csv', 'tsv', 'xml']:
try:
data = data.decode()
except Exception as e:
raise serializers.ValidationError(str(e))
# Convert to a tablib dataset (we expect headers)
try:
self.dataset = tablib.Dataset().load(data, ext, headers=True)
except Exception as e:
raise serializers.ValidationError(str(e))
if len(self.dataset.headers) == 0:
raise serializers.ValidationError(_("No columns found in file"))
if len(self.dataset) == 0:
raise serializers.ValidationError(_("No data rows found in file"))
return data_file
def match_column(self, column_name, field_names, exact=False):
"""Attempt to match a column name (from the file) to a field (defined in the model).
Order of matching is:
- Direct match
- Case insensitive match
- Fuzzy match
"""
if not column_name:
return None
column_name = str(column_name).strip()
column_name_lower = column_name.lower()
if column_name in field_names:
return column_name
for field_name in field_names:
if field_name.lower() == column_name_lower:
return field_name
if exact:
# Finished available 'exact' matches
return None
# TODO: Fuzzy pattern matching for column names
# No matches found
return None
def extract_data(self):
"""Returns dataset extracted from the file."""
# Provide a dict of available import fields for the model
model_fields = {}
# Keep track of columns we have already extracted
matched_columns = set()
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
pass
# Extract a list of valid model field names
model_field_names = [key for key in model_fields.keys()]
# Provide a dict of available columns from the dataset
file_columns = {}
for header in self.dataset.headers:
column = {}
# Attempt to "match" file columns to model fields
match = self.match_column(header, model_field_names, exact=True)
if match is not None and match not in matched_columns:
matched_columns.add(match)
column['value'] = match
else:
column['value'] = None
file_columns[header] = column
return {
'file_fields': file_columns,
'model_fields': model_fields,
'rows': [row.values() for row in self.dataset.dict],
'filename': self.filename,
}
def save(self):
"""Empty overwrite for save."""
...
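A minimal sketch of the extraction step, assuming the tablib package is installed (it is imported at the top of this file); it mirrors validate_data_file() and extract_data() above in simplified form:

import tablib

csv_data = "Part,Quantity\nWidget,10\nGadget,5\n"
dataset = tablib.Dataset().load(csv_data, "csv", headers=True)

print(dataset.headers)   # ['Part', 'Quantity']
print(dataset.dict)      # something like [{'Part': 'Widget', 'Quantity': '10'}, ...]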
class DataFileExtractSerializer(serializers.Serializer):
"""Generic serializer for extracting data from an imported dataset.
- User provides an array of matched headers
- User provides an array of raw data rows
"""
# Implementing class should register a target model (database model) to be used for import
TARGET_MODEL = None
class Meta:
"""Metaclass options."""
fields = [
'columns',
'rows',
]
# Mapping of columns
columns = serializers.ListField(
child=serializers.CharField(
allow_blank=True,
),
)
rows = serializers.ListField(
child=serializers.ListField(
child=serializers.CharField(
allow_blank=True,
allow_null=True,
),
)
)
def validate(self, data):
"""Clean data."""
data = super().validate(data)
self.columns = data.get('columns', [])
self.rows = data.get('rows', [])
if len(self.rows) == 0:
raise serializers.ValidationError(_("No data rows provided"))
if len(self.columns) == 0:
raise serializers.ValidationError(_("No data columns supplied"))
self.validate_extracted_columns()
return data
@property
def data(self):
"""Returns current data."""
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
model_fields = {}
rows = []
for row in self.rows:
"""Optionally pre-process each row, before sending back to the client."""
processed_row = self.process_row(self.row_to_dict(row))
if processed_row:
rows.append({
"original": row,
"data": processed_row,
})
return {
'fields': model_fields,
'columns': self.columns,
'rows': rows,
}
def process_row(self, row):
"""Process a 'row' of data, which is a mapped column:value dict.
Returns either a mapped column:value dict, or None.
If the function returns None, the column is ignored!
"""
# Default implementation simply returns the original row data
return row
def row_to_dict(self, row):
"""Convert a "row" to a named data dict."""
row_dict = {
'errors': {},
}
for idx, value in enumerate(row):
if idx < len(self.columns):
col = self.columns[idx]
if col:
row_dict[col] = value
return row_dict
def validate_extracted_columns(self):
"""Perform custom validation of header mapping."""
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
model_fields = {}
cols_seen = set()
for name, field in model_fields.items():
required = field.get('required', False)
# Check for missing required columns
if required:
if name not in self.columns:
raise serializers.ValidationError(_(f"Missing required column: '{name}'"))
for col in self.columns:
if not col:
continue
# Check for duplicated columns
if col in cols_seen:
raise serializers.ValidationError(_(f"Duplicate column: '{col}'"))
cols_seen.add(col)
def save(self):
"""No "save" action for this serializer."""
pass
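The column/row handling above boils down to pairing each cell with the user-assigned column name and skipping unmapped columns; a standalone sketch of row_to_dict() with made-up data:

columns = ["name", "", "quantity"]       # the second file column was left unmapped
row = ["M3 Bolt", "ignored", "42"]

row_dict = {"errors": {}}
for idx, value in enumerate(row):
    if idx < len(columns) and columns[idx]:
        row_dict[columns[idx]] = value

print(row_dict)   # {'errors': {}, 'name': 'M3 Bolt', 'quantity': '42'}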
class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
"""Mixin class which allows downloading an 'image' from a remote URL.
Adds the optional, write-only `remote_image` field to the serializer
"""
remote_image = serializers.URLField(
required=False,
allow_blank=False,
write_only=True,
label=_("URL"),
help_text=_("URL of remote image file"),
)
def validate_remote_image(self, url):
"""Perform custom validation for the remote image URL.
- Attempt to download the image and store it against this object instance
- Catches and re-throws any errors
"""
if not url:
return
if not InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_FROM_URL'):
raise ValidationError(_("Downloading images from remote URL is not enabled"))
try:
self.remote_image_file = download_image_from_url(url)
except Exception as exc:
self.remote_image_file = None
raise ValidationError(str(exc))
return url

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,170 @@
/**
* @author: Dennis Hernández
* @webSite: http://djhvscf.github.io/Blog
* @update: zhixin wen <wenzhixin2010@gmail.com>
*/
!($ => {
const diacriticsMap = {}
const defaultAccentsDiacritics = [
{base: 'A', letters: '\u0041\u24B6\uFF21\u00C0\u00C1\u00C2\u1EA6\u1EA4\u1EAA\u1EA8\u00C3\u0100\u0102\u1EB0\u1EAE\u1EB4\u1EB2\u0226\u01E0\u00C4\u01DE\u1EA2\u00C5\u01FA\u01CD\u0200\u0202\u1EA0\u1EAC\u1EB6\u1E00\u0104\u023A\u2C6F'},
{base: 'AA',letters: '\uA732'},
{base: 'AE',letters: '\u00C6\u01FC\u01E2'},
{base: 'AO',letters: '\uA734'},
{base: 'AU',letters: '\uA736'},
{base: 'AV',letters: '\uA738\uA73A'},
{base: 'AY',letters: '\uA73C'},
{base: 'B', letters: '\u0042\u24B7\uFF22\u1E02\u1E04\u1E06\u0243\u0182\u0181'},
{base: 'C', letters: '\u0043\u24B8\uFF23\u0106\u0108\u010A\u010C\u00C7\u1E08\u0187\u023B\uA73E'},
{base: 'D', letters: '\u0044\u24B9\uFF24\u1E0A\u010E\u1E0C\u1E10\u1E12\u1E0E\u0110\u018B\u018A\u0189\uA779'},
{base: 'DZ',letters: '\u01F1\u01C4'},
{base: 'Dz',letters: '\u01F2\u01C5'},
{base: 'E', letters: '\u0045\u24BA\uFF25\u00C8\u00C9\u00CA\u1EC0\u1EBE\u1EC4\u1EC2\u1EBC\u0112\u1E14\u1E16\u0114\u0116\u00CB\u1EBA\u011A\u0204\u0206\u1EB8\u1EC6\u0228\u1E1C\u0118\u1E18\u1E1A\u0190\u018E'},
{base: 'F', letters: '\u0046\u24BB\uFF26\u1E1E\u0191\uA77B'},
{base: 'G', letters: '\u0047\u24BC\uFF27\u01F4\u011C\u1E20\u011E\u0120\u01E6\u0122\u01E4\u0193\uA7A0\uA77D\uA77E'},
{base: 'H', letters: '\u0048\u24BD\uFF28\u0124\u1E22\u1E26\u021E\u1E24\u1E28\u1E2A\u0126\u2C67\u2C75\uA78D'},
{base: 'I', letters: '\u0049\u24BE\uFF29\u00CC\u00CD\u00CE\u0128\u012A\u012C\u0130\u00CF\u1E2E\u1EC8\u01CF\u0208\u020A\u1ECA\u012E\u1E2C\u0197'},
{base: 'J', letters: '\u004A\u24BF\uFF2A\u0134\u0248'},
{base: 'K', letters: '\u004B\u24C0\uFF2B\u1E30\u01E8\u1E32\u0136\u1E34\u0198\u2C69\uA740\uA742\uA744\uA7A2'},
{base: 'L', letters: '\u004C\u24C1\uFF2C\u013F\u0139\u013D\u1E36\u1E38\u013B\u1E3C\u1E3A\u0141\u023D\u2C62\u2C60\uA748\uA746\uA780'},
{base: 'LJ',letters: '\u01C7'},
{base: 'Lj',letters: '\u01C8'},
{base: 'M', letters: '\u004D\u24C2\uFF2D\u1E3E\u1E40\u1E42\u2C6E\u019C'},
{base: 'N', letters: '\u004E\u24C3\uFF2E\u01F8\u0143\u00D1\u1E44\u0147\u1E46\u0145\u1E4A\u1E48\u0220\u019D\uA790\uA7A4'},
{base: 'NJ',letters: '\u01CA'},
{base: 'Nj',letters: '\u01CB'},
{base: 'O', letters: '\u004F\u24C4\uFF2F\u00D2\u00D3\u00D4\u1ED2\u1ED0\u1ED6\u1ED4\u00D5\u1E4C\u022C\u1E4E\u014C\u1E50\u1E52\u014E\u022E\u0230\u00D6\u022A\u1ECE\u0150\u01D1\u020C\u020E\u01A0\u1EDC\u1EDA\u1EE0\u1EDE\u1EE2\u1ECC\u1ED8\u01EA\u01EC\u00D8\u01FE\u0186\u019F\uA74A\uA74C'},
{base: 'OI',letters: '\u01A2'},
{base: 'OO',letters: '\uA74E'},
{base: 'OU',letters: '\u0222'},
{base: 'OE',letters: '\u008C\u0152'},
{base: 'oe',letters: '\u009C\u0153'},
{base: 'P', letters: '\u0050\u24C5\uFF30\u1E54\u1E56\u01A4\u2C63\uA750\uA752\uA754'},
{base: 'Q', letters: '\u0051\u24C6\uFF31\uA756\uA758\u024A'},
{base: 'R', letters: '\u0052\u24C7\uFF32\u0154\u1E58\u0158\u0210\u0212\u1E5A\u1E5C\u0156\u1E5E\u024C\u2C64\uA75A\uA7A6\uA782'},
{base: 'S', letters: '\u0053\u24C8\uFF33\u1E9E\u015A\u1E64\u015C\u1E60\u0160\u1E66\u1E62\u1E68\u0218\u015E\u2C7E\uA7A8\uA784'},
{base: 'T', letters: '\u0054\u24C9\uFF34\u1E6A\u0164\u1E6C\u021A\u0162\u1E70\u1E6E\u0166\u01AC\u01AE\u023E\uA786'},
{base: 'TZ',letters: '\uA728'},
{base: 'U', letters: '\u0055\u24CA\uFF35\u00D9\u00DA\u00DB\u0168\u1E78\u016A\u1E7A\u016C\u00DC\u01DB\u01D7\u01D5\u01D9\u1EE6\u016E\u0170\u01D3\u0214\u0216\u01AF\u1EEA\u1EE8\u1EEE\u1EEC\u1EF0\u1EE4\u1E72\u0172\u1E76\u1E74\u0244'},
{base: 'V', letters: '\u0056\u24CB\uFF36\u1E7C\u1E7E\u01B2\uA75E\u0245'},
{base: 'VY',letters: '\uA760'},
{base: 'W', letters: '\u0057\u24CC\uFF37\u1E80\u1E82\u0174\u1E86\u1E84\u1E88\u2C72'},
{base: 'X', letters: '\u0058\u24CD\uFF38\u1E8A\u1E8C'},
{base: 'Y', letters: '\u0059\u24CE\uFF39\u1EF2\u00DD\u0176\u1EF8\u0232\u1E8E\u0178\u1EF6\u1EF4\u01B3\u024E\u1EFE'},
{base: 'Z', letters: '\u005A\u24CF\uFF3A\u0179\u1E90\u017B\u017D\u1E92\u1E94\u01B5\u0224\u2C7F\u2C6B\uA762'},
{base: 'a', letters: '\u0061\u24D0\uFF41\u1E9A\u00E0\u00E1\u00E2\u1EA7\u1EA5\u1EAB\u1EA9\u00E3\u0101\u0103\u1EB1\u1EAF\u1EB5\u1EB3\u0227\u01E1\u00E4\u01DF\u1EA3\u00E5\u01FB\u01CE\u0201\u0203\u1EA1\u1EAD\u1EB7\u1E01\u0105\u2C65\u0250'},
{base: 'aa',letters: '\uA733'},
{base: 'ae',letters: '\u00E6\u01FD\u01E3'},
{base: 'ao',letters: '\uA735'},
{base: 'au',letters: '\uA737'},
{base: 'av',letters: '\uA739\uA73B'},
{base: 'ay',letters: '\uA73D'},
{base: 'b', letters: '\u0062\u24D1\uFF42\u1E03\u1E05\u1E07\u0180\u0183\u0253'},
{base: 'c', letters: '\u0063\u24D2\uFF43\u0107\u0109\u010B\u010D\u00E7\u1E09\u0188\u023C\uA73F\u2184'},
{base: 'd', letters: '\u0064\u24D3\uFF44\u1E0B\u010F\u1E0D\u1E11\u1E13\u1E0F\u0111\u018C\u0256\u0257\uA77A'},
{base: 'dz',letters: '\u01F3\u01C6'},
{base: 'e', letters: '\u0065\u24D4\uFF45\u00E8\u00E9\u00EA\u1EC1\u1EBF\u1EC5\u1EC3\u1EBD\u0113\u1E15\u1E17\u0115\u0117\u00EB\u1EBB\u011B\u0205\u0207\u1EB9\u1EC7\u0229\u1E1D\u0119\u1E19\u1E1B\u0247\u025B\u01DD'},
{base: 'f', letters: '\u0066\u24D5\uFF46\u1E1F\u0192\uA77C'},
{base: 'g', letters: '\u0067\u24D6\uFF47\u01F5\u011D\u1E21\u011F\u0121\u01E7\u0123\u01E5\u0260\uA7A1\u1D79\uA77F'},
{base: 'h', letters: '\u0068\u24D7\uFF48\u0125\u1E23\u1E27\u021F\u1E25\u1E29\u1E2B\u1E96\u0127\u2C68\u2C76\u0265'},
{base: 'hv',letters: '\u0195'},
{base: 'i', letters: '\u0069\u24D8\uFF49\u00EC\u00ED\u00EE\u0129\u012B\u012D\u00EF\u1E2F\u1EC9\u01D0\u0209\u020B\u1ECB\u012F\u1E2D\u0268\u0131'},
{base: 'j', letters: '\u006A\u24D9\uFF4A\u0135\u01F0\u0249'},
{base: 'k', letters: '\u006B\u24DA\uFF4B\u1E31\u01E9\u1E33\u0137\u1E35\u0199\u2C6A\uA741\uA743\uA745\uA7A3'},
{base: 'l', letters: '\u006C\u24DB\uFF4C\u0140\u013A\u013E\u1E37\u1E39\u013C\u1E3D\u1E3B\u017F\u0142\u019A\u026B\u2C61\uA749\uA781\uA747'},
{base: 'lj',letters: '\u01C9'},
{base: 'm', letters: '\u006D\u24DC\uFF4D\u1E3F\u1E41\u1E43\u0271\u026F'},
{base: 'n', letters: '\u006E\u24DD\uFF4E\u01F9\u0144\u00F1\u1E45\u0148\u1E47\u0146\u1E4B\u1E49\u019E\u0272\u0149\uA791\uA7A5'},
{base: 'nj',letters: '\u01CC'},
{base: 'o', letters: '\u006F\u24DE\uFF4F\u00F2\u00F3\u00F4\u1ED3\u1ED1\u1ED7\u1ED5\u00F5\u1E4D\u022D\u1E4F\u014D\u1E51\u1E53\u014F\u022F\u0231\u00F6\u022B\u1ECF\u0151\u01D2\u020D\u020F\u01A1\u1EDD\u1EDB\u1EE1\u1EDF\u1EE3\u1ECD\u1ED9\u01EB\u01ED\u00F8\u01FF\u0254\uA74B\uA74D\u0275'},
{base: 'oi',letters: '\u01A3'},
{base: 'ou',letters: '\u0223'},
{base: 'oo',letters: '\uA74F'},
{base: 'p',letters: '\u0070\u24DF\uFF50\u1E55\u1E57\u01A5\u1D7D\uA751\uA753\uA755'},
{base: 'q',letters: '\u0071\u24E0\uFF51\u024B\uA757\uA759'},
{base: 'r',letters: '\u0072\u24E1\uFF52\u0155\u1E59\u0159\u0211\u0213\u1E5B\u1E5D\u0157\u1E5F\u024D\u027D\uA75B\uA7A7\uA783'},
{base: 's',letters: '\u0073\u24E2\uFF53\u00DF\u015B\u1E65\u015D\u1E61\u0161\u1E67\u1E63\u1E69\u0219\u015F\u023F\uA7A9\uA785\u1E9B'},
{base: 't',letters: '\u0074\u24E3\uFF54\u1E6B\u1E97\u0165\u1E6D\u021B\u0163\u1E71\u1E6F\u0167\u01AD\u0288\u2C66\uA787'},
{base: 'tz',letters: '\uA729'},
{base: 'u',letters: '\u0075\u24E4\uFF55\u00F9\u00FA\u00FB\u0169\u1E79\u016B\u1E7B\u016D\u00FC\u01DC\u01D8\u01D6\u01DA\u1EE7\u016F\u0171\u01D4\u0215\u0217\u01B0\u1EEB\u1EE9\u1EEF\u1EED\u1EF1\u1EE5\u1E73\u0173\u1E77\u1E75\u0289'},
{base: 'v',letters: '\u0076\u24E5\uFF56\u1E7D\u1E7F\u028B\uA75F\u028C'},
{base: 'vy',letters: '\uA761'},
{base: 'w',letters: '\u0077\u24E6\uFF57\u1E81\u1E83\u0175\u1E87\u1E85\u1E98\u1E89\u2C73'},
{base: 'x',letters: '\u0078\u24E7\uFF58\u1E8B\u1E8D'},
{base: 'y',letters: '\u0079\u24E8\uFF59\u1EF3\u00FD\u0177\u1EF9\u0233\u1E8F\u00FF\u1EF7\u1E99\u1EF5\u01B4\u024F\u1EFF'},
{base: 'z',letters: '\u007A\u24E9\uFF5A\u017A\u1E91\u017C\u017E\u1E93\u1E95\u01B6\u0225\u0240\u2C6C\uA763'}
]
const initNeutraliser = () => {
for (const diacritic of defaultAccentsDiacritics) {
const letters = diacritic.letters
for (let i = 0; i < letters.length; i++) {
diacriticsMap[letters[i]] = diacritic.base
}
}
}
/* eslint-disable no-control-regex */
const removeDiacritics = str => str.replace(/[^\u0000-\u007E]/g, a => diacriticsMap[a] || a)
$.extend($.fn.bootstrapTable.defaults, {
searchAccentNeutralise: false
})
$.BootstrapTable = class extends $.BootstrapTable {
init () {
if (this.options.searchAccentNeutralise) {
initNeutraliser()
}
super.init()
}
initSearch () {
if (this.options.sidePagination !== 'server') {
let s = this.searchText && this.searchText.toLowerCase()
const f = $.isEmptyObject(this.filterColumns) ? null : this.filterColumns
// Check filter
this.data = f ? this.options.data.filter((item, i) => {
for (const key in f) {
if (item[key] !== f[key]) {
return false
}
}
return true
}) : this.options.data
this.data = s ? this.options.data.filter((item, i) => {
for (let [key, value] of Object.entries(item)) {
key = $.isNumeric(key) ? parseInt(key, 10) : key
const column = this.columns[this.fieldsColumnsIndex[key]]
const j = this.header.fields.indexOf(key)
if (column && column.searchFormatter) {
value = $.fn.bootstrapTable.utils.calculateObjectValue(column,
this.header.formatters[j], [value, item, i], value)
}
const index = this.header.fields.indexOf(key)
if (index !== -1 && this.header.searchables[index] && typeof value === 'string') {
if (this.options.searchAccentNeutralise) {
value = removeDiacritics(value)
s = removeDiacritics(s)
}
if (this.options.strictSearch) {
if ((`${value}`).toLowerCase() === s) {
return true
}
} else {
if ((`${value}`).toLowerCase().includes(s)) {
return true
}
}
}
}
return false
}) : this.data
}
}
}
})(jQuery)
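For comparison, the same accent-folding idea can be expressed in Python with unicodedata instead of the lookup table used by this extension (illustrative only, and not part of bootstrap-table; note that a decomposition-based approach misses characters such as 'Ø' which the table above handles explicitly):

import unicodedata

def remove_diacritics(text):
    # Decompose, then drop the combining marks
    decomposed = unicodedata.normalize("NFKD", text)
    return "".join(ch for ch in decomposed if not unicodedata.combining(ch))

assert remove_diacritics("Résistor") == "Resistor"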


@@ -0,0 +1,17 @@
{
"name": "Accent Neutralise",
"version": "1.0.0",
"description": "Plugin to neutralise the words.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/accent-neutralise",
"example": "#",
"plugins": [{
"name": "bootstrap-table-accent-neutralise",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/accent-neutralise"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}


@@ -0,0 +1,17 @@
{
"name": "Auto Refresh",
"version": "1.0.0",
"description": "Plugin to automatically refresh the table on an interval.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/auto-refresh",
"example": "#",
"plugins": [{
"name": "bootstrap-table-auto-refresh",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/auto-refresh"
}],
"author": {
"name": "fenichaler",
"image": "https://avatars.githubusercontent.com/u/3437075"
}
}


@@ -0,0 +1,17 @@
{
"name": "Cookie",
"version": "1.2.1",
"description": "Plugin to use the cookie of the browser.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/cookie",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/cookie.html",
"plugins": [{
"name": "bootstrap-table-cookie",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/cookie"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}


@@ -0,0 +1,17 @@
{
"name": "Copy Rows",
"version": "1.0.0",
"description": "Allows pushing of selected column data to the clipboard.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/copy-rows",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/copy-rows.html",
"plugins": [{
"name": "copy-rows",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/copy-rows"
}],
"author": {
"name": "Homer Glascock",
"image": "https://avatars1.githubusercontent.com/u/5546710"
}
}


@@ -0,0 +1,17 @@
{
"name": "DeferURL",
"version": "1.0.0",
"description": "Plugin to defer server side processing.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/defer-url",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/defer-url.html",
"plugins": [{
"name": "bootstrap-table-defer-url",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/defer-url"
}],
"author": {
"name": "rubensa",
"image": "https://avatars1.githubusercontent.com/u/1469340"
}
}


@@ -0,0 +1,17 @@
{
"name": "Table Editable",
"version": "1.1.0",
"description": "Use the x-editable to in-place editing your table.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/editable",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/editable.html",
"plugins": [{
"name": "x-editable",
"url": "https://github.com/vitalets/x-editable"
}],
"author": {
"name": "wenzhixin",
"image": "https://avatars1.githubusercontent.com/u/2117018"
}
}


@@ -0,0 +1,17 @@
{
"name": "Table Export",
"version": "1.1.0",
"description": "Export your table data to JSON, XML, CSV, TXT, SQL, Word, Excel, PNG, PDF.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/export",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/export.html",
"plugins": [{
"name": "tableExport.jquery.plugin",
"url": "https://github.com/hhurz/tableExport.jquery.plugin"
}],
"author": {
"name": "wenzhixin",
"image": "https://avatars1.githubusercontent.com/u/2117018"
}
}


@@ -0,0 +1,17 @@
{
"name": "Filter Control",
"version": "2.1.0",
"description": "Plugin to add input/select element on the top of the columns in order to filter the data.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/filter-control",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/filter-control.html",
"plugins": [{
"name": "bootstrap-table-filter-control",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/filter-control"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}


@@ -0,0 +1,12 @@
{
"name": "Group By V2",
"version": "1.0.0",
"description": "Group the data by field",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/group-by-v2",
"example": "",
"plugins": [],
"author": {
"name": "Knoxvillekm",
"image": "https://avatars3.githubusercontent.com/u/11072464"
}
}


@@ -0,0 +1,53 @@
table.treetable tbody tr td {
cursor: default;
}
table.treetable span {
background-position: center left;
background-repeat: no-repeat;
padding: .2em 0 .2em 1.5em;
}
table.treetable tr.collapsed span.indenter a {
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69f
O0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAHlJREFUeNrcU1sNgDAQ6wgmcAM2MICGGlg1gJnNzWQcvwQGy1j4oUl/7tH0mpwzM7SgQyO+EZAUWh2MkkzSWhJwuRAlHYsJwEwyvs1gABDuzqoJcTw5qxaIJN0bgQRgIjnlmn1heSO5PE6Y2YXe+5Cr5+h++gs12AcAS6FS+7YOsj4AAAAASUVORK5CYII=);
padding-right: 12px;
}
table.treetable tr.expanded span.indenter a {
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69f
O0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAHFJREFUeNpi/P//PwMlgImBQsA44C6gvhfa29v3MzAwOODRc6CystIRbxi0t7fjDJjKykpGYrwwi1hxnLHQ3t7+jIGBQRJJ6HllZaUUKYEYRYBPOB0gBShKwKGA////48VtbW3/8clTnBIH3gCKkzJgAGvBX0dDm0sCAAAAAElFTkSuQmCC);
padding-right: 12px;
}
table.treetable tr.branch {
background-color: #f9f9f9;
}
table.treetable tr.selected {
background-color: #3875d7;
color: #fff;
}
table.treetable tr span.indenter a {
outline: none; /* Expander shows outline after upgrading to 3.0 (#141) */
}
table.treetable tr.collapsed.selected span.indenter a {
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69f
O0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAFpJREFUeNpi/P//PwMlgHHADWD4//8/NtyAQxwD45KAAQdKDfj//////fgMIsYAZIMw1DKREFwODAwM/4kNRKq64AADA4MjFDOQ6gKyY4HodMA49PMCxQYABgAVYHsjyZ1x7QAAAABJRU5ErkJggg==);
}
table.treetable tr.expanded.selected span.indenter a {
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV69f
O0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAFtJREFUeNpi/P//PwMlgImBQsA44C6giQENDAwM//HgBmLCAF/AMBLjBUeixf///48L7/+PCvZjU4fPAAc0AxywqcMXCwegGJ1NckL6jx5wpKYDxqGXEkkCgAEAmrqBIejdgngAAAAASUVORK5CYII=);
}
table.treetable tr.accept {
background-color: #a3bce4;
color: #fff
}
table.treetable tr.collapsed.accept td span.indenter a {
background-image: url(data:image/x-png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV6
9fO0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAFpJREFUeNpi/P//PwMlgHHADWD4//8/NtyAQxwD45KAAQdKDfj//////fgMIsYAZIMw1DKREFwODAwM/4kNRKq64AADA4MjFDOQ6gKyY4HodMA49PMCxQYABgAVYHsjyZ1x7QAAAABJRU5ErkJggg==);
}
table.treetable tr.expanded.accept td span.indenter a {
background-image: url(data:image/x-png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAAsTAAALEwEAmpwYAAAKT2lDQ1BQaG90b3Nob3AgSUNDIHByb2ZpbGUAAHjanVNnVFPpFj333vRCS4iAlEtvUhUIIFJCi4AUkSYqIQkQSoghodkVUcERRUUEG8igiAOOjoCMFVEsDIoK2AfkIaKOg6OIisr74Xuja9a89+bN/rXXPues852zzwfACAyWSDNRNYAMqUIeEeCDx8TG4eQuQIEKJHAAEAizZCFz/SMBAPh+PDwrIsAHvgABeNMLCADATZvAMByH/w/qQplcAYCEAcB0kThLCIAUAEB6jkKmAEBGAYCdmCZTAKAEAGDLY2LjAFAtAGAnf+bTAICd+Jl7AQBblCEVAaCRACATZYhEAGg7AKzPVopFAFgwABRmS8Q5ANgtADBJV2ZIALC3AMDOEAuyAAgMADBRiIUpAAR7AGDIIyN4AISZABRG8lc88SuuEOcqAAB4mbI8uSQ5RYFbCC1xB1dXLh4ozkkXKxQ2YQJhmkAuwnmZGTKBNA/g88wAAKCRFRHgg/P9eM4Ors7ONo62Dl8t6r8G/yJiYuP+5c+rcEAAAOF0ftH+LC+zGoA7BoBt/qIl7gRoXgugdfeLZrIPQLUAoOnaV/Nw+H48PEWhkLnZ2eXk5NhKxEJbYcpXff5nwl/AV/1s+X48/Pf14L7iJIEyXYFHBPjgwsz0TKUcz5IJhGLc5o9H/LcL//wd0yLESWK5WCoU41EScY5EmozzMqUiiUKSKcUl0v9k4t8s+wM+3zUAsGo+AXuRLahdYwP2SycQWHTA4vcAAPK7b8HUKAgDgGiD4c93/+8//UegJQCAZkmScQAAXkQkLlTKsz/HCAAARKCBKrBBG/TBGCzABhzBBdzBC/xgNoRCJMTCQhBCCmSAHHJgKayCQiiGzbAdKmAv1EAdNMBRaIaTcA4uwlW4Dj1wD/phCJ7BKLyBCQRByAgTYSHaiAFiilgjjggXmYX4IcFIBBKLJCDJiBRRIkuRNUgxUopUIFVIHfI9cgI5h1xGupE7yAAygvyGvEcxlIGyUT3UDLVDuag3GoRGogvQZHQxmo8WoJvQcrQaPYw2oefQq2gP2o8+Q8cwwOgYBzPEbDAuxsNCsTgsCZNjy7EirAyrxhqwVqwDu4n1Y8+xdwQSgUXACTYEd0IgYR5BSFhMWE7YSKggHCQ0EdoJNwkDhFHCJyKTqEu0JroR+cQYYjIxh1hILCPWEo8TLxB7iEPENyQSiUMyJ7mQAkmxpFTSEtJG0m5SI+ksqZs0SBojk8naZGuyBzmULCAryIXkneTD5DPkG+Qh8lsKnWJAcaT4U+IoUspqShnlEOU05QZlmDJBVaOaUt2ooVQRNY9aQq2htlKvUYeoEzR1mjnNgxZJS6WtopXTGmgXaPdpr+h0uhHdlR5Ol9BX0svpR+iX6AP0dwwNhhWDx4hnKBmbGAcYZxl3GK+YTKYZ04sZx1QwNzHrmOeZD5lvVVgqtip8FZHKCpVKlSaVGyovVKmqpqreqgtV81XLVI+pXlN9rkZVM1PjqQnUlqtVqp1Q61MbU2epO6iHqmeob1Q/pH5Z/YkGWcNMw09DpFGgsV/jvMYgC2MZs3gsIWsNq4Z1gTXEJrHN2Xx2KruY/R27iz2qqaE5QzNKM1ezUvOUZj8H45hx+Jx0TgnnKKeX836K3hTvKeIpG6Y0TLkxZVxrqpaXllirSKtRq0frvTau7aedpr1Fu1n7gQ5Bx0onXCdHZ4/OBZ3nU9lT3acKpxZNPTr1ri6qa6UbobtEd79up+6Ynr5egJ5Mb6feeb3n+hx9L/1U/W36p/VHDFgGswwkBtsMzhg8xTVxbzwdL8fb8VFDXcNAQ6VhlWGX4YSRudE8o9VGjUYPjGnGXOMk423GbcajJgYmISZLTepN7ppSTbmmKaY7TDtMx83MzaLN1pk1mz0x1zLnm+eb15vft2BaeFostqi2uGVJsuRaplnutrxuhVo5WaVYVVpds0atna0l1rutu6cRp7lOk06rntZnw7Dxtsm2qbcZsOXYBtuutm22fWFnYhdnt8Wuw+6TvZN9un2N/T0HDYfZDqsdWh1+c7RyFDpWOt6azpzuP33F9JbpL2dYzxDP2DPjthPLKcRpnVOb00dnF2e5c4PziIuJS4LLLpc+Lpsbxt3IveRKdPVxXeF60vWdm7Obwu2o26/uNu5p7ofcn8w0nymeWTNz0MPIQ+BR5dE/C5+VMGvfrH5PQ0+BZ7XnIy9jL5FXrdewt6V3qvdh7xc+9j5yn+M+4zw33jLeWV/MN8C3yLfLT8Nvnl+F30N/I/9k/3r/0QCngCUBZwOJgUGBWwL7+Hp8Ib+OPzrbZfay2e1BjKC5QRVBj4KtguXBrSFoyOyQrSH355jOkc5pDoVQfujW0Adh5mGLw34MJ4WHhVeGP45wiFga0TGXNXfR3ENz30T6RJZE3ptnMU85ry1KNSo+qi5qPNo3ujS6P8YuZlnM1VidWElsSxw5LiquNm5svt/87fOH4p3iC+N7F5gvyF1weaHOwvSFpxapLhIsOpZATIhOOJTwQRAqqBaMJfITdyWOCnnCHcJnIi/RNtGI2ENcKh5O8kgqTXqS7JG8NXkkxTOlLOW5hCepkLxMDUzdmzqeFpp2IG0yPTq9MYOSkZBxQqohTZO2Z+pn5mZ2y6xlhbL+xW6Lty8elQfJa7OQrAVZLQq2QqboVFoo1yoHsmdlV2a/zYnKOZarnivN7cyzytuQN5zvn//tEsIS4ZK2pYZLVy0dWOa9rGo5sjxxedsK4xUFK4ZWBqw8uIq2Km3VT6vtV5eufr0mek1rgV7ByoLBtQFr6wtVCuWFfevc1+1dT1gvWd+1YfqGnRs+FYmKrhTbF5cVf9go3HjlG4dvyr+Z3JS0qavEuWTPZtJm6ebeLZ5bDpaql+aXDm4N2dq0Dd9WtO319kXbL5fNKNu7g7ZDuaO/PLi8ZafJzs07P1SkVPRU+lQ27tLdtWHX+G7R7ht7vPY07NXbW7z3/T7JvttVAVVN1WbVZftJ+7P3P66Jqun4lvttXa1ObXHtxwPSA/0HIw6217nU1R3SPVRSj9Yr60cOxx++/p3vdy0NNg1VjZzG4iNwRHnk6fcJ3/ceDTradox7rOEH0x92HWcdL2pCmvKaRptTmvtbYlu6T8w+0dbq3nr8R9sfD5w0PFl5SvNUyWna6YLTk2fyz4ydlZ19fi753GDborZ752PO32oPb++6EHTh0kX/i+c7vDvOXPK4dPKy2+UTV7hXmq86X23qdOo8/pPTT8e7nLuarrlca7nuer21e2b36RueN87d9L158Rb/1tWeOT3dvfN6b/fF9/XfFt1+cif9zsu72Xcn7q28T7xf9EDtQdlD3YfVP1v+3Njv3H9qwHeg89HcR/cGhYPP/pH1jw9DBY+Zj8uGDYbrnjg+OTniP3L96fynQ89kzyaeF/6i/suuFxYvfvjV6
9fO0ZjRoZfyl5O/bXyl/erA6xmv28bCxh6+yXgzMV70VvvtwXfcdx3vo98PT+R8IH8o/2j5sfVT0Kf7kxmTk/8EA5jz/GMzLdsAAAAgY0hSTQAAeiUAAICDAAD5/wAAgOkAAHUwAADqYAAAOpgAABdvkl/FRgAAAFtJREFUeNpi/P//PwMlgImBQsA44C6giQENDAwM//HgBmLCAF/AMBLjBUeixf///48L7/+PCvZjU4fPAAc0AxywqcMXCwegGJ1NckL6jx5wpKYDxqGXEkkCgAEAmrqBIejdgngAAAAASUVORK5CYII=);
}

View File

@@ -0,0 +1,243 @@
/**
* @author: Dennis Hernández
* @webSite: http://djhvscf.github.io/Blog
* @version: v1.1.0
*/
!function ($) {
'use strict';
var originalRowAttr,
dataTTId = 'data-tt-id',
dataTTParentId = 'data-tt-parent-id',
obj = {},
parentId = undefined;
var getParentRowId = function (that, id) {
var parentRows = that.$body.find('tr').not('[' + dataTTParentId + ']');
for (var i = 0; i < parentRows.length; i++) {
if (i === id) {
return $(parentRows[i]).attr('data-tt-id');
}
}
return undefined;
};
var sumData = function (that, data) {
var sumRow = {};
$.each(data, function (i, row) {
if (!row.IsParent) {
for (var prop in row) {
if (!isNaN(parseFloat(row[prop]))) {
if (that.columns[that.fieldsColumnsIndex[prop]].groupBySumGroup) {
if (sumRow[prop] === undefined) {
sumRow[prop] = 0;
}
sumRow[prop] += +row[prop];
}
}
}
}
});
return sumRow;
};
var rowAttr = function (row, index) {
// Call the user-defined rowAttributes function first, if one was supplied
if (typeof originalRowAttr === 'function') {
originalRowAttr.apply(this, [row, index]);
}
obj[dataTTId.toString()] = index;
if (!row.IsParent) {
obj[dataTTParentId.toString()] = parentId === undefined ? index : parentId;
} else {
parentId = index;
delete obj[dataTTParentId.toString()];
}
return obj;
};
var setObjectKeys = function () {
// From https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/keys
Object.keys = function (o) {
if (o !== Object(o)) {
throw new TypeError('Object.keys called on a non-object');
}
var k = [],
p;
for (p in o) {
if (Object.prototype.hasOwnProperty.call(o, p)) {
k.push(p);
}
}
return k;
}
};
var getDataArrayFromItem = function (that, item) {
var itemDataArray = [];
for (var i = 0; i < that.options.groupByField.length; i++) {
itemDataArray.push(item[that.options.groupByField[i]]);
}
return itemDataArray;
};
var getNewRow = function (that, result, index) {
var newRow = {};
for (var i = 0; i < that.options.groupByField.length; i++) {
newRow[that.options.groupByField[i].toString()] = result[index][0][that.options.groupByField[i]];
}
newRow.IsParent = true;
return newRow;
};
var groupBy = function (array, f) {
var groups = {};
$.each(array, function (i, o) {
var group = JSON.stringify(f(o));
groups[group] = groups[group] || [];
groups[group].push(o);
});
return Object.keys(groups).map(function (group) {
return groups[group];
});
};
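// Turns the flat data set into treetable-ready rows: one synthetic parent row per group
// (built by getNewRow), followed by that group's child rows and, when groupBySumGroup
// is enabled, a trailing sum row.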
var makeGrouped = function (that, data) {
var newData = [],
sumRow = {};
var result = groupBy(data, function (item) {
return getDataArrayFromItem(that, item);
});
for (var i = 0; i < result.length; i++) {
result[i].unshift(getNewRow(that, result, i));
if (that.options.groupBySumGroup) {
sumRow = sumData(that, result[i]);
if (!$.isEmptyObject(sumRow)) {
result[i].push(sumRow);
}
}
}
newData = newData.concat.apply(newData, result);
if (!that.options.loaded && newData.length > 0) {
that.options.loaded = true;
that.options.originalData = that.options.data;
that.options.data = newData;
}
return newData;
};
$.extend($.fn.bootstrapTable.defaults, {
groupBy: false,
groupByField: [],
groupBySumGroup: false,
groupByInitExpanded: undefined, //node, 'all'
//internal variables
loaded: false,
originalData: undefined
});
$.fn.bootstrapTable.methods.push('collapseAll', 'expandAll', 'refreshGroupByField');
$.extend($.fn.bootstrapTable.COLUMN_DEFAULTS, {
groupBySumGroup: false
});
var BootstrapTable = $.fn.bootstrapTable.Constructor,
_init = BootstrapTable.prototype.init,
_initData = BootstrapTable.prototype.initData;
BootstrapTable.prototype.init = function () {
// Temporary limitation: grouping is only applied when no sortName is set
if (!this.options.sortName) {
if ((this.options.groupBy) && (this.options.groupByField.length > 0)) {
var that = this;
// Compatibility: IE < 9 and old browsers
if (!Object.keys) {
$.fn.bootstrapTable.utils.objectKeys();
}
//Make sure that the internal variables are set correctly
this.options.loaded = false;
this.options.originalData = undefined;
originalRowAttr = this.options.rowAttributes;
this.options.rowAttributes = rowAttr;
this.$el.off('post-body.bs.table').on('post-body.bs.table', function () {
that.$el.treetable({
expandable: true,
onNodeExpand: function () {
if (that.options.height) {
that.resetHeader();
}
},
onNodeCollapse: function () {
if (that.options.height) {
that.resetHeader();
}
}
}, true);
if (that.options.groupByInitExpanded !== undefined) {
if (typeof that.options.groupByInitExpanded === 'number') {
that.expandNode(that.options.groupByInitExpanded);
} else if (that.options.groupByInitExpanded.toLowerCase() === 'all') {
that.expandAll();
}
}
});
}
}
_init.apply(this, Array.prototype.slice.apply(arguments));
};
BootstrapTable.prototype.initData = function (data, type) {
// Temporary limitation: grouping is only applied when no sortName is set
if (!this.options.sortName) {
if ((this.options.groupBy) && (this.options.groupByField.length > 0)) {
this.options.groupByField = typeof this.options.groupByField === 'string' ?
this.options.groupByField.replace('[', '').replace(']', '')
.replace(/ /g, '').toLowerCase().split(',') : this.options.groupByField;
data = makeGrouped(this, data ? data : this.options.data);
}
}
_initData.apply(this, [data, type]);
};
BootstrapTable.prototype.expandAll = function () {
this.$el.treetable('expandAll');
};
BootstrapTable.prototype.collapseAll = function () {
this.$el.treetable('collapseAll');
};
BootstrapTable.prototype.expandNode = function (id) {
id = getParentRowId(this, id);
if (id !== undefined) {
this.$el.treetable('expandNode', id);
}
};
BootstrapTable.prototype.refreshGroupByField = function (groupByFields) {
if (!$.fn.bootstrapTable.utils.compareObjects(this.options.groupByField, groupByFields)) {
this.options.groupByField = groupByFields;
this.load(this.options.originalData);
}
};
}(jQuery);

View File

@@ -0,0 +1,17 @@
{
"name": "Group By",
"version": "1.1.0",
"description": "Plugin to group the data by fields.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/group-by",
"example": "#",
"plugins": [{
"name": "bootstrap-table-group-by",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/group-by"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "i18n Enhance",
"version": "1.0.0",
"description": "Plugin to add i18n API in order to change column's title and table locale.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/i18n-enhance",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/i18n-enhance.html",
"plugins": [{
"name": "bootstrap-table-i18n-enhance",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/i18n-enhance"
}],
"author": {
"name": "Jewway",
"image": "https://avatars0.githubusercontent.com/u/3501899"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Key Events",
"version": "1.0.0",
"description": "Plugin to support the key events in the bootstrap table.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/key-events",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/key-events.html",
"plugins": [{
"name": "bootstrap-table-key-events",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/key-events"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Mobile",
"version": "1.1.0",
"description": "Plugin to support the responsive feature.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/mobile",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/mobile.html",
"plugins": [{
"name": "bootstrap-table-mobile",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/mobile"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,88 @@
/**
* @author Homer Glascock <HopGlascock@gmail.com>
* @version: v1.0.0
*/
!function ($) {
"use strict";
var sprintf = $.fn.bootstrapTable.utils.sprintf;
var reInit = function (self) {
self.initHeader();
self.initSearch();
self.initPagination();
self.initBody();
};
$.extend($.fn.bootstrapTable.defaults, {
showToggleBtn: false,
multiToggleDefaults: [], //column names go here
});
$.fn.bootstrapTable.methods.push('hideAllColumns', 'showAllColumns');
var BootstrapTable = $.fn.bootstrapTable.Constructor,
_initToolbar = BootstrapTable.prototype.initToolbar;
BootstrapTable.prototype.initToolbar = function () {
_initToolbar.apply(this, Array.prototype.slice.apply(arguments));
var that = this,
$btnGroup = this.$toolbar.find('>.btn-group');
if (typeof this.options.multiToggleDefaults === 'string') {
this.options.multiToggleDefaults = JSON.parse(this.options.multiToggleDefaults);
}
if (this.options.showToggleBtn && this.options.showColumns) {
var showbtn = "<button class='btn btn-default hidden' id='showAllBtn'><span class='glyphicon glyphicon-resize-full icon-zoom-in'></span></button>",
hidebtn = "<button class='btn btn-default' id='hideAllBtn'><span class='glyphicon glyphicon-resize-small icon-zoom-out'></span></button>";
$btnGroup.append(showbtn + hidebtn);
$btnGroup.find('#showAllBtn').click(function () {
that.showAllColumns();
$btnGroup.find('#hideAllBtn').toggleClass('hidden');
$btnGroup.find('#showAllBtn').toggleClass('hidden');
});
$btnGroup.find('#hideAllBtn').click(function () {
that.hideAllColumns();
$btnGroup.find('#hideAllBtn').toggleClass('hidden');
$btnGroup.find('#showAllBtn').toggleClass('hidden');
});
}
};
BootstrapTable.prototype.hideAllColumns = function () {
var that = this,
defaults = that.options.multiToggleDefaults;
$.each(this.columns, function (index, column) {
// if it's one of the defaults, don't touch it
if (defaults.indexOf(column.field) == -1 && column.switchable) {
column.visible = false;
var $items = that.$toolbar.find('.keep-open input').prop('disabled', false);
$items.filter(sprintf('[value="%s"]', index)).prop('checked', false);
}
});
reInit(that);
};
BootstrapTable.prototype.showAllColumns = function () {
var that = this;
$.each(this.columns, function (index, column) {
if (column.switchable) {
column.visible = true;
}
var $items = that.$toolbar.find('.keep-open input').prop('disabled', false);
$items.filter(sprintf('[value="%s"]', index)).prop('checked', true);
});
reInit(that);
that.toggleColumn(0, that.columns[0].visible, false);
};
}(jQuery);

View File

@@ -0,0 +1,17 @@
{
"name": "Multi Column Toggle",
"version": "1.0.0",
"description": "Allows hiding and showing of multiple columns at once.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multi-column-toggle",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/multi-column-toggle.html",
"plugins": [{
"name": "multi-column-toggle",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multi-column-toggle"
}],
"author": {
"name": "Homer Glascock",
"image": "https://avatars1.githubusercontent.com/u/5546710"
}
}

View File

@@ -0,0 +1,71 @@
/**
* @author: Dennis Hernández
* @webSite: http://djhvscf.github.io/Blog
* @version: v1.0.0
*/
!function ($) {
'use strict';
$.extend($.fn.bootstrapTable.defaults, {
multipleSearch: false,
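// note: the option name 'delimeter' (sic) is part of this extension's public API and is
// therefore left as-is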
delimeter: " "
});
var BootstrapTable = $.fn.bootstrapTable.Constructor,
_initSearch = BootstrapTable.prototype.initSearch;
BootstrapTable.prototype.initSearch = function () {
if (this.options.multipleSearch) {
if (this.searchText === undefined) {
return;
}
var strArray = this.searchText.split(this.options.delimeter),
that = this,
f = $.isEmptyObject(this.filterColumns) ? null : this.filterColumns,
dataFiltered = [];
if (strArray.length === 1) {
_initSearch.apply(this, Array.prototype.slice.apply(arguments));
} else {
for (var i = 0; i < strArray.length; i++) {
var str = strArray[i].trim();
dataFiltered = str ? $.grep(dataFiltered.length === 0 ? this.options.data : dataFiltered, function (item, i) {
for (var key in item) {
key = $.isNumeric(key) ? parseInt(key, 10) : key;
var value = item[key],
column = that.columns[that.fieldsColumnsIndex[key]],
j = $.inArray(key, that.header.fields);
// Fix #142: search uses formatted data
if (column && column.searchFormatter) {
value = $.fn.bootstrapTable.utils.calculateObjectValue(column,
that.header.formatters[j], [value, item, i], value);
}
var index = $.inArray(key, that.header.fields);
if (index !== -1 && that.header.searchables[index] && (typeof value === 'string' || typeof value === 'number')) {
if (that.options.strictSearch) {
if ((value + '').toLowerCase() === str) {
return true;
}
} else {
if ((value + '').toLowerCase().indexOf(str) !== -1) {
return true;
}
}
}
}
return false;
}) : this.data;
}
this.data = dataFiltered;
}
} else {
_initSearch.apply(this, Array.prototype.slice.apply(arguments));
}
};
}(jQuery);

View File

@@ -0,0 +1,17 @@
{
"name": "Multiple Search",
"version": "1.0.0",
"description": "Plugin to support the multiple search.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-search",
"example": "#",
"plugins": [{
"name": "bootstrap-table-multiple-search",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-search"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
.multiple-select-row-selected {
background: lightBlue;
}
.table tbody tr:hover td,
.table tbody tr:hover th {
background-color: transparent;
}
.table-striped tbody tr:nth-child(odd):hover td {
background-color: #F9F9F9;
}
.fixed-table-container tbody .selected td {
background: lightBlue;
}

View File

@@ -0,0 +1,127 @@
/**
* @author: Dennis Hernández
* @webSite: http://djhvscf.github.io/Blog
* @version: v1.0.0
*/
!function ($) {
'use strict';
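// Globally disable native text selection so that shift-click range selection does not
// also highlight the page text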
document.onselectstart = function() {
return false;
};
var getTableObjectFromCurrentTarget = function (currentTarget) {
currentTarget = $(currentTarget);
return currentTarget.is("table") ? currentTarget : currentTarget.parents().find(".table");
};
var getRow = function (target) {
target = $(target);
return target.parent().parent();
};
var onRowClick = function (e) {
var that = getTableObjectFromCurrentTarget(e.currentTarget);
// use the jQuery-normalized event properties instead of the non-standard window.event
if (e.ctrlKey) {
toggleRow(e.currentTarget, that, false, false);
}
if (e.button === 0) {
if (!e.ctrlKey && !e.shiftKey) {
clearAll(that);
toggleRow(e.currentTarget, that, false, false);
}
if (e.shiftKey) {
selectRowsBetweenIndexes([that.bootstrapTable("getOptions").multipleSelectRowLastSelectedRow.rowIndex, e.currentTarget.rowIndex], that);
}
}
};
var onCheckboxChange = function (e) {
var that = getTableObjectFromCurrentTarget(e.currentTarget);
clearAll(that);
toggleRow(getRow(e.currentTarget), that, false, false);
};
var toggleRow = function (row, that, clearAll, useShift) {
if (clearAll) {
row = $(row);
that.bootstrapTable("getOptions").multipleSelectRowLastSelectedRow = undefined;
row.removeClass(that.bootstrapTable("getOptions").multipleSelectRowCssClass);
that.bootstrapTable("uncheck", row.data("index"));
} else {
that.bootstrapTable("getOptions").multipleSelectRowLastSelectedRow = row;
row = $(row);
if (useShift) {
row.addClass(that.bootstrapTable("getOptions").multipleSelectRowCssClass);
that.bootstrapTable("check", row.data("index"));
} else {
if (row.hasClass(that.bootstrapTable("getOptions").multipleSelectRowCssClass)) {
row.removeClass(that.bootstrapTable("getOptions").multipleSelectRowCssClass);
that.bootstrapTable("uncheck", row.data("index"));
} else {
row.addClass(that.bootstrapTable("getOptions").multipleSelectRowCssClass);
that.bootstrapTable("check", row.data("index"));
}
}
}
};
var selectRowsBetweenIndexes = function (indexes, that) {
indexes.sort(function(a, b) {
return a - b;
});
for (var i = indexes[0]; i <= indexes[1]; i++) {
toggleRow(that.bootstrapTable("getOptions").multipleSelectRowRows[i-1], that, false, true);
}
};
var clearAll = function (that) {
for (var i = 0; i < that.bootstrapTable("getOptions").multipleSelectRowRows.length; i++) {
toggleRow(that.bootstrapTable("getOptions").multipleSelectRowRows[i], that, true, false);
}
};
$.extend($.fn.bootstrapTable.defaults, {
multipleSelectRow: false,
multipleSelectRowCssClass: 'multiple-select-row-selected',
//internal variables used by the extension
multipleSelectRowLastSelectedRow: undefined,
multipleSelectRowRows: []
});
var BootstrapTable = $.fn.bootstrapTable.Constructor,
_init = BootstrapTable.prototype.init,
_initBody = BootstrapTable.prototype.initBody;
BootstrapTable.prototype.init = function () {
if (this.options.multipleSelectRow) {
var that = this;
//Make sure that the internal variables have the correct value
this.options.multipleSelectRowLastSelectedRow = undefined;
this.options.multipleSelectRowRows = [];
this.$el.on("post-body.bs.table", function (e) {
setTimeout(function () {
that.options.multipleSelectRowRows = that.$body.children();
that.options.multipleSelectRowRows.click(onRowClick);
that.options.multipleSelectRowRows.find("input[type=checkbox]").change(onCheckboxChange);
}, 1);
});
}
_init.apply(this, Array.prototype.slice.apply(arguments));
};
BootstrapTable.prototype.clearAllMultipleSelectionRow = function () {
clearAll(this);
};
$.fn.bootstrapTable.methods.push('clearAllMultipleSelectionRow');
}(jQuery);

View File

@@ -0,0 +1,17 @@
{
"name": "Multiple Selection Row",
"version": "1.0.0",
"description": "Plugin to enable the multiple selection row. You can use the ctrl+click to select one row or use ctrl+shift+click to select a range of rows.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-selection-row",
"example": "",
"plugins": [{
"name": "bootstrap-table-multiple-selection-row",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-selection-row"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Multiple Sort",
"version": "1.1.0",
"description": "Plugin to support the multiple sort.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-sort",
"example": "#",
"plugins": [{
"name": "bootstrap-table-multiple-sort",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/multiple-sort"
}],
"author": {
"name": "dimbslmh",
"image": "https://avatars1.githubusercontent.com/u/745635"
}
}

View File

@@ -0,0 +1,67 @@
/**
* @author: Brian Huisman
* @webSite: http://www.greywyvern.com
* @version: v1.0.0
* JS functions to allow natural sorting on bootstrap-table columns
* add data-sorter="alphanum" or data-sorter="numericOnly" to any th
*
* @update Dennis Hernández <http://djhvscf.github.io/Blog>
* @update Duane May
*/
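// Usage sketch (the markup below is illustrative, not part of this file): a column
// header such as <th data-field="serial" data-sorter="alphanum">Serial</th> makes
// bootstrap-table call the global alphanum(a, b) defined below when that column is sorted.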
function alphanum(a, b) {
function chunkify(t) {
var tz = [],
x = 0,
y = -1,
n = 0,
i,
j;
while (i = (j = t.charAt(x++)).charCodeAt(0)) {
var m = (i === 46 || (i >= 48 && i <= 57));
if (m !== n) {
tz[++y] = "";
n = m;
}
tz[y] += j;
}
return tz;
}
function stringfy(v) {
if (typeof(v) === "number") {
v = "" + v;
}
if (!v) {
v = "";
}
return v;
}
var aa = chunkify(stringfy(a));
var bb = chunkify(stringfy(b));
for (var x = 0; aa[x] && bb[x]; x++) {
if (aa[x] !== bb[x]) {
var c = Number(aa[x]),
d = Number(bb[x]);
if (c == aa[x] && d == bb[x]) {
return c - d;
} else {
return (aa[x] > bb[x]) ? 1 : -1;
}
}
}
return aa.length - bb.length;
}
function numericOnly(a, b) {
function stripNonNumber(s) {
s = s.replace(new RegExp(/[^0-9]/g), "");
return parseInt(s, 10);
}
return stripNonNumber(a) - stripNonNumber(b);
}

View File

@@ -0,0 +1,17 @@
{
"name": "Natural Sorting",
"version": "1.0.0",
"description": "Plugin to support the natural sorting.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/natural-sorting",
"example": "#",
"plugins": [{
"name": "bootstrap-table-natural-sorting",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/natural-sorting"
}],
"author": {
"name": "GreyWyvern",
"image": "https://avatars1.githubusercontent.com/u/137631"
}
}

View File

@@ -0,0 +1,21 @@
(The MIT License)
Copyright (c) 2019 doug-the-guy <badlydrawnsun@yahoo.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,92 @@
# Bootstrap Table Pipelining
Use Plugin: [bootstrap-table-pipeline]
This plugin enables client side data caching for server side requests which will
eliminate the need to issue a new request every page change. This will allow
for a performance balance for a large data set between returning all data at once
(client side paging) and a new server side request (server side paging).
There are two new options:
- usePipeline: enables this feature
- pipelineSize: the size of each cache window
The size of the pipeline must be evenly divisible by the current page size. This is
assured by rounding up to the nearest evenly divisible value. For example, if
the pipeline size is 4990 and the current page size is 25, then pipeline size will
be dynamically set to 5000.
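That rounding rule can be written down directly; the following is only an illustrative sketch of the behaviour described above (the function name is made up, not part of the plugin):
```
// Round the requested pipeline size up to the nearest multiple of the page size,
// e.g. roundPipelineSize(4990, 25) === 5000.
function roundPipelineSize(requestedSize, pageSize) {
    return Math.ceil(requestedSize / pageSize) * pageSize;
}
```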
The cache windows are computed based on the pipeline size and the total number of rows
returned by the server side query. For example, with pipeline size 500 and total rows
1300, the cache windows will be:
[{'lower': 0, 'upper': 499}, {'lower': 500, 'upper': 999}, {'lower': 1000, 'upper': 1499}]
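A minimal sketch of that window computation, using made-up names (the plugin's internal implementation may differ):
```
// Split the result set into consecutive [lower, upper] windows of pipelineSize rows.
function computeCacheWindows(totalRows, pipelineSize) {
    var windows = [];
    for (var lower = 0; lower < totalRows; lower += pipelineSize) {
        windows.push({lower: lower, upper: lower + pipelineSize - 1});
    }
    return windows;
}
// computeCacheWindows(1300, 500)
// -> [{lower: 0, upper: 499}, {lower: 500, upper: 999}, {lower: 1000, upper: 1499}]
```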
Using the limit (i.e. the pipelineSize) and offset parameters, the server side request
**MUST** return only the data in the requested cache window **AND** the total number of rows.
To wit, the server side code must use the offset and limit parameters to prepare the response
data.
On a page change, the new offset is checked if it is within the current cache window. If so,
the requested page data is returned from the cached data set. Otherwise, a new server side
request will be issued for the new cache window.
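Put another way, the page-change decision reduces to a range check such as this sketch (names are placeholders):
```
// Serve the page from the cached window when the new offset falls inside it;
// otherwise a new server side request is needed for the window containing the offset.
function canServeFromCache(offset, currentWindow) {
    return offset >= currentWindow.lower && offset <= currentWindow.upper;
}
```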
The current cached data is only invalidated on these events:
- sorting
- searching
- page size change
- page change moves into a new cache window
There are two new events:
- cached-data-hit.bs.table: issued when cached data is used on a page change
- cached-data-reset.bs.table: issued when the cached data is invalidated and new server side request is issued
## Features
* Created with Bootstrap 4
## Usage
```
# assumed import of bootstrap and bootstrap-table assets
<script src="/path/to/bootstrap-table-pipeline.js"></script>
...
<table id="pipeline_table"
class="table table-striped"
data-method='post'
data-use-pipeline="true"
data-pipeline-size="5000"
data-pagination="true"
data-side-pagination="server"
data-page-size="50">
<thead><tr>
<th data-field="type" data-sortable="true">Type</th>
<th data-field="value" data-sortable="true">Value</th>
<th data-field="date" data-sortable="true">Date</th>
</tr></thead>
</table>
```
## Options
### usePipeline
* type: Boolean
* description: Set true to enable pipelining
* default: `false`
## pipelineSize
* type: Integer
* description: Size of each cache window. Must be greater than 0
* default: `1000`
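Both options can equally be passed when initialising the table from JavaScript instead of via data attributes; the selector and the extra options below are placeholders for illustration:
```
$('#pipeline_table').bootstrapTable({
    usePipeline: true,
    pipelineSize: 5000,
    sidePagination: 'server',
    pagination: true,
    pageSize: 50
});
```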
## Events
### onCachedDataHit(cached-data-hit.bs.table)
* Fires when paging was able to use the locally cached data.
### onCachedDataReset(cached-data-reset.bs.table)
* Fires when the locally cached data needed to be reset (i.e. on sorting, searching, page size change or paged out of current cache window)
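A sketch of subscribing to those events with jQuery (the table id and handler bodies are placeholders):
```
$('#pipeline_table')
    .on('cached-data-hit.bs.table', function () {
        console.log('page change served from the local cache');
    })
    .on('cached-data-reset.bs.table', function () {
        console.log('cache invalidated, new server side request issued');
    });
```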

View File

@@ -0,0 +1,18 @@
{
"name": "Pipeline",
"version": "1.0.0",
"description": "Plugin to support a hybrid approach to server/client side paging.",
"url": "",
"example": "#",
"plugins": [{
"name": "bootstrap-table-pipeline",
"url": ""
}],
"author": {
"name": "doug-the-guy",
"image": ""
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Reorder Columns",
"version": "1.1.0",
"description": "Plugin to support the reordering columns feature.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/reorder-columns",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/reorder-columns.html",
"plugins": [{
"name": "bootstrap-table-reorder-columns",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/reorder-columns"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Reorder Rows",
"version": "1.0.0",
"description": "Plugin to support the reordering rows feature.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/reorder-rows",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/reorder-rows.html",
"plugins": [{
"name": "bootstrap-table-reorder-rows",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/reorder-rows"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Resizable",
"version": "1.1.0",
"description": "Plugin to support the resizable feature.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/resizable",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/resizable.html",
"plugins": [{
"name": "bootstrap-table-resizable",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/resizable"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,332 @@
/**
* @author: Jewway
* @version: v1.1.1
*/
! function ($) {
'use strict';
function getCurrentHeader(that) {
var header = that.$header;
if (that.options.height) {
header = that.$tableHeader;
}
return header;
}
function initFilterValues(that) {
if (!$.isEmptyObject(that.filterColumnsPartial)) {
var $header = getCurrentHeader(that);
$.each(that.columns, function (idx, column) {
var value = that.filterColumnsPartial[column.field];
if (column.filter) {
if (column.filter.setFilterValue) {
var $filter = $header.find('[data-field=' + column.field + '] .filter');
column.filter.setFilterValue($filter, column.field, value);
} else {
var $ele = $header.find('[data-filter-field=' + column.field + ']');
switch (column.filter.type) {
case 'input':
$ele.val(value);
break;
case 'select':
$ele.val(value).trigger('change');
}
}
}
});
}
}
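// Builds one filter control per column (a text input or a select2 dropdown, hidden for
// non-searchable columns) and appends it to the matching header cell.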
function createFilter(that, header) {
var enableFilter = false,
isVisible,
html,
timeoutId = 0;
$.each(that.columns, function (i, column) {
isVisible = 'hidden';
html = null;
if (!column.visible) {
return;
}
if (!column.filter) {
html = $('<div class="no-filter"></div>');
} else {
var filterClass = column.filter.class ? ' ' + column.filter.class : '';
html = $('<div style="margin: 0px 2px 2px 2px;" class="filter' + filterClass + '">');
if (column.searchable) {
enableFilter = true;
isVisible = 'visible'
}
if (column.filter.template) {
html.append(column.filter.template(that, column, isVisible));
} else {
var $filter = $(that.options.filterTemplate[column.filter.type.toLowerCase()](that, column, isVisible));
switch (column.filter.type) {
case 'input':
var cpLock = true;
$filter.off('compositionstart').on('compositionstart', function (event) {
cpLock = false;
});
$filter.off('compositionend').on('compositionend', function (event) {
cpLock = true;
var $input = $(this);
clearTimeout(timeoutId);
timeoutId = setTimeout(function () {
that.onColumnSearch(event, column.field, $input.val());
}, that.options.searchTimeOut);
});
$filter.off('keyup').on('keyup', function (event) {
if (cpLock) {
var $input = $(this);
clearTimeout(timeoutId);
timeoutId = setTimeout(function () {
that.onColumnSearch(event, column.field, $input.val());
}, that.options.searchTimeOut);
}
});
$filter.off('mouseup').on('mouseup', function (event) {
var $input = $(this),
oldValue = $input.val();
if (oldValue === "") {
return;
}
setTimeout(function () {
var newValue = $input.val();
if (newValue === "") {
clearTimeout(timeoutId);
timeoutId = setTimeout(function () {
that.onColumnSearch(event, column.field, newValue);
}, that.options.searchTimeOut);
}
}, 1);
});
break;
case 'select':
$filter.on('select2:select', function (event) {
that.onColumnSearch(event, column.field, $(this).val());
});
$filter.on("select2:unselecting", function (event) {
var $select2 = $(this);
event.preventDefault();
$select2.val(null).trigger('change');
that.searchText = undefined;
that.onColumnSearch(event, column.field, $select2.val());
});
break;
}
html.append($filter);
}
}
$.each(header.children().children(), function (i, tr) {
tr = $(tr);
if (tr.data('field') === column.field) {
tr.find('.fht-cell').append(html);
return false;
}
});
});
if (!enableFilter) {
header.find('.filter').hide();
}
}
function initSelect2(that) {
var $header = getCurrentHeader(that);
$.each(that.columns, function (idx, column) {
if (column.filter && column.filter.type === 'select') {
var $selectEle = $header.find('select[data-filter-field="' + column.field + '"]');
if ($selectEle.length > 0 && !$selectEle.data().select2) {
var select2Opts = {
placeholder: "",
allowClear: true,
data: column.filter.data,
dropdownParent: that.$el.closest(".bootstrap-table")
};
$selectEle.select2(select2Opts);
}
}
});
}
$.extend($.fn.bootstrapTable.defaults, {
filter: false,
filterValues: {},
filterTemplate: {
input: function (instance, column, isVisible) {
return '<input type="text" class="form-control" data-filter-field="' + column.field + '" style="width: 100%; visibility:' + isVisible + '">';
},
select: function (instance, column, isVisible) {
return '<select data-filter-field="' + column.field + '" style="width: 100%; visibility:' + isVisible + '"></select>';
}
},
onColumnSearch: function (field, text) {
return false;
}
});
$.extend($.fn.bootstrapTable.COLUMN_DEFAULTS, {
filter: undefined
});
$.extend($.fn.bootstrapTable.Constructor.EVENTS, {
'column-search.bs.table': 'onColumnSearch'
});
var BootstrapTable = $.fn.bootstrapTable.Constructor,
_init = BootstrapTable.prototype.init,
_initHeader = BootstrapTable.prototype.initHeader,
_initSearch = BootstrapTable.prototype.initSearch;
BootstrapTable.prototype.init = function () {
// Make sure that the filter option is set
if (this.options.filter) {
var that = this;
if (that.options.filterTemplate) {
that.options.filterTemplate = $.extend({}, $.fn.bootstrapTable.defaults.filterTemplate, that.options.filterTemplate);
}
if (!$.isEmptyObject(that.options.filterValues)) {
that.filterColumnsPartial = that.options.filterValues;
that.options.filterValues = {};
}
this.$el.on('reset-view.bs.table', function () {
//Create controls on $tableHeader if the height is set
if (!that.options.height) {
return;
}
//Avoid recreate the controls
if (that.$tableHeader.find('select').length > 0 || that.$tableHeader.find('input').length > 0) {
return;
}
createFilter(that, that.$tableHeader);
}).on('post-header.bs.table', function () {
var timeoutId = 0;
initSelect2(that);
clearTimeout(timeoutId);
timeoutId = setTimeout(function () {
initFilterValues(that);
}, that.options.searchTimeOut - 1000);
}).on('column-switch.bs.table', function (field, checked) {
initFilterValues(that);
});
}
_init.apply(this, Array.prototype.slice.apply(arguments));
};
BootstrapTable.prototype.initHeader = function () {
_initHeader.apply(this, Array.prototype.slice.apply(arguments));
if (this.options.filter) {
createFilter(this, this.$header);
}
};
BootstrapTable.prototype.initSearch = function () {
var that = this,
filterValues = that.filterColumnsPartial;
// Filter for client
if (that.options.sidePagination === 'client') {
this.data = $.grep(this.data, function (row, idx) {
for (var field in filterValues) {
var column = that.columns[that.fieldsColumnsIndex[field]],
filterValue = filterValues[field].toLowerCase(),
rowValue = row[field];
rowValue = $.fn.bootstrapTable.utils.calculateObjectValue(
that.header,
that.header.formatters[$.inArray(field, that.header.fields)], [rowValue, row, idx], rowValue);
if (column.filterStrictSearch) {
if (!($.inArray(field, that.header.fields) !== -1 &&
(typeof rowValue === 'string' || typeof rowValue === 'number') &&
rowValue.toString().toLowerCase() === filterValue.toString().toLowerCase())) {
return false;
}
} else {
if (!($.inArray(field, that.header.fields) !== -1 &&
(typeof rowValue === 'string' || typeof rowValue === 'number') &&
(rowValue + '').toLowerCase().indexOf(filterValue) !== -1)) {
return false;
}
}
}
return true;
});
}
_initSearch.apply(this, Array.prototype.slice.apply(arguments));
};
BootstrapTable.prototype.onColumnSearch = function (event, field, value) {
if ($.isEmptyObject(this.filterColumnsPartial)) {
this.filterColumnsPartial = {};
}
if (value) {
this.filterColumnsPartial[field] = value;
} else {
delete this.filterColumnsPartial[field];
}
this.options.pageNumber = 1;
this.onSearch(event);
this.trigger('column-search', field, value);
};
BootstrapTable.prototype.setSelect2Data = function (field, data) {
var that = this,
$header = getCurrentHeader(that),
$selectEle = $header.find('select[data-filter-field=\"' + field + '\"]');
$selectEle.empty();
$selectEle.select2({
data: data,
placeholder: "",
allowClear: true,
dropdownParent: that.$el.closest(".bootstrap-table")
});
$.each(this.columns, function (idx, column) {
if (column.field === field) {
column.filter.data = data;
return false;
}
});
};
BootstrapTable.prototype.setFilterValues = function (values) {
this.filterColumnsPartial = values;
};
$.fn.bootstrapTable.methods.push('setSelect2Data');
$.fn.bootstrapTable.methods.push('setFilterValues');
}(jQuery);

View File

@@ -0,0 +1,17 @@
{
"name": "Select2 Filter",
"version": "1.1.0",
"description": "Plugin to add select2 filter on the top of the columns in order to filter the data.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/select2-filter",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/select2-filter.html",
"plugins": [{
"name": "bootstrap-table-select2-filter",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/select2-filter"
}],
"author": {
"name": "Jewway",
"image": "https://avatars0.githubusercontent.com/u/3501899"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Sticky Header",
"version": "1.0.0",
"description": "An extension which provides a sticky header for table columns when scrolling on a long page and / or table. Works for tables with many columns and narrow width with horizontal scrollbars too.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/sticky-header",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/sticky-header.html",
"plugins": [{
"name": "bootstrap-table-sticky-header",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/sticky-header"
}],
"author": {
"name": "vinzloh",
"image": "https://avatars0.githubusercontent.com/u/5501845"
}
}

Some files were not shown because too many files have changed in this diff.