Mirror of https://github.com/inventree/InvenTree.git (synced 2025-12-18 12:56:31 -06:00)

Compare commits: 270 commits
Commits included in this comparison (abbreviated SHA1 hashes):

b32f081a77, f80b116307, 167784049d, 610c0a8623, c2f9bbfdcd, 3df402985a, 35fc954ff8, ac83318081, acfd39258b, 0287bf78e4,
f8d6fed06e, beca269e3a, 506139c1cd, cdb0f2373c, 287ee8f831, b807e47619, 40e9b33da6, 72dceb28be, 1a01e8abee, df0bb34620,
75a397aab8, f4fd84f50f, 3b9fae6279, 39ba25c5ed, d39ab9c902, dc03b33c35, 69871699c0, c8d6f2246b, eb3d51f8a7, 6962b61fff,
37c1fe1ccb, 05e67d310a, cbd2794a7e, bf97804a0b, b192c44b61, 820d7c6a15, a2a7b60d41, 38d013ffe2, 2e81a304d1, 8f8b46e50d,
1199291835, f6123cc261, 8a128a2bdd, bbace2b1c9, 0c6334b3b4, 405523881c, bb105d8c08, de23023277, 8db769968f, ea63a03fe4,
75c24fb8f4, 85225538e6, f441f672d6, 18dcc60efd, a3dc3bdbf4, 2df0fd8a67, d689b95963, 1fa64dd8cc, 6f0b2b31a8, f5e02fd292,
5dbd3030d1, fdcbb68616, 6e713b15ae, 8bf614607c, c4d68aeef9, 571b71f232, defa03b83a, 0bfbd45cec, a310437dc7, 7694092935,
55c64b546f, 68ba9653ef, 567c7edbaf, 3eb1914f1e, 41f09f7578, ebe01530e6, 6e932f85cf, ed95cf2a8f, 3a52a1631d, 3b11a01966,
a74b29f059, 7dab02555d, 5f3932b7e1, 0f51127adf, ad1c1ae604, 7adf2e0835, a1eb3623ba, 7681cd2c44, e88defd026, d9b769d27b,
04261dbcac, 43457d4136, 21f209f7cc, 38fac47e39, 8807492db6, 35577fad41, 5af6b92f90, 5ca007a184, 0e1923a90c, aa7eaaab3a,
aed7754bc2, d86f964fb1, 3637b28da7, 825366f684, f3ca704e97, 0c2f1cceb6, b372db8960, 824aa8138b, e6e6473503, bf2111ef9d,
903c65d08a, 9c93130224, 0ba16bbbbd, 325841dbf1, af4d888b1b, a99ba75fed, 226dc82cfd, b55e840050, 633fbd37bd, 8b62f7b2c0,
4e58f0a3c7, d77cbb4c9b, 59c78460c8, 22af3e2f59, edd6f25411, fdd70263ea, 55949e5321, df5a3013e6, 045af50f99, c0c4e9c226,
cd803640a9, 2b9816d1a3, dce2954466, e3fb12dc03, 341f638e5d, d36cf358f8, 676bb02f6e, 4869d0dab8, fc86064bd1, f88d5577d1,
e9c6dd8273, 3a48af6bd4, 77fd6b6bb3, 7483fd203d, e207606686, 74d0eb729c, c4a71a991a, 2a924ec85b, 5bc00298c6, 538ff9be7b,
be30cec2ad, c9c93bce39, f97cdef9fc, ec2a66e7a5, 577185cd98, 9197517f38, e1e63fa644, e85dd73f62, 7fe8207463, 2557383892,
fb71e847bb, 3bfde82394, 282ecebc39, b42f3de357, b29d86403e, 9a215f97f5, 0f7d385755, b42f8a620b, cf7a20e1b7, f6ba180cc4,
1272b89839, 0f2675c139, 65ecb975c6, fb0baa9e7a, f07d8a7a80, 76410ef68d, 0a94758d63, d8f69c0609, 8f6893a6b2, f35ce29612,
d502d93380, ab921ccb31, e7d926f983, f85b773a50, d64fbfc254, 2fbb8c757f, edad000d8e, 917a88c6f4, 428a4c0386, 4b5fcd4e69,
c5d0902379, e3f6624cf9, 22068da3fa, 4f8dddc597, 914743627b, a0b595de6e, 357f715789, 64dbf8c1e3, 89e458bcba, 1d3a23ca4e,
b8b3dfc90e, c3a5d777b1, 053df0f59d, 0892de8c99, eb7fd4de2b, b983a8636c, 7d36049ac9, 386aa5952c, 75f75ed820, 5c7d3af150,
21ff17332d, 7cd62527c0, 716e577916, 8eec2f32c0, fa28697799, 829e01dd33, 576bef5d82, 3511450b3d, f396642d16, d2d59e0709,
f27503b1af, 9070eaad60, 4254b7dda3, 544c7d389c, ef679b1663, 5180d86388, 9b1a310ffe, 213a63318d, a04b22b090, b4a7787447,
1b054c5a9c, b8369fb726, 8a764c2c05, 082b5d1c80, 7f231cb6c1, df8b480abb, 9d0264c319, 9db3efa085, 4b14986591, 9715af564f,
36bb3c5645, d1cc81fc9f, 5135d6b5d1, 4f4852e2e7, fbc12fb496, e2f78829ec, 445551e6f3, e1b670ba57, 73cc39bb68, 9f01962c4e,
a23235400d, 53ac6c724d, b5b6d75e23, ec9bff9be4, dfaee0ea96, 93df90d295, 6a6a5932f3, a35e03671c, 88f86efd4c, 1cdcac09be,
f3efabeeb8, fc5645a9a5, be1820fb94, a63529a9cf, 148bf0764b, 7058696030, bee3e93162, 95d29f18e9, 43836b3450, 36bfd62c93
Devcontainer configuration:

```
@@ -1,16 +1,11 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/python-3
{
"name": "InvenTree",
"build": {
"dockerfile": "../Dockerfile",
"context": "..",
"target": "devcontainer",
"args": {
"base_image": "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18",
"workspace": "${containerWorkspaceFolder}"
}
},
"name": "InvenTree devcontainer",
"dockerComposeFile": "docker-compose.yml",
"service": "inventree",
"overrideCommand": true,
"workspaceFolder": "/home/inventree/",

// Configure tool-specific properties.
"customizations": {
@@ -21,12 +16,10 @@
"python.defaultInterpreterPath": "${containerWorkspaceFolder}/dev/venv/bin/python",
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
@@ -44,40 +37,27 @@
},

// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [5173, 8000],
"forwardPorts": [5173, 8000, 8080],
"portsAttributes": {
"5173": {
"label": "Vite server"
"label": "Vite Server"
},
"8000": {
"label": "InvenTree server"
"label": "InvenTree Server"
},
"8080": {
"label": "mkdocs server"
}
},

// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "./.devcontainer/postCreateCommand.sh ${containerWorkspaceFolder}",
"postCreateCommand": ".devcontainer/postCreateCommand.sh",

// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"containerUser": "vscode",
"features": {
"git": "os-provided"
},

"remoteEnv": {
// InvenTree config
"INVENTREE_DEBUG": "True",
"INVENTREE_LOG_LEVEL": "INFO",
"INVENTREE_DB_ENGINE": "sqlite3",
"INVENTREE_DB_NAME": "${containerWorkspaceFolder}/dev/database.sqlite3",
"INVENTREE_MEDIA_ROOT": "${containerWorkspaceFolder}/dev/media",
"INVENTREE_STATIC_ROOT": "${containerWorkspaceFolder}/dev/static",
"INVENTREE_BACKUP_DIR": "${containerWorkspaceFolder}/dev/backup",
"INVENTREE_CONFIG_FILE": "${containerWorkspaceFolder}/dev/config.yaml",
"INVENTREE_SECRET_KEY_FILE": "${containerWorkspaceFolder}/dev/secret_key.txt",
"INVENTREE_PLUGINS_ENABLED": "True",
"INVENTREE_PLUGIN_DIR": "${containerWorkspaceFolder}/dev/plugins",
"INVENTREE_PLUGIN_FILE": "${containerWorkspaceFolder}/dev/plugins.txt",

// Python config
"PIP_USER": "no",
```
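The same configuration can be exercised without the VS Code UI. A minimal sketch using the devcontainers CLI; the CLI itself is not part of this change and is assumed to be installed separately via npm:

```bash
# Install the devcontainers CLI (assumes Node.js / npm are available on the host)
npm install -g @devcontainers/cli

# Build and start the services referenced by .devcontainer/devcontainer.json,
# which now delegates to docker-compose.yml and the "inventree" service
devcontainer up --workspace-folder .

# Run a command inside the running devcontainer
devcontainer exec --workspace-folder . invoke --list
```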
.devcontainer/docker-compose.yml (new file, 40 lines)

```
@@ -0,0 +1,40 @@
version: "3"

services:
  db:
    image: postgres:13
    restart: unless-stopped
    expose:
      - 5432/tcp
    volumes:
      - inventreedatabase:/var/lib/postgresql/data:z
    environment:
      POSTGRES_DB: inventree
      POSTGRES_USER: inventree_user
      POSTGRES_PASSWORD: inventree_password

  inventree:
    build:
      context: ..
      target: dev
      args:
        base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"
        data_dir: "dev"
    volumes:
      - ../:/home/inventree:z

    environment:
      INVENTREE_DEBUG: True
      INVENTREE_DB_ENGINE: postgresql
      INVENTREE_DB_NAME: inventree
      INVENTREE_DB_HOST: db
      INVENTREE_DB_USER: inventree_user
      INVENTREE_DB_PASSWORD: inventree_password
      INVENTREE_PLUGINS_ENABLED: True
      INVENTREE_PY_ENV: /home/inventree/dev/venv

    depends_on:
      - db

volumes:
  inventreedatabase:
```
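For debugging, the same compose stack can be driven directly, outside of the devcontainer tooling. A sketch, assuming the file sits at `.devcontainer/docker-compose.yml` as the header above indicates:

```bash
# Start only the Postgres service in the background
docker compose -f .devcontainer/docker-compose.yml up -d db

# Build and start the InvenTree dev service (the "dev" target of the root Dockerfile)
docker compose -f .devcontainer/docker-compose.yml up --build inventree

# Tear the stack down again; the named "inventreedatabase" volume is kept
docker compose -f .devcontainer/docker-compose.yml down
```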
Devcontainer post-create script:

```
@@ -1,21 +1,19 @@
#!/bin/bash

# Avoiding Dubious Ownership in Dev Containers for setup commands that use git
# Note that the local workspace directory is passed through as the first argument $1
git config --global --add safe.directory $1

# create folders
mkdir -p $1/dev/{commandhistory,plugins}
cd $1
git config --global --add safe.directory /home/inventree

# create venv
python3 -m venv $1/dev/venv
. $1/dev/venv/bin/activate
python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
. /home/inventree/dev/venv/bin/activate

# setup InvenTree server
pip install invoke
invoke update
# Run initial InvenTree server setup
invoke update -s

# Configure dev environment
invoke setup-dev

# Install required frontend packages
invoke frontend-install

# remove existing gitconfig created by "Avoiding Dubious Ownership" step
```
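Since the updated script no longer takes the workspace folder as an argument, it can be re-run from inside the container if the environment needs to be rebuilt. A sketch, assuming the `/home/inventree` workspace layout used above:

```bash
# Re-run the post-create provisioning from the workspace root
bash .devcontainer/postCreateCommand.sh

# The virtual environment created by the script can then be activated manually
. /home/inventree/dev/venv/bin/activate
```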
Deleted djlint ignore configuration:

```
@@ -1,3 +0,0 @@
{
"ignore": "D018,H006,H008,H020,H021,H023,H025,H030,H031,T002"
}
```
.github/actions/setup/action.yaml (13 lines changed, vendored)

```
@@ -49,11 +49,14 @@ runs:
shell: bash
run: |
python3 -m pip install -U pip
pip3 install invoke wheel
pip3 install invoke wheel uv
- name: Set the VIRTUAL_ENV variable for uv to work
run: echo "VIRTUAL_ENV=${Python_ROOT_DIR}" >> $GITHUB_ENV
shell: bash
- name: Install Specific Python Dependencies
if: ${{ inputs.pip-dependency }}
shell: bash
run: pip3 install ${{ inputs.pip-dependency }}
run: uv pip install ${{ inputs.pip-dependency }}

# NPM installs
- name: Install node.js ${{ env.node_version }}
@@ -79,12 +82,12 @@ runs:
- name: Install dev requirements
if: ${{ inputs.dev-install == 'true' ||inputs.install == 'true' }}
shell: bash
run: pip install -r requirements-dev.txt
run: uv pip install -r requirements-dev.txt
- name: Run invoke install
if: ${{ inputs.install == 'true' }}
shell: bash
run: invoke install
run: invoke install --uv
- name: Run invoke update
if: ${{ inputs.update == 'true' }}
shell: bash
run: invoke update
run: invoke update --uv
```
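The uv-based install used by the action can be reproduced locally. A sketch, assuming an already-activated virtual environment (uv is pointed at it through the VIRTUAL_ENV variable, mirroring the workflow step above):

```bash
# Same toolchain the action installs
pip install invoke wheel uv

# Point uv at the active virtual environment
export VIRTUAL_ENV="$(python -c 'import sys; print(sys.prefix)')"

# Install the development requirements and run the uv-aware invoke tasks
uv pip install -r requirements-dev.txt
invoke install --uv
invoke update --uv
```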
.github/workflows/check_translations.yaml (18 lines changed, vendored)

```
@@ -8,6 +8,9 @@ on:
branches:
- l10

env:
python_version: 3.9

jobs:

check:
@@ -21,22 +24,15 @@ jobs:
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
python_version: 3.9

steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Set Up Python ${{ env.python_version }}
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
install: true
apt-dependency: gettext
- name: Test Translations
run: invoke translate
- name: Check Migration Files
```
.github/workflows/docker.yaml (32 lines changed, vendored)

```
@@ -25,7 +25,6 @@ on:
- 'master'

jobs:

paths-filter:
name: Filter
runs-on: ubuntu-latest
@@ -46,11 +45,10 @@ jobs:
- docker.dev.env
- Dockerfile
- requirements.txt

- tasks.py

# Build the docker image
build:
runs-on: ubuntu-latest
needs: paths-filter
if: needs.paths-filter.outputs.docker == 'true' || github.event_name == 'release' || github.event_name == 'push'
permissions:
@@ -59,7 +57,9 @@ jobs:
id-token: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
python_version: 3.9
python_version: "3.11"
runs-on: ubuntu-latest # in the future we can try to use alternative runners here

steps:
- name: Check out repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
@@ -74,10 +74,17 @@ jobs:
python3 ci/version_check.py
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
- name: Test Docker Image
id: test-docker
run: |
docker build . --target production --tag inventree-test
docker run --rm inventree-test invoke --version
docker run --rm inventree-test invoke --list
docker run --rm inventree-test gunicorn --version
docker run --rm inventree-test pg_dump --version
- name: Build Docker Image
# Build the development docker image (using docker-compose.yml)
run: |
docker-compose build --no-cache
run: docker-compose build --no-cache
- name: Update Docker Image
run: |
docker-compose run inventree-dev-server invoke update
@@ -102,6 +109,9 @@ jobs:
docker-compose run inventree-dev-server invoke test --disable-pty
docker-compose run inventree-dev-server invoke test --migrations --disable-pty
docker-compose down
- name: Clean up test folder
run: |
rm -rf InvenTree/_testfolder
- name: Set up QEMU
if: github.event_name != 'pull_request'
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # pin@v3.0.0
@@ -135,8 +145,8 @@ jobs:
inventree/inventree
ghcr.io/inventree/inventree

- name: Build and Push
id: build-and-push
- name: Push Docker Images
id: push-docker
if: github.event_name != 'pull_request'
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # pin@v5.0.0
with:
@@ -150,9 +160,3 @@ jobs:
build-args: |
commit_hash=${{ env.git_commit_hash }}
commit_date=${{ env.git_commit_date }}

- name: Sign the published image
if: ${{ false }} # github.event_name != 'pull_request'
env:
COSIGN_EXPERIMENTAL: "true"
run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}
```
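The new "Test Docker Image" step is easy to run locally before pushing, which catches broken production images early. The commands below mirror the workflow step above:

```bash
# Build the production target and smoke-test the resulting image
docker build . --target production --tag inventree-test
docker run --rm inventree-test invoke --version
docker run --rm inventree-test invoke --list
docker run --rm inventree-test gunicorn --version
docker run --rm inventree-test pg_dump --version
```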
.github/workflows/qc_checks.yaml (149 lines changed, vendored)

```
@@ -30,6 +30,7 @@ jobs:
server: ${{ steps.filter.outputs.server }}
migrations: ${{ steps.filter.outputs.migrations }}
frontend: ${{ steps.filter.outputs.frontend }}
api: ${{ steps.filter.outputs.api }}

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
@@ -44,30 +45,16 @@ jobs:
migrations:
- '**/migrations/**'
- '.github/workflows**'
api:
- 'InvenTree/InvenTree/api_version.py'
frontend:
- 'src/frontend/**'

pep_style:
name: Style [Python]
runs-on: ubuntu-20.04

needs: paths-filter
if: needs.paths-filter.outputs.server == 'true'

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
dev-install: true
- name: Run flake8
run: flake8 InvenTree --extend-ignore=D

javascript:
name: Style - Classic UI [JS]
runs-on: ubuntu-20.04

needs: [ 'pep_style', 'pre-commit' ]
needs: [ 'pre-commit' ]

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
@@ -121,11 +108,100 @@ jobs:
- name: Check Config
run: |
pip install pyyaml
pip install -r docs/requirements.txt
python docs/ci/check_mkdocs_config.py
- name: Check Links
uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
folder-path: docs
config-file: docs/mlc_config.json
check-modified-files-only: 'yes'
use-quiet-mode: 'yes'

schema:
name: Tests - API Schema Documentation
runs-on: ubuntu-20.04
needs: paths-filter
if: needs.paths-filter.outputs.server == 'true'
env:
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
INVENTREE_PYTHON_TEST_USERNAME: testuser
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
outputs:
version: ${{ steps.version.outputs.version }}

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Export API Documentation
run: invoke schema --ignore-warnings --filename InvenTree/schema.yml
- name: Upload schema
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
with:
name: schema.yml
path: InvenTree/schema.yml
- name: Download public schema
if: needs.paths-filter.outputs.api == 'false'
run: |
pip install linkcheckmd requests
python -m linkcheckmd docs --recurse
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
echo "Version: $version"
url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
echo "URL: $url"
curl -s -o api.yaml $url
echo "Downloaded api.yaml"
- name: Check for differences in API Schema
if: needs.paths-filter.outputs.api == 'false'
run: |
diff --color -u InvenTree/schema.yml api.yaml
diff -u InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
- name: Check schema - including warnings
run: invoke schema
continue-on-error: true
- name: Extract version for publishing
id: version
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
run: |
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
echo "Version: $version"
echo "version=$version" >> "$GITHUB_OUTPUT"

schema-push:
name: Push new schema
runs-on: ubuntu-20.04
needs: [paths-filter, schema]
if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
env:
version: ${{ needs.schema.outputs.version }}

steps:
- uses: actions/checkout@v4
with:
repository: inventree/schema
token: ${{ secrets.SCHEMA_PAT }}
- name: Download schema artifact
uses: actions/download-artifact@v3
with:
name: schema.yml
- name: Move schema to correct location
run: |
echo "Version: $version"
mkdir export/${version}
mv schema.yml export/${version}/api.yaml
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Update API schema for ${version}"

python:
name: Tests - inventree-python
@@ -167,27 +243,11 @@ jobs:
invoke check-server
coverage run -m unittest discover -s test/

docstyle:
name: Style [Python Docstrings]
runs-on: ubuntu-20.04

needs: pre-commit
continue-on-error: true

steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
- name: Run flake8
run: flake8 InvenTree --statistics

coverage:
name: Tests - DB [SQLite] + Coverage
runs-on: ubuntu-20.04

needs: [ 'pep_style', 'pre-commit' ]
needs: [ 'pre-commit' ]
continue-on-error: true # continue if a step fails so that coverage gets pushed

env:
@@ -220,7 +280,7 @@ jobs:
postgres:
name: Tests - DB [PostgreSQL]
runs-on: ubuntu-20.04
needs: [ 'pep_style', 'pre-commit' ]
needs: [ 'pre-commit' ]

env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
@@ -252,7 +312,7 @@ jobs:
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg2 django-redis>=5.0.0
pip-dependency: psycopg django-redis>=5.0.0
dev-install: true
update: true
- name: Run Tests
@@ -264,7 +324,7 @@ jobs:
name: Tests - DB [MySQL]
runs-on: ubuntu-20.04

needs: [ 'pep_style', 'pre-commit' ]
needs: [ 'pre-commit' ]

env:
# Database backend configuration
@@ -334,7 +394,7 @@ jobs:
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg2
pip-dependency: psycopg
dev-install: true
update: true
- name: Run Tests
@@ -389,6 +449,13 @@ jobs:
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate

- name: 0.13.5 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.13.5.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate

platform_ui:
name: Tests - Platform UI
runs-on: ubuntu-20.04
@@ -438,7 +505,7 @@ jobs:
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && npm run build
run: cd src/frontend && npm run compile && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static
```
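The schema job's export-and-compare logic can also be exercised locally; the sketch below follows the workflow steps above (the published schema URL is derived from the version reported by `ci/version_check.py`):

```bash
# Export the current API schema
invoke schema --ignore-warnings --filename InvenTree/schema.yml

# Fetch the published schema for this version and diff against it
version="$(python3 ci/version_check.py only_version 2>&1)"
curl -s -o api.yaml "https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
diff -u InvenTree/schema.yml api.yaml && echo "no difference in API schema"
```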
.github/workflows/release.yml (2 lines changed, vendored)

```
@@ -37,7 +37,7 @@ jobs:
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && npm run build
run: cd src/frontend && npm run compile && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static/web
```
.github/workflows/sponsors.yml (35 lines changed, vendored)

```
@@ -1,35 +0,0 @@
name: Generate Sponsors README
on:
workflow_dispatch:
schedule:
- cron: 30 15 * * 0-6
jobs:
deploy:
runs-on: ubuntu-latest
if: github.repository_owner == 'Inventree'

steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11

- name: Generate Sponsors
uses: JamesIves/github-sponsors-readme-action@a2d75a8d58b117b19777a910e284ccb082aaf117
with:
token: ${{ secrets.INVENTREE_SPONSORS_TOKEN }}
file: 'README.md'
organization: true

- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout -b sponsors
git add README.md
git commit -m "updated sponsors"

- name: Push Changes
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: sponsors
force: true
```
.github/workflows/translations.yml (22 lines changed, vendored)

```
@@ -5,6 +5,10 @@ on:
branches:
- master

env:
python_version: 3.9
node_version: 16

jobs:
build:

@@ -22,20 +26,12 @@ jobs:
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Set up Python 3.9
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
python-version: 3.9
- name: Set up Node 16
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # pin to v3.8.2
with:
node-version: 16
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
install: true
npm: true
apt-dependency: gettext
- name: Make Translations
run: invoke translate
- name: Commit files
```
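The translation files the workflow commits can be regenerated locally with the same invoke task; gettext must be present first. A sketch for a Debian/Ubuntu host:

```bash
# gettext is required by Django's makemessages machinery
sudo apt-get update && sudo apt-get install -y gettext

# Regenerate the translation files
invoke translate
```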
.gitignore (19 lines changed, vendored)

```
@@ -26,6 +26,7 @@ var/
*.egg-info/
.installed.cfg
*.egg
*.DS_Store

# Django stuff:
*.log
@@ -38,15 +39,6 @@ local_settings.py
# Files used for testing
inventree-demo-dataset/
inventree-data/
dummy_image.*
_tmp.csv
InvenTree/label.pdf
InvenTree/label.png
InvenTree/part_image_123abc.png
label.pdf
label.png
InvenTree/my_special*
_tests*.txt

# Local static and media file storage (only when running in development mode)
inventree_media
@@ -69,6 +61,7 @@ secret_key.txt
.idea/
*.code-workspace
.bash_history
.DS_Store

# https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
.vscode/*
@@ -104,6 +97,14 @@ InvenTree/plugins/

# Compiled translation files
*.mo
messages.ts

# Generated API schema file
api.yaml

# web frontend (static files)
InvenTree/web/static

# Generated docs files
docs/docs/api/*.yml
docs/docs/api/schema/*.yml
```
Packager.io build configuration:

```
@@ -19,9 +19,9 @@ before:
- contrib/packager.io/before.sh
dependencies:
- curl
- python3.9
- python3.9-venv
- python3.9-dev
- "python3.9 | python3.10 | python3.11"
- "python3.9-venv | python3.10-venv | python3.11-venv"
- "python3.9-dev | python3.10-dev | python3.11-dev"
- python3-pip
- python3-cffi
- python3-brotli
@@ -36,4 +36,3 @@ dependencies:
targets:
ubuntu-20.04: true
debian-11: true
debian-12: true
```
Pre-commit configuration:

```
@@ -4,7 +4,8 @@ exclude: |
(?x)^(
InvenTree/InvenTree/static/.*|
InvenTree/locale/.*|
src/frontend/src/locales/.*
src/frontend/src/locales/.*|
.*/migrations/.*
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -14,36 +15,29 @@ repos:
- id: end-of-file-fixer
- id: check-yaml
- id: mixed-line-ending
- repo: https://github.com/pycqa/flake8
rev: '6.1.0'
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.2
hooks:
- id: flake8
additional_dependencies: [
'flake8-bugbear',
'flake8-comprehensions',
'flake8-docstrings',
'flake8-string-format',
'flake8-tidy-imports',
'pep8-naming',
'flake8-logging'
]
- repo: https://github.com/pycqa/isort
rev: '5.12.0'
hooks:
- id: isort
- repo: https://github.com/jazzband/pip-tools
rev: 7.3.0
- id: ruff-format
args: [--preview]
- id: ruff
args: [
--fix,
--preview
]
- repo: https://github.com/matmair/ruff-pre-commit
rev: 830893bf46db844d9c99b6c468e285199adf2de6 # uv-018
hooks:
- id: pip-compile
name: pip-compile requirements-dev.in
args: [requirements-dev.in, -o, requirements-dev.txt]
args: [requirements-dev.in, -o, requirements-dev.txt, --python-version=3.9]
files: ^requirements-dev\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [requirements.in, -o, requirements.txt]
args: [requirements.in, -o, requirements.txt,--python-version=3.9]
files: ^requirements\.(in|txt)$
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.0
rev: v1.34.1
hooks:
- id: djlint-django
- repo: https://github.com/codespell-project/codespell
@@ -58,7 +52,7 @@ repos:
src/frontend/src/locales/.* |
)$
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v3.0.3"
rev: "v4.0.0-alpha.8"
hooks:
- id: prettier
files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
@@ -66,7 +60,7 @@ repos:
- "prettier@^2.4.1"
- "@trivago/prettier-plugin-sort-imports"
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "v8.51.0"
rev: "v9.0.0-beta.0"
hooks:
- id: eslint
additional_dependencies:
```
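With the flake8, isort, and pip-tools hooks replaced by ruff and the uv-backed pip-compile mirror, the whole suite can still be run in one go through pre-commit. A sketch:

```bash
# One-off setup
pip install pre-commit
pre-commit install            # register the git hook

# Run every configured hook, including ruff and ruff-format
pre-commit run --all-files

# Or run a single hook by its id
pre-commit run ruff --all-files
```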
.vscode/launch.json (9 lines changed, vendored)

```
@@ -14,13 +14,20 @@
"justMyCode": true
},
{
"name": "Python: Django - 3rd party",
"name": "InvenTree Server - 3rd party",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/InvenTree/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": false
},
{
"name": "InvenTree Frontend - Vite",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5173",
"webRoot": "${workspaceFolder}/src/frontend"
}
]
}
```
.vscode/tasks.json (2 lines changed, vendored)

```
@@ -45,7 +45,7 @@
{
"label": "setup-test",
"type": "shell",
"command": "inv setup-test --path dev/inventree-demo-dataset",
"command": "inv setup-test -i --path dev/inventree-demo-dataset",
"problemMatcher": [],
},
{
```
CONTRIBUTING.md (261 lines changed)

````
@@ -1,259 +1,6 @@
### Contributing to InvenTree

Hi there, thank you for your interest in contributing!
Please read the contribution guidelines below, before submitting your first pull request to the InvenTree codebase.
Please read our contribution guidelines, before submitting your first pull request to the InvenTree codebase.

## Quickstart

The following commands will get you quickly configure and run a development server, complete with a demo dataset to work with:

### Bare Metal

```bash
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
python3 -m venv env && source env/bin/activate
pip install invoke && invoke
pip install invoke && invoke setup-dev --tests
```

### Docker

```bash
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
docker compose run inventree-dev-server invoke install
docker compose run inventree-dev-server invoke setup-test --dev
docker compose up -d
```

Read the [InvenTree setup documentation](https://docs.inventree.org/en/latest/start/intro/) for a complete installation reference guide.

### Setup Devtools

Run the following command to set up all toolsets for development.

```bash
invoke setup-dev
```

*We recommend you run this command before starting to contribute. This will install and set up `pre-commit` to run some checks before each commit and help reduce errors.*

## Branches and Versioning

InvenTree roughly follow the [GitLab flow](https://docs.gitlab.com/ee/topics/gitlab_flow.html) branching style, to allow simple management of multiple tagged releases, short-lived branches, and development on the main branch.

### Version Numbering

InvenTree version numbering follows the [semantic versioning](https://semver.org/) specification.

### Master Branch

The HEAD of the "main" or "master" branch of InvenTree represents the current "latest" state of code development.

- All feature branches are merged into master
- All bug fixes are merged into master

**No pushing to master:** New features must be submitted as a pull request from a separate branch (one branch per feature).

### Feature Branches

Feature branches should be branched *from* the *master* branch.

- One major feature per branch / pull request
- Feature pull requests are merged back *into* the master branch
- Features *may* also be merged into a release candidate branch

### Stable Branch

The HEAD of the "stable" branch represents the latest stable release code.

- Versioned releases are merged into the "stable" branch
- Bug fix branches are made *from* the "stable" branch

#### Release Candidate Branches

- Release candidate branches are made from master, and merged into stable.
- RC branches are targeted at a major/minor version e.g. "0.5"
- When a release candidate branch is merged into *stable*, the release is tagged

#### Bugfix Branches

- If a bug is discovered in a tagged release version of InvenTree, a "bugfix" or "hotfix" branch should be made *from* that tagged release
- When approved, the branch is merged back *into* stable, with an incremented PATCH number (e.g. 0.4.1 -> 0.4.2)
- The bugfix *must* also be cherry picked into the *master* branch.

## Environment
### Target version
We are currently targeting:
| Name | Minimum version | Note |
|---|---| --- |
| Python | 3.9 | |
| Django | 3.2 | |
| Node | 18 | Only needed for frontend development |

### Auto creating updates
The following tools can be used to auto-upgrade syntax that was depreciated in new versions:
```bash
pip install pyupgrade
pip install django-upgrade
```

To update the codebase run the following script.
```bash
pyupgrade `find . -name "*.py"`
django-upgrade --target-version 3.2 `find . -name "*.py"`
```

## Credits
If you add any new dependencies / libraries, they need to be added to [the docs](https://github.com/inventree/inventree/blob/master/docs/docs/credits.md). Please try to do that as timely as possible.


## Migration Files

Any required migration files **must** be included in the commit, or the pull-request will be rejected. If you change the underlying database schema, make sure you run `invoke migrate` and commit the migration files before submitting the PR.

*Note: A github action checks for unstaged migration files and will reject the PR if it finds any!*

## Unit Testing

Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage for any new features is insufficient, or the overall code coverage is decreased.

The InvenTree code base makes use of [GitHub actions](https://github.com/features/actions) to run a suite of automated tests against the code base every time a new pull request is received. These actions include (but are not limited to):

- Checking Python and Javascript code against standard style guides
- Running unit test suite
- Automated building and pushing of docker images
- Generating translation files

The various github actions can be found in the `./github/workflows` directory

### Run tests locally

To run test locally, use:
```
invoke test
```

To run only partial tests, for example for a module use:
```
invoke test --runtest order
```

To see all the available options:

```
invoke test --help
```

## Code Style

Code style is automatically checked as part of the project's CI pipeline on GitHub. This means that any pull requests which do not conform to the style guidelines will fail CI checks.

### Backend Code

Backend code (Python) is checked against the [PEP style guidelines](https://peps.python.org/pep-0008/). Please write docstrings for each function and class - we follow the [google doc-style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for python.

### Frontend Code

Frontend code (Javascript) is checked using [eslint](https://eslint.org/). While docstrings are not enforced for front-end code, good code documentation is encouraged!

### Running Checks Locally

If you have followed the setup devtools procedure, then code style checking is performend automatically whenever you commit changes to the code.

### Django templates

Django are checked by [djlint](https://github.com/Riverside-Healthcare/djlint) through pre-commit.

The following rules out of the [default set](https://djlint.com/docs/linter/) are not applied:
```bash
D018: (Django) Internal links should use the { % url ... % } pattern
H006: Img tag should have height and width attributes
H008: Attributes should be double quoted
H021: Inline styles should be avoided
H023: Do not use entity references
H025: Tag seems to be an orphan
H030: Consider adding a meta description
H031: Consider adding meta keywords
T002: Double quotes should be used in tags
```


## Documentation

New features or updates to existing features should be accompanied by user documentation.

## Translations

Any user-facing strings *must* be passed through the translation engine.

- InvenTree code is written in English
- User translatable strings are provided in English as the primary language
- Secondary language translations are provided [via Crowdin](https://crowdin.com/project/inventree)

*Note: Translation files are updated via GitHub actions - you do not need to compile translations files before submitting a pull request!*

### Python Code

For strings exposed via Python code, use the following format:

```python
from django.utils.translation import gettext_lazy as _

user_facing_string = _('This string will be exposed to the translation engine!')
```

### Templated Strings

HTML and javascript files are passed through the django templating engine. Translatable strings are implemented as follows:

```html
{ % load i18n % }

<span>{ % trans "This string will be translated" % } - this string will not!</span>
```

## Github use

### Tags

The tags describe issues and PRs in multiple areas:

| Area | Name | Description |
| --- | --- | --- |
| Triage Labels | | |
| | triage:not-checked | Item was not checked by the core team |
| | triage:not-approved | Item is not green-light by maintainer |
| Type Labels | | |
| | breaking | Indicates a major update or change which breaks compatibility |
| | bug | Identifies a bug which needs to be addressed |
| | dependency | Relates to a project dependency |
| | duplicate | Duplicate of another issue or PR |
| | enhancement | This is an suggested enhancement, extending the functionality of an existing feature |
| | experimental | This is a new *experimental* feature which needs to be enabled manually |
| | feature | This is a new feature, introducing novel functionality |
| | help wanted | Assistance required |
| | invalid | This issue or PR is considered invalid |
| | inactive | Indicates lack of activity |
| | migration | Database migration, requires special attention |
| | question | This is a question |
| | roadmap | This is a roadmap feature with no immediate plans for implementation |
| | security | Relates to a security issue |
| | starter | Good issue for a developer new to the project |
| | wontfix | No work will be done against this issue or PR |
| Feature Labels | | |
| | API | Relates to the API |
| | barcode | Barcode scanning and integration |
| | build | Build orders |
| | importer | Data importing and processing |
| | order | Purchase order and sales orders |
| | part | Parts |
| | plugin | Plugin ecosystem |
| | pricing | Pricing functionality |
| | report | Report generation |
| | stock | Stock item management |
| | user interface | User interface |
| Ecosystem Labels | | |
| | backport | Tags that the issue will be backported to a stable branch as a bug-fix |
| | demo | Relates to the InvenTree demo server or dataset |
| | docker | Docker / docker-compose |
| | CI | CI / unit testing ecosystem |
| | refactor | Refactoring existing code |
| | setup | Relates to the InvenTree setup / installation process |
Refer to our [contribution guidelines](https://docs.inventree.org/en/latest/develop/contributing/) for more information!
````
Dockerfile (33 lines changed)

```
@@ -9,25 +9,26 @@
# - Runs InvenTree web server under django development server
# - Monitors source files for any changes, and live-reloads server

ARG base_image=python:3.10-alpine3.18
ARG base_image=python:3.11-alpine3.18
FROM ${base_image} as inventree_base

# Build arguments for this image
ARG commit_tag=""
ARG commit_hash=""
ARG commit_date=""
ARG commit_tag=""

ARG data_dir="data"

ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK 1
ENV INVOKE_RUN_SHELL="/bin/ash"

ENV INVENTREE_LOG_LEVEL="WARNING"
ENV INVENTREE_DOCKER="true"

# InvenTree paths
ENV INVENTREE_HOME="/home/inventree"
ENV INVENTREE_MNG_DIR="${INVENTREE_HOME}/InvenTree"
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/data"
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/${data_dir}"
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
ENV INVENTREE_BACKUP_DIR="${INVENTREE_DATA_DIR}/backup"
@@ -61,12 +62,11 @@ RUN apk add --no-cache \
libjpeg libwebp zlib \
# Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
# SQLite support
sqlite \
# PostgreSQL support
postgresql-libs postgresql-client \
# MySQL / MariaDB support
mariadb-connector-c-dev mariadb-client && \
# Postgres client
postgresql13-client \
# MySQL / MariaDB client
mariadb-client mariadb-connector-c \
&& \
# fonts
apk --update --upgrade --no-cache add fontconfig ttf-freefont font-noto terminus-font && fc-cache -f

@@ -90,13 +90,13 @@ RUN if [ `apk --print-arch` = "armv7" ]; then \
COPY tasks.py docker/gunicorn.conf.py docker/init.sh ./
RUN chmod +x init.sh

ENTRYPOINT ["/bin/sh", "./init.sh"]
ENTRYPOINT ["/bin/ash", "./init.sh"]

FROM inventree_base as prebuild

ENV PATH=/root/.local/bin:$PATH
RUN ./install_build_packages.sh --no-cache --virtual .build-deps && \
pip install --user -r base_requirements.txt -r requirements.txt --no-cache-dir && \
pip install --user -r base_requirements.txt -r requirements.txt --no-cache && \
apk --purge del .build-deps

# Frontend builder image:
@@ -141,7 +141,7 @@ EXPOSE 5173
# Install packages required for building python packages
RUN ./install_build_packages.sh

RUN pip install -r base_requirements.txt --no-cache-dir
RUN pip install uv --no-cache-dir && pip install -r base_requirements.txt --no-cache

# Install nodejs / npm / yarn

@@ -164,10 +164,3 @@ ENTRYPOINT ["/bin/ash", "./docker/init.sh"]

# Launch the development server
CMD ["invoke", "server", "-a", "${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT}"]

# Image target for devcontainer
FROM dev as devcontainer

ARG workspace="/workspaces/InvenTree"

WORKDIR ${WORKSPACE}
```
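The new build arguments and the devcontainer target can be exercised directly with docker build; the sketch below reuses the values from the compose file earlier in this diff:

```bash
# Build the devcontainer target with the same base image and data directory
docker build . \
    --target devcontainer \
    --build-arg base_image="mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18" \
    --build-arg data_dir="dev" \
    --tag inventree-devcontainer
```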
Admin classes module (Python):

```
@@ -1,6 +1,7 @@
"""Admin classes"""
"""Admin classes."""

from django.contrib import admin
from django.db.models.fields import CharField
from django.http.request import HttpRequest

from djmoney.contrib.exchange.admin import RateAdmin
@@ -10,7 +11,7 @@ from import_export.resources import ModelResource


class InvenTreeResource(ModelResource):
    """Custom subclass of the ModelResource class provided by django-import-export"
    """Custom subclass of the ModelResource class provided by django-import-export".

    Ensures that exported data are escaped to prevent malicious formula injection.
    Ref: https://owasp.org/www-community/attacks/CSV_Injection
@@ -19,6 +20,9 @@ class InvenTreeResource(ModelResource):
    MAX_IMPORT_ROWS = 1000
    MAX_IMPORT_COLS = 100

    # List of fields which should be converted to empty strings if they are null
    CONVERT_NULL_FIELDS = []

    def import_data_inner(
        self,
        dataset,
@@ -27,14 +31,18 @@ class InvenTreeResource(ModelResource):
        using_transactions,
        collect_failed_rows,
        rollback_on_validation_errors=None,
        **kwargs
        **kwargs,
    ):
        """Override the default import_data_inner function to provide better error handling"""
        """Override the default import_data_inner function to provide better error handling."""
        if len(dataset) > self.MAX_IMPORT_ROWS:
            raise ImportExportError(f"Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})")
            raise ImportExportError(
                f'Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})'
            )

        if len(dataset.headers) > self.MAX_IMPORT_COLS:
            raise ImportExportError(f"Dataset contains too many columns (max {self.MAX_IMPORT_COLS})")
            raise ImportExportError(
                f'Dataset contains too many columns (max {self.MAX_IMPORT_COLS})'
            )

        return super().import_data_inner(
            dataset,
@@ -43,7 +51,7 @@ class InvenTreeResource(ModelResource):
            using_transactions,
            collect_failed_rows,
            rollback_on_validation_errors=rollback_on_validation_errors,
            **kwargs
            **kwargs,
        )

    def export_resource(self, obj):
@@ -69,22 +77,61 @@ class InvenTreeResource(ModelResource):
        return row

    def get_fields(self, **kwargs):
        """Return fields, with some common exclusions"""
        """Return fields, with some common exclusions."""
        fields = super().get_fields(**kwargs)

        fields_to_exclude = [
            'metadata',
            'lft', 'rght', 'tree_id', 'level',
        ]
        fields_to_exclude = ['metadata', 'lft', 'rght', 'tree_id', 'level']

        return [f for f in fields if f.column_name not in fields_to_exclude]

    def before_import(self, dataset, using_transactions, dry_run, **kwargs):
        """Run custom code before importing data.

        - Determine the list of fields which need to be converted to empty strings
        """
        # Construct a map of field names
        db_fields = {field.name: field for field in self.Meta.model._meta.fields}

        for field_name, field in self.fields.items():
            # Skip read-only fields (they cannot be imported)
            if field.readonly:
                continue

            # Determine the name of the associated column in the dataset
            column = getattr(field, 'column_name', field_name)

            # Determine the attribute name of the associated database field
            attribute = getattr(field, 'attribute', field_name)

            # Check if the associated database field is a non-nullable string
            if db_field := db_fields.get(attribute):
                if (
                    isinstance(db_field, CharField)
                    and db_field.blank
                    and not db_field.null
                ):
                    if column not in self.CONVERT_NULL_FIELDS:
                        self.CONVERT_NULL_FIELDS.append(column)

        return super().before_import(dataset, using_transactions, dry_run, **kwargs)

    def before_import_row(self, row, row_number=None, **kwargs):
        """Run custom code before importing each row.

        - Convert any null fields to empty strings, for fields which do not support null values
        """
        for field in self.CONVERT_NULL_FIELDS:
            if field in row and row[field] is None:
                row[field] = ''

        return super().before_import_row(row, row_number, **kwargs)


class CustomRateAdmin(RateAdmin):
    """Admin interface for the Rate class"""
    """Admin interface for the Rate class."""

    def has_add_permission(self, request: HttpRequest) -> bool:
        """Disable the 'add' permission for Rate objects"""
        """Disable the 'add' permission for Rate objects."""
        return False
```
@@ -1,5 +1,7 @@
|
||||
"""Main JSON interface views."""
|
||||
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.http import JsonResponse
|
||||
@@ -8,6 +10,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from django_q.models import OrmQ
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import permissions, serializers
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ValidationError
|
||||
from rest_framework.views import APIView
|
||||
@@ -17,7 +20,8 @@ import users.models
|
||||
from InvenTree.filters import SEARCH_ORDER_FILTER
|
||||
from InvenTree.mixins import ListCreateAPI
|
||||
from InvenTree.permissions import RolePermission
|
||||
from part.templatetags.inventree_extras import plugins_info
|
||||
from InvenTree.templatetags.inventree_extras import plugins_info
|
||||
from part.models import Part
|
||||
from plugin.serializers import MetadataSerializer
|
||||
from users.models import ApiToken
|
||||
|
||||
@@ -28,13 +32,41 @@ from .version import inventreeApiText
|
||||
from .views import AjaxView
|
||||
|
||||
|
||||
class VersionViewSerializer(serializers.Serializer):
|
||||
"""Serializer for a single version."""
|
||||
|
||||
class VersionSerializer(serializers.Serializer):
|
||||
"""Serializer for server version."""
|
||||
|
||||
server = serializers.CharField()
|
||||
api = serializers.IntegerField()
|
||||
commit_hash = serializers.CharField()
|
||||
commit_date = serializers.CharField()
|
||||
commit_branch = serializers.CharField()
|
||||
python = serializers.CharField()
|
||||
django = serializers.CharField()
|
||||
|
||||
class LinkSerializer(serializers.Serializer):
|
||||
"""Serializer for all possible links."""
|
||||
|
||||
doc = serializers.URLField()
|
||||
code = serializers.URLField()
|
||||
credit = serializers.URLField()
|
||||
app = serializers.URLField()
|
||||
bug = serializers.URLField()
|
||||
|
||||
dev = serializers.BooleanField()
|
||||
up_to_date = serializers.BooleanField()
|
||||
version = VersionSerializer()
|
||||
links = LinkSerializer()
|
||||
|
||||
|
||||
class VersionView(APIView):
|
||||
"""Simple JSON endpoint for InvenTree version information."""
|
||||
|
||||
permission_classes = [
|
||||
permissions.IsAdminUser,
|
||||
]
|
||||
permission_classes = [permissions.IsAdminUser]
|
||||
|
||||
@extend_schema(responses={200: OpenApiResponse(response=VersionViewSerializer)})
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Return information about the InvenTree server."""
|
||||
return JsonResponse({
|
||||
@@ -47,20 +79,21 @@ class VersionView(APIView):
|
||||
'commit_date': InvenTree.version.inventreeCommitDate(),
|
||||
'commit_branch': InvenTree.version.inventreeBranch(),
|
||||
'python': InvenTree.version.inventreePythonVersion(),
|
||||
'django': InvenTree.version.inventreeDjangoVersion()
|
||||
'django': InvenTree.version.inventreeDjangoVersion(),
|
||||
},
|
||||
'links': {
|
||||
'doc': InvenTree.version.inventreeDocUrl(),
|
||||
'code': InvenTree.version.inventreeGithubUrl(),
|
||||
'credit': InvenTree.version.inventreeCreditsUrl(),
|
||||
'app': InvenTree.version.inventreeAppUrl(),
|
||||
'bug': f'{InvenTree.version.inventreeGithubUrl()}/issues'
|
||||
}
|
||||
'bug': f'{InvenTree.version.inventreeGithubUrl()}issues',
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
class VersionSerializer(serializers.Serializer):
|
||||
"""Serializer for a single version."""
|
||||
|
||||
version = serializers.CharField()
|
||||
date = serializers.CharField()
|
||||
gh = serializers.CharField()
|
||||
@@ -69,16 +102,21 @@ class VersionSerializer(serializers.Serializer):
|
||||
|
||||
class Meta:
|
||||
"""Meta class for VersionSerializer."""
|
||||
|
||||
fields = ['version', 'date', 'gh', 'text', 'latest']
|
||||
|
||||
|
||||
class VersionApiSerializer(serializers.Serializer):
|
||||
"""Serializer for the version api endpoint."""
|
||||
|
||||
VersionSerializer(many=True)
|
||||
|
||||
|
||||
class VersionTextView(ListAPI):
|
||||
"""Simple JSON endpoint for InvenTree version text."""
|
||||
|
||||
serializer_class = VersionSerializer
|
||||
|
||||
permission_classes = [permissions.IsAdminUser]
|
||||
|
||||
@extend_schema(responses={200: OpenApiResponse(response=VersionApiSerializer)})
|
||||
@@ -96,7 +134,7 @@ class InfoView(AjaxView):
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
def worker_pending_tasks(self):
|
||||
"""Return the current number of outstanding background tasks"""
|
||||
"""Return the current number of outstanding background tasks."""
|
||||
return OrmQ.objects.count()
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
@@ -114,37 +152,37 @@ class InfoView(AjaxView):
|
||||
'worker_running': is_worker_running(),
|
||||
'worker_pending_tasks': self.worker_pending_tasks(),
|
||||
'plugins_enabled': settings.PLUGINS_ENABLED,
|
||||
'plugins_install_disabled': settings.PLUGINS_INSTALL_DISABLED,
|
||||
'active_plugins': plugins_info(),
|
||||
'email_configured': is_email_configured(),
|
||||
'debug_mode': settings.DEBUG,
|
||||
'docker_mode': settings.DOCKER,
|
||||
'default_locale': settings.LANGUAGE_CODE,
|
||||
# Following fields are only available to staff users
|
||||
'system_health': check_system_health() if is_staff else None,
|
||||
'database': InvenTree.version.inventreeDatabase()if is_staff else None,
|
||||
'database': InvenTree.version.inventreeDatabase() if is_staff else None,
|
||||
'platform': InvenTree.version.inventreePlatform() if is_staff else None,
|
||||
'installer': InvenTree.version.inventreeInstaller() if is_staff else None,
|
||||
'target': InvenTree.version.inventreeTarget()if is_staff else None,
|
||||
'target': InvenTree.version.inventreeTarget() if is_staff else None,
|
||||
}
|
||||
|
||||
return JsonResponse(data)
|
||||
|
||||
def check_auth_header(self, request):
|
||||
"""Check if user is authenticated via a token in the header."""
|
||||
# TODO @matmair: remove after refactor of Token check is done
|
||||
headers = request.headers.get('Authorization', request.headers.get('authorization'))
|
||||
if not headers:
|
||||
return False
|
||||
from InvenTree.middleware import get_token_from_request
|
||||
|
||||
auth = headers.strip()
|
||||
if not (auth.lower().startswith('token') and len(auth.split()) == 2):
|
||||
return False
|
||||
if token := get_token_from_request(request):
|
||||
# Does the provided token match a valid user?
|
||||
try:
|
||||
token = ApiToken.objects.get(key=token)
|
||||
|
||||
# Check if the token is active and the user is a staff member
|
||||
if token.active and token.user and token.user.is_staff:
|
||||
return True
|
||||
except ApiToken.DoesNotExist:
|
||||
pass
|
||||
|
||||
token_key = auth.split()[1]
|
||||
try:
|
||||
token = ApiToken.objects.get(key=token_key)
|
||||
if token.active and token.user and token.user.is_staff:
|
||||
return True
|
||||
except ApiToken.DoesNotExist:
|
||||
pass
|
||||
return False
|
||||
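As a small illustration of the header format this check accepts (purely a sketch; the token value is made up):

    # The view only accepts "Authorization: Token <key>", where <key> maps to an
    # active ApiToken belonging to a staff user.
    auth = 'Token 0123456789abcdef'      # hypothetical header value
    parts = auth.strip().split()
    assert parts[0].lower() == 'token' and len(parts) == 2
    token_key = parts[1]                 # this key is then looked up in ApiToken
    print(token_key)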
|
||||
|
||||
@@ -154,37 +192,37 @@ class NotFoundView(AjaxView):
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
def not_found(self, request):
|
||||
"""Return a 404 error"""
|
||||
"""Return a 404 error."""
|
||||
return JsonResponse(
|
||||
{
|
||||
'detail': _('API endpoint not found'),
|
||||
'url': request.build_absolute_uri(),
|
||||
},
|
||||
status=404
|
||||
status=404,
|
||||
)
|
||||
|
||||
def options(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
def patch(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
def put(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
def delete(self, request, *args, **kwargs):
|
||||
"""Return 404"""
|
||||
"""Return 404."""
|
||||
return self.not_found(request)
|
||||
|
||||
|
||||
@@ -200,7 +238,7 @@ class BulkDeleteMixin:
|
||||
"""
|
||||
|
||||
def filter_delete_queryset(self, queryset, request):
|
||||
"""Provide custom filtering for the queryset *before* it is deleted"""
|
||||
"""Provide custom filtering for the queryset *before* it is deleted."""
|
||||
return queryset
|
||||
|
||||
def delete(self, request, *args, **kwargs):
|
||||
@@ -228,24 +266,25 @@ class BulkDeleteMixin:
|
||||
|
||||
if not items and not filters:
|
||||
raise ValidationError({
|
||||
"non_field_errors": ["List of items or filters must be provided for bulk deletion"],
|
||||
'non_field_errors': [
|
||||
'List of items or filters must be provided for bulk deletion'
|
||||
]
|
||||
})
|
||||
|
||||
if items and type(items) is not list:
|
||||
raise ValidationError({
|
||||
"items": ["'items' must be supplied as a list object"]
|
||||
'items': ["'items' must be supplied as a list object"]
|
||||
})
|
||||
|
||||
if filters and type(filters) is not dict:
|
||||
raise ValidationError({
|
||||
"filters": ["'filters' must be supplied as a dict object"]
|
||||
'filters': ["'filters' must be supplied as a dict object"]
|
||||
})
|
||||
|
||||
# Keep track of how many items we deleted
|
||||
n_deleted = 0
|
||||
|
||||
with transaction.atomic():
|
||||
|
||||
# Start with *all* models and perform basic filtering
|
||||
queryset = model.objects.all()
|
||||
queryset = self.filter_delete_queryset(queryset, request)
|
||||
@@ -261,16 +300,12 @@ class BulkDeleteMixin:
|
||||
n_deleted = queryset.count()
|
||||
queryset.delete()
|
||||
|
||||
return Response(
|
||||
{
|
||||
'success': f"Deleted {n_deleted} items",
|
||||
},
|
||||
status=204
|
||||
)
|
||||
return Response({'success': f'Deleted {n_deleted} items'}, status=204)
|
||||
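A sketch of how a client might drive an endpoint built on this mixin; the URL and token are placeholders, and the payload shape follows the validation above (either a list of primary keys, or a filters dict):

    import requests

    response = requests.delete(
        'http://localhost:8000/api/part/attachment/',   # hypothetical bulk-capable endpoint
        json={'items': [1, 2, 3]},                      # alternatively: {'filters': {'part': 10}}
        headers={'Authorization': 'Token 0123456789abcdef'},
        timeout=10,
    )
    # A successful call returns HTTP 204 with {'success': 'Deleted 3 items'}
    print(response.status_code)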
|
||||
|
||||
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
|
||||
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create"""
|
||||
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create."""
|
||||
|
||||
...
|
||||
|
||||
|
||||
@@ -307,24 +342,17 @@ class APIDownloadMixin:
|
||||
|
||||
def download_queryset(self, queryset, export_format):
|
||||
"""This function must be implemented to provide a downloadFile request."""
|
||||
raise NotImplementedError("download_queryset method not implemented!")
|
||||
raise NotImplementedError('download_queryset method not implemented!')
|
||||
|
||||
|
||||
class AttachmentMixin:
|
||||
"""Mixin for creating attachment objects, and ensuring the user information is saved correctly."""
|
||||
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
RolePermission,
|
||||
]
|
||||
permission_classes = [permissions.IsAuthenticated, RolePermission]
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
search_fields = [
|
||||
'attachment',
|
||||
'comment',
|
||||
'link',
|
||||
]
|
||||
search_fields = ['attachment', 'comment', 'link']
|
||||
|
||||
def perform_create(self, serializer):
|
||||
"""Save the user information when a file is uploaded."""
|
||||
@@ -333,8 +361,18 @@ class AttachmentMixin:
|
||||
attachment.save()
|
||||
|
||||
|
||||
class APISearchView(APIView):
|
||||
"""A general-purpose 'search' API endpoint
|
||||
class APISearchViewSerializer(serializers.Serializer):
|
||||
"""Serializer for the APISearchView."""
|
||||
|
||||
search = serializers.CharField()
|
||||
search_regex = serializers.BooleanField(default=False, required=False)
|
||||
search_whole = serializers.BooleanField(default=False, required=False)
|
||||
limit = serializers.IntegerField(default=1, required=False)
|
||||
offset = serializers.IntegerField(default=0, required=False)
|
||||
|
||||
|
||||
class APISearchView(GenericAPIView):
|
||||
"""A general-purpose 'search' API endpoint.
|
||||
|
||||
Returns hits against a number of different models simultaneously,
|
||||
to consolidate multiple API requests into a single query.
|
||||
@@ -342,12 +380,11 @@ class APISearchView(APIView):
|
||||
Is much more efficient and simplifies code!
|
||||
"""
|
||||
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
]
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
serializer_class = APISearchViewSerializer
|
||||
|
||||
def get_result_types(self):
|
||||
"""Construct a list of search types we can return"""
|
||||
"""Construct a list of search types we can return."""
|
||||
import build.api
|
||||
import company.api
|
||||
import order.api
|
||||
@@ -369,7 +406,7 @@ class APISearchView(APIView):
|
||||
}
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
"""Perform search query against available models"""
|
||||
"""Perform search query against available models."""
|
||||
data = request.data
|
||||
|
||||
results = {}
|
||||
@@ -384,14 +421,11 @@ class APISearchView(APIView):
|
||||
}
|
||||
|
||||
if 'search' not in data:
|
||||
raise ValidationError({
|
||||
'search': 'Search term must be provided',
|
||||
})
|
||||
raise ValidationError({'search': 'Search term must be provided'})
|
||||
|
||||
for key, cls in self.get_result_types().items():
|
||||
# Only return results which are specifically requested
|
||||
if key in data:
|
||||
|
||||
params = data[key]
|
||||
|
||||
for k, v in pass_through_params.items():
|
||||
@@ -418,42 +452,49 @@ class APISearchView(APIView):
|
||||
table = f'{app_label}_{model_name}'
|
||||
|
||||
try:
|
||||
if users.models.RuleSet.check_table_permission(request.user, table, 'view'):
|
||||
if users.models.RuleSet.check_table_permission(
|
||||
request.user, table, 'view'
|
||||
):
|
||||
results[key] = view.list(request, *args, **kwargs).data
|
||||
else:
|
||||
results[key] = {
|
||||
'error': _('User does not have permission to view this model')
|
||||
'error': _(
|
||||
'User does not have permission to view this model'
|
||||
)
|
||||
}
|
||||
except Exception as exc:
|
||||
results[key] = {
|
||||
'error': str(exc)
|
||||
}
|
||||
results[key] = {'error': str(exc)}
|
||||
|
||||
return Response(results)
|
||||
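For context, a sketch of a consolidated search request; the /api/search/ route and the model keys are assumptions based on the result types constructed above:

    import requests

    payload = {
        'search': 'resistor',
        'limit': 5,
        'offset': 0,
        'part': {},         # request hits for the 'part' model
        'stockitem': {},    # request hits for the 'stockitem' model
    }

    response = requests.post(
        'http://localhost:8000/api/search/',            # hypothetical base URL
        json=payload,
        headers={'Authorization': 'Token 0123456789abcdef'},
        timeout=10,
    )
    # One result entry per requested model (or an 'error' entry if not permitted)
    print(list(response.json().keys()))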
|
||||
|
||||
class MetadataView(RetrieveUpdateAPI):
|
||||
"""Generic API endpoint for reading and editing metadata for a model"""
|
||||
"""Generic API endpoint for reading and editing metadata for a model."""
|
||||
|
||||
MODEL_REF = 'model'
|
||||
|
||||
def get_model_type(self):
|
||||
"""Return the model type associated with this API instance"""
|
||||
"""Return the model type associated with this API instance."""
|
||||
model = self.kwargs.get(self.MODEL_REF, None)
|
||||
|
||||
if model is None:
|
||||
raise ValidationError(f"MetadataView called without '{self.MODEL_REF}' parameter")
|
||||
raise ValidationError(
|
||||
f"MetadataView called without '{self.MODEL_REF}' parameter"
|
||||
)
|
||||
|
||||
return model
|
||||
|
||||
def get_permission_model(self):
|
||||
"""Return the 'permission' model associated with this view"""
|
||||
"""Return the 'permission' model associated with this view."""
|
||||
return self.get_model_type()
|
||||
|
||||
def get_queryset(self):
|
||||
"""Return the queryset for this endpoint"""
|
||||
"""Return the queryset for this endpoint."""
|
||||
return self.get_model_type().objects.all()
|
||||
|
||||
def get_serializer(self, *args, **kwargs):
|
||||
"""Return MetadataSerializer instance"""
|
||||
"""Return MetadataSerializer instance."""
|
||||
# Detect if we are currently generating the OpenAPI schema
|
||||
if 'spectacular' in sys.argv:
|
||||
return MetadataSerializer(Part, *args, **kwargs)
|
||||
return MetadataSerializer(self.get_model_type(), *args, **kwargs)
|
||||
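A usage sketch for this view; the URL pattern of the form /api/<model>/<pk>/metadata/ is assumed here, since the routing is not part of this diff:

    import requests

    url = 'http://localhost:8000/api/part/1/metadata/'      # hypothetical URL
    headers = {'Authorization': 'Token 0123456789abcdef'}   # hypothetical token

    # Merge a key into the stored metadata, then read it back
    requests.patch(url, json={'metadata': {'colour': 'red'}}, headers=headers, timeout=10)
    print(requests.get(url, headers=headers, timeout=10).json())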
|
||||
@@ -1,13 +1,92 @@
|
||||
"""InvenTree API version information."""
|
||||
|
||||
|
||||
# InvenTree API version
|
||||
INVENTREE_API_VERSION = 160
|
||||
INVENTREE_API_VERSION = 180
|
||||
"""Increment this API version number whenever there is a significant change to the API that any clients need to know about."""
|
||||
|
||||
INVENTREE_API_TEXT = """
|
||||
|
||||
v160 -> 2023-012-11 : https://github.com/inventree/InvenTree/pull/6072
|
||||
v180 - 2024-03-02 : https://github.com/inventree/InvenTree/pull/6463
|
||||
- Tweaks to API documentation to allow automatic documentation generation
|
||||
|
||||
v179 - 2024-03-01 : https://github.com/inventree/InvenTree/pull/6605
|
||||
- Adds "subcategories" count to PartCategory serializer
|
||||
- Adds "sublocations" count to StockLocation serializer
|
||||
- Adds "image" field to PartBrief serializer
|
||||
- Adds "image" field to CompanyBrief serializer
|
||||
|
||||
v178 - 2024-02-29 : https://github.com/inventree/InvenTree/pull/6604
|
||||
- Adds "external_stock" field to the Part API endpoint
|
||||
- Adds "external_stock" field to the BomItem API endpoint
|
||||
- Adds "external_stock" field to the BuildLine API endpoint
|
||||
- Stock quantities represented in the BuildLine API endpoint are now filtered by Build.source_location
|
||||
|
||||
v177 - 2024-02-27 : https://github.com/inventree/InvenTree/pull/6581
|
||||
- Adds "subcategoies" count to PartCategoryTree serializer
|
||||
- Adds "sublocations" count to StockLocationTree serializer
|
||||
|
||||
v176 - 2024-02-26 : https://github.com/inventree/InvenTree/pull/6535
|
||||
- Adds the field "plugins_install_disabled" to the Server info API endpoint
|
||||
|
||||
v175 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6538
|
||||
- Adds "parts" count to PartParameterTemplate serializer
|
||||
|
||||
v174 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6536
|
||||
- Expose PartCategory filters to the API documentation
|
||||
- Expose StockLocation filters to the API documentation
|
||||
|
||||
v173 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6483
|
||||
- Adds "merge_items" to the PurchaseOrderLine create API endpoint
|
||||
- Adds "auto_pricing" to the PurchaseOrderLine create/update API endpoint
|
||||
|
||||
v172 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6526
|
||||
- Adds "enabled" field to the PartTestTemplate API endpoint
|
||||
- Adds "enabled" filter to the PartTestTemplate list
|
||||
- Adds "enabled" filter to the StockItemTestResult list
|
||||
|
||||
v171 - 2024-02-19 : https://github.com/inventree/InvenTree/pull/6516
|
||||
- Adds "key" as a filterable parameter to PartTestTemplate list endpoint
|
||||
|
||||
v170 -> 2024-02-19 : https://github.com/inventree/InvenTree/pull/6514
|
||||
- Adds "has_results" filter to the PartTestTemplate list endpoint
|
||||
|
||||
v169 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/6430
|
||||
- Adds 'key' field to PartTestTemplate API endpoint
|
||||
- Adds annotated 'results' field to PartTestTemplate API endpoint
|
||||
- Adds 'template' field to StockItemTestResult API endpoint
|
||||
|
||||
v168 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/4824
|
||||
- Adds machine CRUD API endpoints
|
||||
- Adds machine settings API endpoints
|
||||
- Adds machine restart API endpoint
|
||||
- Adds machine types/drivers list API endpoints
|
||||
- Adds machine registry status API endpoint
|
||||
- Adds 'required' field to the global Settings API
|
||||
- Discover sub-sub classes of the StatusCode API
|
||||
|
||||
v167 -> 2024-02-07: https://github.com/inventree/InvenTree/pull/6440
|
||||
- Fixes for OpenAPI schema generation
|
||||
|
||||
v166 -> 2024-02-04 : https://github.com/inventree/InvenTree/pull/6400
|
||||
- Adds package_name to plugin API
|
||||
- Adds mechanism for uninstalling plugins via the API
|
||||
|
||||
v165 -> 2024-01-28 : https://github.com/inventree/InvenTree/pull/6040
|
||||
- Adds supplier_part.name, part.creation_user, part.required_for_sales_order
|
||||
|
||||
v164 -> 2024-01-24 : https://github.com/inventree/InvenTree/pull/6343
|
||||
- Adds "building" quantity to BuildLine API serializer
|
||||
|
||||
v163 -> 2024-01-22 : https://github.com/inventree/InvenTree/pull/6314
|
||||
- Extends API endpoint to expose auth configuration information for signin pages
|
||||
|
||||
v162 -> 2024-01-14 : https://github.com/inventree/InvenTree/pull/6230
|
||||
- Adds API endpoints to provide information on background tasks
|
||||
|
||||
v161 -> 2024-01-13 : https://github.com/inventree/InvenTree/pull/6222
|
||||
- Adds API endpoint for system error information
|
||||
|
||||
v160 -> 2023-12-11 : https://github.com/inventree/InvenTree/pull/6072
|
||||
- Adds API endpoint for allocating stock items against a sales order via barcode scan
|
||||
|
||||
v159 -> 2023-12-08 : https://github.com/inventree/InvenTree/pull/6056
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""AppConfig for inventree app."""
|
||||
"""AppConfig for InvenTree app."""
|
||||
|
||||
import logging
|
||||
from importlib import import_module
|
||||
@@ -12,16 +12,16 @@ from django.db import transaction
|
||||
from django.db.utils import IntegrityError, OperationalError
|
||||
|
||||
import InvenTree.conversion
|
||||
import InvenTree.ready
|
||||
import InvenTree.tasks
|
||||
from InvenTree.config import get_setting
|
||||
from InvenTree.ready import (canAppAccessDatabase, isInMainThread,
|
||||
isInTestMode, isPluginRegistryLoaded)
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeConfig(AppConfig):
|
||||
"""AppConfig for inventree app."""
|
||||
|
||||
name = 'InvenTree'
|
||||
|
||||
def ready(self):
|
||||
@@ -37,29 +37,37 @@ class InvenTreeConfig(AppConfig):
|
||||
- Adding users set in the current environment
|
||||
"""
|
||||
# skip loading if plugin registry is not loaded or we run in a background thread
|
||||
if not isPluginRegistryLoaded() or not isInMainThread():
|
||||
if (
|
||||
not InvenTree.ready.isPluginRegistryLoaded()
|
||||
or not InvenTree.ready.isInMainThread()
|
||||
):
|
||||
return
|
||||
|
||||
if canAppAccessDatabase() or settings.TESTING_ENV:
|
||||
# Skip if running migrations
|
||||
if InvenTree.ready.isRunningMigrations():
|
||||
return
|
||||
|
||||
if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
|
||||
self.remove_obsolete_tasks()
|
||||
|
||||
self.collect_tasks()
|
||||
self.start_background_tasks()
|
||||
|
||||
if not isInTestMode(): # pragma: no cover
|
||||
if not InvenTree.ready.isInTestMode(): # pragma: no cover
|
||||
self.update_exchange_rates()
|
||||
# Let the background worker check for migrations
|
||||
InvenTree.tasks.offload_task(InvenTree.tasks.check_for_migrations)
|
||||
|
||||
self.update_site_url()
|
||||
self.collect_notification_methods()
|
||||
self.collect_state_transition_methods()
|
||||
|
||||
# Ensure the unit registry is loaded
|
||||
InvenTree.conversion.get_unit_registry()
|
||||
|
||||
if canAppAccessDatabase() or settings.TESTING_ENV:
|
||||
if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
|
||||
self.add_user_on_startup()
|
||||
self.add_user_from_file()
|
||||
|
||||
def remove_obsolete_tasks(self):
|
||||
"""Delete any obsolete scheduled tasks in the database."""
|
||||
@@ -77,11 +85,11 @@ class InvenTreeConfig(AppConfig):
|
||||
try:
|
||||
Schedule.objects.filter(func__in=obsolete).delete()
|
||||
except Exception:
|
||||
logger.exception("Failed to remove obsolete tasks - database not ready")
|
||||
logger.exception('Failed to remove obsolete tasks - database not ready')
|
||||
|
||||
def start_background_tasks(self):
|
||||
"""Start all background tests for InvenTree."""
|
||||
logger.info("Starting background tasks...")
|
||||
logger.info('Starting background tasks...')
|
||||
|
||||
from django_q.models import Schedule
|
||||
|
||||
@@ -98,15 +106,16 @@ class InvenTreeConfig(AppConfig):
|
||||
tasks = InvenTree.tasks.tasks.task_list
|
||||
|
||||
for task in tasks:
|
||||
|
||||
ref_name = f'{task.func.__module__}.{task.func.__name__}'
|
||||
|
||||
if ref_name in existing_tasks.keys():
|
||||
# This task already exists - update the details if required
|
||||
existing_task = existing_tasks[ref_name]
|
||||
|
||||
if existing_task.schedule_type != task.interval or existing_task.minutes != task.minutes:
|
||||
|
||||
if (
|
||||
existing_task.schedule_type != task.interval
|
||||
or existing_task.minutes != task.minutes
|
||||
):
|
||||
existing_task.schedule_type = task.interval
|
||||
existing_task.minutes = task.minutes
|
||||
tasks_to_update.append(existing_task)
|
||||
@@ -124,20 +133,27 @@ class InvenTreeConfig(AppConfig):
|
||||
|
||||
if len(tasks_to_create) > 0:
|
||||
Schedule.objects.bulk_create(tasks_to_create)
|
||||
logger.info("Created %s new scheduled tasks", len(tasks_to_create))
|
||||
logger.info('Created %s new scheduled tasks', len(tasks_to_create))
|
||||
|
||||
if len(tasks_to_update) > 0:
|
||||
Schedule.objects.bulk_update(tasks_to_update, ['schedule_type', 'minutes'])
|
||||
logger.info("Updated %s existing scheduled tasks", len(tasks_to_update))
|
||||
logger.info('Updated %s existing scheduled tasks', len(tasks_to_update))
|
||||
|
||||
# Put at least one task onto the background worker stack,
|
||||
# which will be processed as soon as the worker comes online
|
||||
InvenTree.tasks.offload_task(
|
||||
InvenTree.tasks.heartbeat,
|
||||
force_async=True,
|
||||
)
|
||||
self.add_heartbeat()
|
||||
|
||||
logger.info("Started %s scheduled background tasks...", len(tasks))
|
||||
logger.info('Started %s scheduled background tasks...', len(tasks))
|
||||
|
||||
def add_heartbeat(self):
|
||||
"""Ensure there is at least one background task in the queue."""
|
||||
import django_q.models
|
||||
|
||||
try:
|
||||
if django_q.models.OrmQ.objects.count() == 0:
|
||||
InvenTree.tasks.offload_task(
|
||||
InvenTree.tasks.heartbeat, force_async=True
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def collect_tasks(self):
|
||||
"""Collect all background tasks."""
|
||||
@@ -149,7 +165,7 @@ class InvenTreeConfig(AppConfig):
|
||||
try:
|
||||
import_module(f'{app.module.__package__}.tasks')
|
||||
except Exception as e: # pragma: no cover
|
||||
logger.exception("Error loading tasks for %s: %s", app_name, e)
|
||||
logger.exception('Error loading tasks for %s: %s', app_name, e)
|
||||
|
||||
def update_exchange_rates(self): # pragma: no cover
|
||||
"""Update exchange rates each time the server is started.
|
||||
@@ -180,16 +196,20 @@ class InvenTreeConfig(AppConfig):
|
||||
|
||||
if last_update is None:
|
||||
# Never been updated
|
||||
logger.info("Exchange backend has never been updated")
|
||||
logger.info('Exchange backend has never been updated')
|
||||
update = True
|
||||
|
||||
# Backend currency has changed?
|
||||
if base_currency != backend.base_currency:
|
||||
logger.info("Base currency changed from %s to %s", backend.base_currency, base_currency)
|
||||
logger.info(
|
||||
'Base currency changed from %s to %s',
|
||||
backend.base_currency,
|
||||
base_currency,
|
||||
)
|
||||
update = True
|
||||
|
||||
except (ExchangeBackend.DoesNotExist):
|
||||
logger.info("Exchange backend not found - updating")
|
||||
except ExchangeBackend.DoesNotExist:
|
||||
logger.info('Exchange backend not found - updating')
|
||||
update = True
|
||||
|
||||
except Exception:
|
||||
@@ -200,9 +220,49 @@ class InvenTreeConfig(AppConfig):
|
||||
try:
|
||||
update_exchange_rates()
|
||||
except OperationalError:
|
||||
logger.warning("Could not update exchange rates - database not ready")
|
||||
logger.warning('Could not update exchange rates - database not ready')
|
||||
except Exception as e:
|
||||
logger.exception("Error updating exchange rates: %s (%s)", e, type(e))
|
||||
logger.exception('Error updating exchange rates: %s (%s)', e, type(e))
|
||||
|
||||
def update_site_url(self):
|
||||
"""Update the site URL setting.
|
||||
|
||||
- If a fixed SITE_URL is specified (via configuration), it should override the INVENTREE_BASE_URL setting
|
||||
- If multi-site support is enabled, update the site URL for the current site
|
||||
"""
|
||||
import common.models
|
||||
|
||||
if not InvenTree.ready.canAppAccessDatabase():
|
||||
return
|
||||
|
||||
if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
|
||||
return
|
||||
|
||||
if settings.SITE_URL:
|
||||
try:
|
||||
if (
|
||||
common.models.InvenTreeSetting.get_setting('INVENTREE_BASE_URL')
|
||||
!= settings.SITE_URL
|
||||
):
|
||||
common.models.InvenTreeSetting.set_setting(
|
||||
'INVENTREE_BASE_URL', settings.SITE_URL
|
||||
)
|
||||
logger.info('Updated INVENTREE_SITE_URL to %s', settings.SITE_URL)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# If multi-site support is enabled, update the site URL for the current site
|
||||
try:
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
site = Site.objects.get_current()
|
||||
site.domain = settings.SITE_URL
|
||||
site.save()
|
||||
|
||||
logger.info('Updated current site URL to %s', settings.SITE_URL)
|
||||
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def add_user_on_startup(self):
|
||||
"""Add a user on startup."""
|
||||
@@ -214,6 +274,9 @@ class InvenTreeConfig(AppConfig):
|
||||
add_user = get_setting('INVENTREE_ADMIN_USER', 'admin_user')
|
||||
add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
|
||||
add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
|
||||
add_password_file = get_setting(
|
||||
'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
|
||||
)
|
||||
|
||||
# check if all values are present
|
||||
set_variables = 0
|
||||
@@ -229,24 +292,68 @@ class InvenTreeConfig(AppConfig):
|
||||
|
||||
# not all needed variables set
|
||||
if set_variables < 3:
|
||||
logger.warning('Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD')
|
||||
settings.USER_ADDED = True
|
||||
|
||||
# if a password file is present, do not warn - will be handled later
|
||||
if add_password_file:
|
||||
return
|
||||
logger.warning(
|
||||
'Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD'
|
||||
)
|
||||
return
|
||||
|
||||
# good to go -> create user
|
||||
self._create_admin_user(add_user, add_email, add_password)
|
||||
|
||||
# do not try again
|
||||
settings.USER_ADDED = True
|
||||
|
||||
def _create_admin_user(self, add_user, add_email, add_password):
|
||||
user = get_user_model()
|
||||
try:
|
||||
with transaction.atomic():
|
||||
if user.objects.filter(username=add_user).exists():
|
||||
logger.info("User %s already exists - skipping creation", add_user)
|
||||
logger.info('User %s already exists - skipping creation', add_user)
|
||||
else:
|
||||
new_user = user.objects.create_superuser(add_user, add_email, add_password)
|
||||
new_user = user.objects.create_superuser(
|
||||
add_user, add_email, add_password
|
||||
)
|
||||
logger.info('User %s was created!', str(new_user))
|
||||
except IntegrityError:
|
||||
logger.warning('The user "%s" could not be created', add_user)
|
||||
|
||||
def add_user_from_file(self):
|
||||
"""Add the superuser from a file."""
|
||||
# stop if this check has already been performed
|
||||
if hasattr(settings, 'USER_ADDED_FILE') and settings.USER_ADDED_FILE:
|
||||
return
|
||||
|
||||
# get values
|
||||
add_password_file = get_setting(
|
||||
'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
|
||||
)
|
||||
|
||||
# no variable set -> do not try anything
|
||||
if not add_password_file:
|
||||
settings.USER_ADDED_FILE = True
|
||||
return
|
||||
|
||||
# check if file exists
|
||||
add_password_file = Path(str(add_password_file))
|
||||
if not add_password_file.exists():
|
||||
logger.warning('The file "%s" does not exist', add_password_file)
|
||||
settings.USER_ADDED_FILE = True
|
||||
return
|
||||
|
||||
# good to go -> create user
|
||||
self._create_admin_user(
|
||||
get_setting('INVENTREE_ADMIN_USER', 'admin_user', 'admin'),
|
||||
get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email', ''),
|
||||
add_password_file.read_text(encoding='utf-8'),
|
||||
)
|
||||
|
||||
# do not try again
|
||||
settings.USER_ADDED = True
|
||||
settings.USER_ADDED_FILE = True
|
||||
|
||||
def collect_notification_methods(self):
|
||||
"""Collect all notification methods."""
|
||||
|
||||
InvenTree/InvenTree/backends.py (new file, +85 lines)
@@ -0,0 +1,85 @@
|
||||
"""Custom backend implementations."""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
|
||||
|
||||
from maintenance_mode.backends import AbstractStateBackend
|
||||
|
||||
import common.models
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeMaintenanceModeBackend(AbstractStateBackend):
|
||||
"""Custom backend for managing state of maintenance mode.
|
||||
|
||||
Stores a timestamp in the database to determine when maintenance mode will elapse.
|
||||
"""
|
||||
|
||||
SETTING_KEY = '_MAINTENANCE_MODE'
|
||||
|
||||
def get_value(self) -> bool:
|
||||
"""Get the current state of the maintenance mode.
|
||||
|
||||
Returns:
|
||||
bool: True if maintenance mode is active, False otherwise.
|
||||
"""
|
||||
try:
|
||||
setting = common.models.InvenTreeSetting.objects.get(key=self.SETTING_KEY)
|
||||
value = str(setting.value).strip()
|
||||
except common.models.InvenTreeSetting.DoesNotExist:
|
||||
# Database is accessible, but setting is not available - assume False
|
||||
return False
|
||||
except (IntegrityError, OperationalError, ProgrammingError):
|
||||
# Database is inaccessible - assume maintenance mode is active
|
||||
logger.debug('Failed to read maintenance mode state - assuming True')
|
||||
return True
|
||||
|
||||
# Extract timestamp from string
|
||||
try:
|
||||
# If the timestamp is in the past, we are now *out* of maintenance mode
|
||||
timestamp = datetime.datetime.fromisoformat(value)
|
||||
return timestamp > datetime.datetime.now()
|
||||
except ValueError:
|
||||
# If the value is not a valid timestamp, assume maintenance mode is not active
|
||||
return False
|
||||
|
||||
def set_value(self, value: bool, retries: int = 5, minutes: int = 5):
|
||||
"""Set the state of the maintenance mode.
|
||||
|
||||
Instead of simply writing "true" or "false" to the setting,
|
||||
we write a timestamp to the setting, which is used to determine
|
||||
when maintenance mode will elapse.
|
||||
This ensures that we will always *exit* maintenance mode after a certain time period.
|
||||
"""
|
||||
logger.debug('Setting maintenance mode state: %s', value)
|
||||
|
||||
if value:
|
||||
# Save as isoformat
|
||||
timestamp = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
|
||||
timestamp = timestamp.isoformat()
|
||||
else:
|
||||
# Blank timestamp means maintenance mode is not active
|
||||
timestamp = ''
|
||||
|
||||
while retries > 0:
|
||||
try:
|
||||
common.models.InvenTreeSetting.set_setting(self.SETTING_KEY, timestamp)
|
||||
|
||||
# Read the value back to confirm
|
||||
if self.get_value() == value:
|
||||
break
|
||||
except (IntegrityError, OperationalError, ProgrammingError):
|
||||
# If the database is locked, wait briefly and retry
|
||||
logger.debug(
|
||||
'Failed to set maintenance mode state (%s retries left)', retries
|
||||
)
|
||||
time.sleep(0.1)
|
||||
|
||||
retries -= 1
|
||||
|
||||
if retries == 0:
|
||||
logger.warning('Failed to set maintenance mode state')
|
||||
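A self-contained sketch of the timestamp scheme used above: the stored value is an ISO timestamp, and maintenance mode is "on" only while that timestamp lies in the future. This mirrors the logic of get_value() / set_value() without touching the database:

    import datetime

    def encode_state(active: bool, minutes: int = 5) -> str:
        # Active state is stored as a future timestamp; inactive as an empty string
        if active:
            return (datetime.datetime.now() + datetime.timedelta(minutes=minutes)).isoformat()
        return ''

    def decode_state(value: str) -> bool:
        # Maintenance mode is active only while the stored timestamp is in the future
        try:
            return datetime.datetime.fromisoformat(value) > datetime.datetime.now()
        except ValueError:
            return False

    assert decode_state(encode_state(True)) is True
    assert decode_state(encode_state(False)) is False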
@@ -24,22 +24,14 @@ class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
output_dir = os.path.join(
|
||||
here,
|
||||
'..',
|
||||
'..',
|
||||
'js_tmp',
|
||||
)
|
||||
output_dir = os.path.join(here, '..', '..', 'js_tmp')
|
||||
|
||||
output_dir = os.path.abspath(output_dir)
|
||||
|
||||
if not os.path.exists(output_dir):
|
||||
os.mkdir(output_dir)
|
||||
|
||||
output_file = os.path.join(
|
||||
output_dir,
|
||||
filename,
|
||||
)
|
||||
output_file = os.path.join(output_dir, filename)
|
||||
|
||||
with open(output_file, 'wb') as output:
|
||||
output.write(response.content)
|
||||
@@ -48,12 +40,7 @@ class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
|
||||
"""Download files in directory."""
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
js_template_dir = os.path.join(
|
||||
here,
|
||||
'..',
|
||||
'templates',
|
||||
'js',
|
||||
)
|
||||
js_template_dir = os.path.join(here, '..', 'templates', 'js')
|
||||
|
||||
directory = os.path.join(js_template_dir, subdir)
|
||||
|
||||
@@ -76,9 +63,9 @@ class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
|
||||
"""Look for all javascript files."""
|
||||
n = 0
|
||||
|
||||
print("Rendering javascript files...")
|
||||
print('Rendering javascript files...')
|
||||
|
||||
n += self.download_files('translated', '/js/i18n')
|
||||
n += self.download_files('dynamic', '/js/dynamic')
|
||||
|
||||
print(f"Rendered {n} javascript files.")
|
||||
print(f'Rendered {n} javascript files.')
|
||||
|
||||
@@ -10,6 +10,9 @@ import string
|
||||
import warnings
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.files.base import ContentFile
|
||||
from django.core.files.storage import Storage
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
CONFIG_DATA = None
|
||||
CONFIG_LOOKUPS = {}
|
||||
@@ -51,12 +54,16 @@ def to_dict(value):
|
||||
try:
|
||||
return json.loads(value)
|
||||
except Exception as error:
|
||||
logger.exception("Failed to parse value '%s' as JSON with error %s. Ensure value is a valid JSON string.", value, error)
|
||||
logger.exception(
|
||||
"Failed to parse value '%s' as JSON with error %s. Ensure value is a valid JSON string.",
|
||||
value,
|
||||
error,
|
||||
)
|
||||
return {}
|
||||
|
||||
|
||||
def is_true(x):
|
||||
"""Shortcut function to determine if a value "looks" like a boolean"""
|
||||
"""Shortcut function to determine if a value "looks" like a boolean."""
|
||||
return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on']
|
||||
|
||||
|
||||
@@ -65,11 +72,16 @@ def get_base_dir() -> Path:
|
||||
return Path(__file__).parent.parent.resolve()
|
||||
|
||||
|
||||
def ensure_dir(path: Path) -> None:
|
||||
def ensure_dir(path: Path, storage=None) -> None:
|
||||
"""Ensure that a directory exists.
|
||||
|
||||
If it does not exist, create it.
|
||||
"""
|
||||
if storage and isinstance(storage, Storage):
|
||||
if not storage.exists(str(path)):
|
||||
storage.save(str(path / '.empty'), ContentFile(''))
|
||||
return
|
||||
|
||||
if not path.exists():
|
||||
path.mkdir(parents=True, exist_ok=True)
|
||||
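A short usage sketch for the extended ensure_dir(); the import path is assumed, and the storage-backed branch needs a configured Django environment:

    from pathlib import Path

    from django.core.files.storage import default_storage

    from InvenTree.config import ensure_dir   # import path assumed

    ensure_dir(Path('/tmp/inventree/reports'))                # plain filesystem: mkdir -p behaviour
    ensure_dir(Path('attachments'), storage=default_storage)  # storage backend: writes 'attachments/.empty'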
|
||||
@@ -90,12 +102,14 @@ def get_config_file(create=True) -> Path:
|
||||
cfg_filename = base_dir.joinpath('config.yaml').resolve()
|
||||
|
||||
if not cfg_filename.exists() and create:
|
||||
print("InvenTree configuration file 'config.yaml' not found - creating default file")
|
||||
print(
|
||||
"InvenTree configuration file 'config.yaml' not found - creating default file"
|
||||
)
|
||||
ensure_dir(cfg_filename.parent)
|
||||
|
||||
cfg_template = base_dir.joinpath("config_template.yaml")
|
||||
cfg_template = base_dir.joinpath('config_template.yaml')
|
||||
shutil.copyfile(cfg_template, cfg_filename)
|
||||
print(f"Created config file {cfg_filename}")
|
||||
print(f'Created config file {cfg_filename}')
|
||||
|
||||
return cfg_filename
|
||||
|
||||
@@ -153,7 +167,13 @@ def do_typecast(value, type, var_name=None):
|
||||
return val
|
||||
except Exception as error:
|
||||
if var_name:
|
||||
logger.exception("Failed to typecast '%s' with value '%s' to type '%s' with error %s", var_name, value, type, error)
|
||||
logger.exception(
|
||||
"Failed to typecast '%s' with value '%s' to type '%s' with error %s",
|
||||
var_name,
|
||||
value,
|
||||
type,
|
||||
error,
|
||||
)
|
||||
return value
|
||||
|
||||
|
||||
@@ -174,7 +194,12 @@ def get_setting(env_var=None, config_key=None, default_value=None, typecast=None
|
||||
def set_metadata(source: str):
|
||||
"""Set lookup metadata for the setting."""
|
||||
key = env_var or config_key
|
||||
CONFIG_LOOKUPS[key] = {'env_var': env_var, 'config_key': config_key, 'source': source, 'accessed': datetime.datetime.now()}
|
||||
CONFIG_LOOKUPS[key] = {
|
||||
'env_var': env_var,
|
||||
'config_key': config_key,
|
||||
'source': source,
|
||||
'accessed': datetime.datetime.now(),
|
||||
}
|
||||
|
||||
# First, try to load from the environment variables
|
||||
if env_var is not None:
|
||||
@@ -192,7 +217,6 @@ def get_setting(env_var=None, config_key=None, default_value=None, typecast=None
|
||||
|
||||
# Hack to allow 'path traversal' in configuration file
|
||||
for key in config_key.strip().split('.'):
|
||||
|
||||
if type(cfg_data) is not dict or key not in cfg_data:
|
||||
result = None
|
||||
break
|
||||
@@ -210,12 +234,12 @@ def get_setting(env_var=None, config_key=None, default_value=None, typecast=None
|
||||
|
||||
|
||||
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
|
||||
"""Helper function for retrieving a boolean configuration setting"""
|
||||
"""Helper function for retrieving a boolean configuration setting."""
|
||||
return is_true(get_setting(env_var, config_key, default_value))
|
||||
|
||||
|
||||
def get_media_dir(create=True):
|
||||
"""Return the absolute path for the 'media' directory (where uploaded files are stored)"""
|
||||
"""Return the absolute path for the 'media' directory (where uploaded files are stored)."""
|
||||
md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')
|
||||
|
||||
if not md:
|
||||
@@ -230,7 +254,7 @@ def get_media_dir(create=True):
|
||||
|
||||
|
||||
def get_static_dir(create=True):
|
||||
"""Return the absolute path for the 'static' directory (where static files are stored)"""
|
||||
"""Return the absolute path for the 'static' directory (where static files are stored)."""
|
||||
sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root')
|
||||
|
||||
if not sd:
|
||||
@@ -245,7 +269,7 @@ def get_static_dir(create=True):
|
||||
|
||||
|
||||
def get_backup_dir(create=True):
|
||||
"""Return the absolute path for the backup directory"""
|
||||
"""Return the absolute path for the backup directory."""
|
||||
bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir')
|
||||
|
||||
if not bd:
|
||||
@@ -276,18 +300,22 @@ def get_plugin_file():
|
||||
plugin_file = Path(plugin_file)
|
||||
|
||||
if not plugin_file.exists():
|
||||
logger.warning("Plugin configuration file does not exist - creating default file")
|
||||
logger.warning(
|
||||
'Plugin configuration file does not exist - creating default file'
|
||||
)
|
||||
logger.info("Creating plugin file at '%s'", plugin_file)
|
||||
ensure_dir(plugin_file.parent)
|
||||
|
||||
# If opening the file fails (no write permission, for example), then this will throw an error
|
||||
plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n")
|
||||
plugin_file.write_text(
|
||||
'# InvenTree Plugins (uses PIP framework to install)\n\n'
|
||||
)
|
||||
|
||||
return plugin_file
|
||||
|
||||
|
||||
def get_plugin_dir():
|
||||
"""Returns the path of the custom plugins directory"""
|
||||
"""Returns the path of the custom plugins directory."""
|
||||
return get_setting('INVENTREE_PLUGIN_DIR', 'plugin_dir')
|
||||
|
||||
|
||||
@@ -303,7 +331,7 @@ def get_secret_key():
|
||||
"""
|
||||
# Look for environment variable
|
||||
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
|
||||
logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover
|
||||
logger.info('SECRET_KEY loaded by INVENTREE_SECRET_KEY') # pragma: no cover
|
||||
return secret_key
|
||||
|
||||
# Look for secret key file
|
||||
@@ -311,7 +339,7 @@ def get_secret_key():
|
||||
secret_key_file = Path(secret_key_file).resolve()
|
||||
else:
|
||||
# Default location for secret key file
|
||||
secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve()
|
||||
secret_key_file = get_base_dir().joinpath('secret_key.txt').resolve()
|
||||
|
||||
if not secret_key_file.exists():
|
||||
logger.info("Generating random key file at '%s'", secret_key_file)
|
||||
@@ -329,7 +357,9 @@ def get_secret_key():
|
||||
return key_data
|
||||
|
||||
|
||||
def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False):
|
||||
def get_custom_file(
|
||||
env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False
|
||||
):
|
||||
"""Returns the checked path to a custom file.
|
||||
|
||||
Set lookup_media to True to also search in the media folder.
|
||||
@@ -345,12 +375,17 @@ def get_custom_file(env_ref: str, conf_ref: str, log_ref: str, lookup_media: boo
|
||||
static_storage = StaticFilesStorage()
|
||||
|
||||
if static_storage.exists(value):
|
||||
logger.info("Loading %s from %s directory: %s", log_ref, 'static', value)
|
||||
logger.info('Loading %s from %s directory: %s', log_ref, 'static', value)
|
||||
elif lookup_media and default_storage.exists(value):
|
||||
logger.info("Loading %s from %s directory: %s", log_ref, 'media', value)
|
||||
logger.info('Loading %s from %s directory: %s', log_ref, 'media', value)
|
||||
else:
|
||||
add_dir_str = ' or media' if lookup_media else ''
|
||||
logger.warning("The %s file '%s' could not be found in the static %s directories", log_ref, value, add_dir_str)
|
||||
logger.warning(
|
||||
"The %s file '%s' could not be found in the static %s directories",
|
||||
log_ref,
|
||||
value,
|
||||
add_dir_str,
|
||||
)
|
||||
value = False
|
||||
|
||||
return value
|
||||
@@ -362,18 +397,22 @@ def get_frontend_settings(debug=True):
|
||||
Note that the new config settings use the 'FRONTEND' key,
|
||||
whereas the legacy key was 'PUI' (platform UI) which is now deprecated
|
||||
"""
|
||||
|
||||
# Legacy settings
|
||||
pui_settings = get_setting('INVENTREE_PUI_SETTINGS', 'pui_settings', {}, typecast=dict)
|
||||
pui_settings = get_setting(
|
||||
'INVENTREE_PUI_SETTINGS', 'pui_settings', {}, typecast=dict
|
||||
)
|
||||
|
||||
if len(pui_settings) > 0:
|
||||
warnings.warn(
|
||||
"The 'INVENTREE_PUI_SETTINGS' key is deprecated. Please use 'INVENTREE_FRONTEND_SETTINGS' instead",
|
||||
DeprecationWarning, stacklevel=2
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
# New settings
|
||||
frontend_settings = get_setting('INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict)
|
||||
frontend_settings = get_setting(
|
||||
'INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict
|
||||
)
|
||||
|
||||
# Merge settings
|
||||
settings = {**pui_settings, **frontend_settings}
|
||||
@@ -385,10 +424,13 @@ def get_frontend_settings(debug=True):
|
||||
if base_url:
|
||||
warnings.warn(
|
||||
"The 'INVENTREE_PUI_URL_BASE' key is deprecated. Please use 'INVENTREE_FRONTEND_URL_BASE' instead",
|
||||
DeprecationWarning, stacklevel=2
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
else:
|
||||
base_url = get_setting('INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'platform')
|
||||
base_url = get_setting(
|
||||
'INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'platform'
|
||||
)
|
||||
|
||||
settings['base_url'] = base_url
|
||||
|
||||
|
||||
@@ -31,9 +31,7 @@ def health_status(request):
|
||||
}
|
||||
|
||||
# The following keys are required to denote system health
|
||||
health_keys = [
|
||||
'django_q_running',
|
||||
]
|
||||
health_keys = ['django_q_running']
|
||||
|
||||
all_healthy = True
|
||||
|
||||
@@ -72,11 +70,9 @@ def user_roles(request):
|
||||
"""
|
||||
user = request.user
|
||||
|
||||
roles = {
|
||||
}
|
||||
|
||||
for role in RuleSet.RULESET_MODELS.keys():
|
||||
roles = {}
|
||||
|
||||
for role in RuleSet.get_ruleset_models().keys():
|
||||
permissions = {}
|
||||
|
||||
for perm in ['view', 'add', 'change', 'delete']:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Helper functions for converting between units."""
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -9,7 +10,6 @@ import pint
|
||||
|
||||
_unit_registry = None
|
||||
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
@@ -29,13 +29,19 @@ def reload_unit_registry():
|
||||
This function is called at startup, and whenever the database is updated.
|
||||
"""
|
||||
import time
|
||||
|
||||
t_start = time.time()
|
||||
|
||||
global _unit_registry
|
||||
|
||||
_unit_registry = None
|
||||
|
||||
reg = pint.UnitRegistry()
|
||||
reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
|
||||
|
||||
# Aliases for temperature units
|
||||
reg.define('@alias degC = celsius = Celsius')
|
||||
reg.define('@alias degF = fahrenheit = Fahrenheit')
|
||||
reg.define('@alias degK = kelvin = Kelvin')
|
||||
|
||||
# Define some "standard" additional units
|
||||
reg.define('piece = 1')
|
||||
@@ -52,7 +58,9 @@ def reload_unit_registry():
|
||||
try:
|
||||
reg.define(cu.fmt_string())
|
||||
except Exception as e:
|
||||
logger.exception('Failed to load custom unit: %s - %s', cu.fmt_string(), e)
|
||||
logger.exception(
|
||||
'Failed to load custom unit: %s - %s', cu.fmt_string(), e
|
||||
)
|
||||
|
||||
# Once custom units are loaded, save registry
|
||||
_unit_registry = reg
|
||||
@@ -62,11 +70,69 @@ def reload_unit_registry():
|
||||
pass
|
||||
|
||||
dt = time.time() - t_start
|
||||
logger.debug('Loaded unit registry in %s.3f s', dt)
|
||||
logger.debug('Loaded unit registry in %.3f s', dt)
|
||||
|
||||
return reg
|
||||
|
||||
|
||||
def from_engineering_notation(value):
|
||||
"""Convert a provided value to 'natural' representation from 'engineering' notation.
|
||||
|
||||
Ref: https://en.wikipedia.org/wiki/Engineering_notation
|
||||
|
||||
In "engineering notation", the unit (or SI prefix) is often combined with the value,
|
||||
and replaces the decimal point.
|
||||
|
||||
Examples:
|
||||
- 1K2 -> 1.2K
|
||||
- 3n05 -> 3.05n
|
||||
- 8R6 -> 8.6R
|
||||
|
||||
And, we should also take into account any provided trailing strings:
|
||||
|
||||
- 1K2 ohm -> 1.2K ohm
|
||||
- 10n005F -> 10.005nF
|
||||
"""
|
||||
value = str(value).strip()
|
||||
|
||||
pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'
|
||||
|
||||
if match := re.match(pattern, value):
|
||||
left, prefix, right, suffix = match.groups()
|
||||
return f'{left}.{right}{prefix}{suffix}'
|
||||
|
||||
return value
|
||||
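The transformation can be exercised directly, since it is pure string manipulation (import path assumed):

    from InvenTree.conversion import from_engineering_notation   # import path assumed

    assert from_engineering_notation('1K2') == '1.2K'
    assert from_engineering_notation('3n05') == '3.05n'
    assert from_engineering_notation('8R6') == '8.6R'
    assert from_engineering_notation('10n005F') == '10.005nF'
    assert from_engineering_notation('1K2 ohm') == '1.2K ohm'
    assert from_engineering_notation('100') == '100'   # no embedded prefix: returned unchanged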
|
||||
|
||||
def convert_value(value, unit):
|
||||
"""Attempt to convert a value to a specified unit.
|
||||
|
||||
Arguments:
|
||||
value: The value to convert
|
||||
unit: The target unit to convert to
|
||||
|
||||
Returns:
|
||||
The converted value (ideally a pint.Quantity value)
|
||||
|
||||
Raises:
|
||||
Exception if the value cannot be converted to the specified unit
|
||||
"""
|
||||
ureg = get_unit_registry()
|
||||
|
||||
# Convert the provided value to a pint.Quantity object
|
||||
value = ureg.Quantity(value)
|
||||
|
||||
# Convert to the specified unit
|
||||
if unit:
|
||||
if is_dimensionless(value):
|
||||
magnitude = value.to_base_units().magnitude
|
||||
value = ureg.Quantity(magnitude, unit)
|
||||
else:
|
||||
value = value.to(unit)
|
||||
|
||||
return value
|
||||
|
||||
|
||||
def convert_physical_value(value: str, unit: str = None, strip_units=True):
|
||||
"""Validate that the provided value is a valid physical quantity.
|
||||
|
||||
@@ -81,6 +147,18 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
|
||||
Returns:
|
||||
The converted quantity, in the specified units
|
||||
"""
|
||||
ureg = get_unit_registry()
|
||||
|
||||
# Check that the provided unit is available in the unit registry
|
||||
if unit:
|
||||
try:
|
||||
valid = unit in ureg
|
||||
except Exception as exc:
|
||||
valid = False
|
||||
|
||||
if not valid:
|
||||
raise ValidationError(_(f'Invalid unit provided ({unit})'))
|
||||
|
||||
original = str(value).strip()
|
||||
|
||||
# Ensure that the value is a string
|
||||
@@ -91,40 +169,35 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
|
||||
if not value:
|
||||
raise ValidationError(_('No value provided'))
|
||||
|
||||
# Create a "backup" value which be tried if the first value fails
|
||||
# e.g. value = "10k" and unit = "ohm" -> "10kohm"
|
||||
# e.g. value = "10m" and unit = "F" -> "10mF"
|
||||
# Construct a list of values to "attempt" to convert
|
||||
attempts = [value]
|
||||
|
||||
# Attempt to convert from engineering notation
|
||||
eng = from_engineering_notation(value)
|
||||
attempts.append(eng)
|
||||
|
||||
# Append the unit, if provided
|
||||
# These are the "final" attempts to convert the value, and *must* appear after previous attempts
|
||||
if unit:
|
||||
backup_value = value + unit
|
||||
else:
|
||||
backup_value = None
|
||||
attempts.append(f'{value}{unit}')
|
||||
attempts.append(f'{eng}{unit}')
|
||||
|
||||
ureg = get_unit_registry()
|
||||
value = None
|
||||
|
||||
try:
|
||||
value = ureg.Quantity(value)
|
||||
|
||||
if unit:
|
||||
if is_dimensionless(value):
|
||||
magnitude = value.to_base_units().magnitude
|
||||
value = ureg.Quantity(magnitude, unit)
|
||||
else:
|
||||
value = value.to(unit)
|
||||
|
||||
except Exception:
|
||||
if backup_value:
|
||||
try:
|
||||
value = ureg.Quantity(backup_value)
|
||||
except Exception:
|
||||
value = None
|
||||
else:
|
||||
# Run through the available "attempts", take the first successful result
|
||||
for attempt in attempts:
|
||||
try:
|
||||
value = convert_value(attempt, unit)
|
||||
break
|
||||
except Exception as exc:
|
||||
value = None
|
||||
pass
|
||||
|
||||
if value is None:
|
||||
if unit:
|
||||
raise ValidationError(_(f'Could not convert {original} to {unit}'))
|
||||
else:
|
||||
raise ValidationError(_("Invalid quantity supplied"))
|
||||
raise ValidationError(_('Invalid quantity supplied'))
|
||||
|
||||
# Calculate the "magnitude" of the value, as a float
|
||||
# If the value is specified strangely (e.g. as a fraction or a dozen), this can cause issues
|
||||
@@ -148,7 +221,7 @@ def convert_physical_value(value: str, unit: str = None, strip_units=True):
|
||||
|
||||
|
||||
def is_dimensionless(value):
|
||||
"""Determine if the provided value is 'dimensionless'
|
||||
"""Determine if the provided value is 'dimensionless'.
|
||||
|
||||
A dimensionless value might look like:
|
||||
|
||||
|
||||
@@ -30,22 +30,22 @@ def is_email_configured():
|
||||
|
||||
# Display warning unless in test mode
|
||||
if not testing: # pragma: no cover
|
||||
logger.debug("EMAIL_HOST is not configured")
|
||||
logger.debug('EMAIL_HOST is not configured')
|
||||
|
||||
# Display warning unless in test mode
|
||||
if not settings.EMAIL_HOST_USER and not testing: # pragma: no cover
|
||||
logger.debug("EMAIL_HOST_USER is not configured")
|
||||
logger.debug('EMAIL_HOST_USER is not configured')
|
||||
|
||||
# Display warning unless in test mode
|
||||
if not settings.EMAIL_HOST_PASSWORD and testing: # pragma: no cover
|
||||
logger.debug("EMAIL_HOST_PASSWORD is not configured")
|
||||
logger.debug('EMAIL_HOST_PASSWORD is not configured')
|
||||
|
||||
# Email sender must be configured
|
||||
if not settings.DEFAULT_FROM_EMAIL:
|
||||
configured = False
|
||||
|
||||
if not testing: # pragma: no cover
|
||||
logger.debug("DEFAULT_FROM_EMAIL is not configured")
|
||||
logger.debug('DEFAULT_FROM_EMAIL is not configured')
|
||||
|
||||
return configured
|
||||
|
||||
@@ -75,7 +75,7 @@ def send_email(subject, body, recipients, from_email=None, html_message=None):
|
||||
if settings.TESTING:
|
||||
from_email = 'from@test.com'
|
||||
else:
|
||||
logger.error("send_email failed: DEFAULT_FROM_EMAIL not specified")
|
||||
logger.error('send_email failed: DEFAULT_FROM_EMAIL not specified')
|
||||
return
|
||||
|
||||
InvenTree.tasks.offload_task(
|
||||
@@ -85,5 +85,5 @@ def send_email(subject, body, recipients, from_email=None, html_message=None):
|
||||
from_email,
|
||||
recipients,
|
||||
fail_silently=False,
|
||||
html_message=html_message
|
||||
html_message=html_message,
|
||||
)
|
||||
|
||||
@@ -23,13 +23,18 @@ import InvenTree.sentry
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def log_error(path):
|
||||
def log_error(path, error_name=None, error_info=None, error_data=None):
|
||||
"""Log an error to the database.
|
||||
|
||||
- Uses python exception handling to extract error details
|
||||
|
||||
Arguments:
|
||||
path: The 'path' (most likely a URL) associated with this error (optional)
|
||||
|
||||
kwargs:
|
||||
error_name: The name of the error (optional, overrides 'kind')
|
||||
error_info: The error information (optional, overrides 'info')
|
||||
error_data: The error data (optional, overrides 'data')
|
||||
"""
|
||||
kind, info, data = sys.exc_info()
|
||||
|
||||
@@ -37,19 +42,34 @@ def log_error(path):
|
||||
if kind in settings.IGNORED_ERRORS:
|
||||
return
|
||||
|
||||
if error_name:
|
||||
kind = error_name
|
||||
else:
|
||||
kind = getattr(kind, '__name__', 'Unknown Error')
|
||||
|
||||
if error_info:
|
||||
info = error_info
|
||||
|
||||
if error_data:
|
||||
data = error_data
|
||||
else:
|
||||
try:
|
||||
data = '\n'.join(traceback.format_exception(kind, info, data))
|
||||
except AttributeError:
|
||||
data = 'No traceback information available'
|
||||
|
||||
# Log error to stderr
|
||||
logger.error(info)
|
||||
|
||||
# Ensure the error information does not exceed field size limits
|
||||
path = path[:200]
|
||||
kind = kind[:128]
|
||||
|
||||
try:
|
||||
Error.objects.create(
|
||||
kind=kind.__name__,
|
||||
info=info,
|
||||
data='\n'.join(traceback.format_exception(kind, info, data)),
|
||||
path=path,
|
||||
)
|
||||
except (OperationalError, IntegrityError):
|
||||
Error.objects.create(kind=kind, info=info or '', data=data or '', path=path)
|
||||
except Exception:
|
||||
# Not much we can do if logging the error throws a db exception
|
||||
pass
|
||||
logger.exception('Failed to log exception to database')
|
||||
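A sketch of how the new keyword overrides might be called (module path assumed; outside an except block, sys.exc_info() provides no traceback, so the explicit arguments supply the details instead):

    from InvenTree.exceptions import log_error   # module path assumed

    log_error(
        '/api/plugin/install/',                               # hypothetical path
        error_name='PluginInstallError',                      # hypothetical error name
        error_info='pip returned a non-zero exit code',
        error_data='captured stdout/stderr from the install step',
    )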
|
||||
|
||||
def exception_handler(exc, context):
|
||||
@@ -86,7 +106,7 @@ def exception_handler(exc, context):
|
||||
# If in DEBUG mode, provide error information in the response
|
||||
error_detail = str(exc)
|
||||
else:
|
||||
error_detail = _("Error details can be found in the admin panel")
|
||||
error_detail = _('Error details can be found in the admin panel')
|
||||
|
||||
response_data = {
|
||||
'error': type(exc).__name__,
|
||||
|
||||
@@ -18,7 +18,7 @@ class InvenTreeExchange(SimpleExchangeBackend):
|
||||
Uses the plugin system to actually fetch the rates from an external API.
|
||||
"""
|
||||
|
||||
name = "InvenTreeExchange"
|
||||
name = 'InvenTreeExchange'
|
||||
|
||||
def get_rates(self, **kwargs) -> None:
|
||||
"""Set the requested currency codes and get rates."""
|
||||
@@ -44,7 +44,9 @@ class InvenTreeExchange(SimpleExchangeBackend):
|
||||
plugin = plugins[0]
|
||||
|
||||
if not plugin:
|
||||
logger.warning('No active currency exchange plugins found - skipping update')
|
||||
logger.warning(
|
||||
'No active currency exchange plugins found - skipping update'
|
||||
)
|
||||
return {}
|
||||
|
||||
logger.info("Running exchange rate update using plugin '%s'", plugin.name)
|
||||
@@ -53,16 +55,22 @@ class InvenTreeExchange(SimpleExchangeBackend):
|
||||
try:
|
||||
rates = plugin.update_exchange_rates(base_currency, symbols)
|
||||
except Exception as exc:
|
||||
logger.exception("Exchange rate update failed: %s", exc)
|
||||
logger.exception('Exchange rate update failed: %s', exc)
|
||||
return {}
|
||||
|
||||
if not rates:
|
||||
logger.warning("Exchange rate update failed - no data returned from plugin %s", slug)
|
||||
logger.warning(
|
||||
'Exchange rate update failed - no data returned from plugin %s', slug
|
||||
)
|
||||
return {}
|
||||
|
||||
# Update exchange rates based on returned data
|
||||
if type(rates) is not dict:
|
||||
logger.warning("Invalid exchange rate data returned from plugin %s (type %s)", slug, type(rates))
|
||||
logger.warning(
|
||||
'Invalid exchange rate data returned from plugin %s (type %s)',
|
||||
slug,
|
||||
type(rates),
|
||||
)
|
||||
return {}
|
||||
|
||||
# Ensure base currency is provided
|
||||
@@ -72,15 +80,21 @@ class InvenTreeExchange(SimpleExchangeBackend):
|
||||
|
||||
@atomic
|
||||
def update_rates(self, base_currency=None, **kwargs):
|
||||
"""Call to update all exchange rates"""
|
||||
backend, _ = ExchangeBackend.objects.update_or_create(name=self.name, defaults={"base_currency": base_currency})
|
||||
"""Call to update all exchange rates."""
|
||||
backend, _ = ExchangeBackend.objects.update_or_create(
|
||||
name=self.name, defaults={'base_currency': base_currency}
|
||||
)
|
||||
|
||||
if base_currency is None:
|
||||
base_currency = currency_code_default()
|
||||
|
||||
symbols = currency_codes()
|
||||
|
||||
logger.info("Updating exchange rates for %s (%s currencies)", base_currency, len(symbols))
|
||||
logger.info(
|
||||
'Updating exchange rates for %s (%s currencies)',
|
||||
base_currency,
|
||||
len(symbols),
|
||||
)
|
||||
|
||||
# Fetch new rates from the backend
|
||||
# If the backend fails, the existing rates will not be updated
|
||||
@@ -95,6 +109,8 @@ class InvenTreeExchange(SimpleExchangeBackend):
|
||||
for currency, amount in rates.items()
|
||||
])
|
||||
else:
|
||||
logger.info("No exchange rates returned from backend - currencies not updated")
|
||||
logger.info(
|
||||
'No exchange rates returned from backend - currencies not updated'
|
||||
)
|
||||
|
||||
logger.info("Updated exchange rates for %s", base_currency)
|
||||
logger.info('Updated exchange rates for %s', base_currency)
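The update path only proceeds when the plugin returns a non-empty `dict` of rates; otherwise the existing rates are left untouched. A standalone, illustrative sketch of that guard (not InvenTree's actual implementation; the base-currency default of 1 is an assumption):

```py
# Standalone sketch mirroring the checks visible in the hunk above.
def validate_rates(rates, base_currency: str) -> dict:
    """Return a usable rate mapping, or an empty dict if the data is unusable."""
    if not rates:
        # No data returned from the plugin - skip the update
        return {}

    if not isinstance(rates, dict):
        # Equivalent to the `type(rates) is not dict` check above
        return {}

    # Assumption: the base currency should always be present with a rate of 1
    rates.setdefault(base_currency, 1)
    return rates


print(validate_rates({'USD': 1, 'EUR': 0.92}, 'USD'))   # usable mapping
print(validate_rates(['USD', 'EUR'], 'USD'))            # {} - invalid type
```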
|
||||
|
||||
@@ -31,11 +31,12 @@ class InvenTreeRestURLField(RestURLField):
|
||||
self.validators[-1].schemes = allowable_url_schemes()
|
||||
|
||||
def run_validation(self, data=empty):
|
||||
"""Override default validation behaviour for this field type"""
|
||||
|
||||
"""Override default validation behaviour for this field type."""
|
||||
import common.models
|
||||
|
||||
strict_urls = common.models.InvenTreeSetting.get_setting('INVENTREE_STRICT_URLS', True, cache=False)
|
||||
strict_urls = common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_STRICT_URLS', True, cache=False
|
||||
)
|
||||
|
||||
if not strict_urls and data is not empty:
|
||||
if '://' not in data:
|
||||
@@ -51,7 +52,7 @@ class InvenTreeURLField(models.URLField):
|
||||
default_validators = [AllowedURLValidator()]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Initialization method for InvenTreeURLField"""
|
||||
"""Initialization method for InvenTreeURLField."""
|
||||
# Max length for InvenTreeURLField is set to 200
|
||||
kwargs['max_length'] = 200
|
||||
super().__init__(**kwargs)
|
||||
@@ -97,11 +98,8 @@ class InvenTreeModelMoneyField(ModelMoneyField):
|
||||
|
||||
# If no validators are provided, add some "standard" ones
|
||||
if len(validators) == 0:
|
||||
|
||||
if not allow_negative:
|
||||
validators.append(
|
||||
MinMoneyValidator(0),
|
||||
)
|
||||
validators.append(MinMoneyValidator(0))
|
||||
|
||||
kwargs['validators'] = validators
|
||||
|
||||
@@ -144,11 +142,7 @@ class DatePickerFormField(forms.DateField):
|
||||
required = kwargs.get('required', False)
|
||||
initial = kwargs.get('initial', None)
|
||||
|
||||
widget = forms.DateInput(
|
||||
attrs={
|
||||
'type': 'date',
|
||||
}
|
||||
)
|
||||
widget = forms.DateInput(attrs={'type': 'date'})
|
||||
|
||||
forms.DateField.__init__(
|
||||
self,
|
||||
@@ -156,7 +150,7 @@ class DatePickerFormField(forms.DateField):
|
||||
initial=initial,
|
||||
help_text=help_text,
|
||||
widget=widget,
|
||||
label=label
|
||||
label=label,
|
||||
)
|
||||
|
||||
|
||||
@@ -204,13 +198,13 @@ class RoundingDecimalField(models.DecimalField):
|
||||
|
||||
|
||||
class InvenTreeNotesField(models.TextField):
|
||||
"""Custom implementation of a 'notes' field"""
|
||||
"""Custom implementation of a 'notes' field."""
|
||||
|
||||
# Maximum character limit for the various 'notes' fields
|
||||
NOTES_MAX_LENGTH = 50000
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Configure default initial values for this field"""
|
||||
"""Configure default initial values for this field."""
|
||||
kwargs['max_length'] = self.NOTES_MAX_LENGTH
|
||||
kwargs['verbose_name'] = _('Notes')
|
||||
kwargs['blank'] = True
|
||||
|
||||
@@ -17,7 +17,6 @@ class InvenTreeDateFilter(rest_filters.DateFilter):
|
||||
|
||||
def filter(self, qs, value):
|
||||
"""Override the filter method to handle timezones correctly."""
|
||||
|
||||
if settings.USE_TZ:
|
||||
if value is not None:
|
||||
tz = timezone.get_current_timezone()
|
||||
@@ -28,7 +27,7 @@ class InvenTreeDateFilter(rest_filters.DateFilter):
|
||||
|
||||
|
||||
class InvenTreeSearchFilter(filters.SearchFilter):
|
||||
"""Custom search filter which allows adjusting of search terms dynamically"""
|
||||
"""Custom search filter which allows adjusting of search terms dynamically."""
|
||||
|
||||
def get_search_fields(self, view, request):
|
||||
"""Return a set of search fields for the request, adjusted based on request params.
|
||||
@@ -36,7 +35,9 @@ class InvenTreeSearchFilter(filters.SearchFilter):
|
||||
The following query params are available to 'augment' the search (in decreasing order of priority)
|
||||
- search_regex: If True, search is performed on 'regex' comparison
|
||||
"""
|
||||
regex = InvenTree.helpers.str2bool(request.query_params.get('search_regex', False))
|
||||
regex = InvenTree.helpers.str2bool(
|
||||
request.query_params.get('search_regex', False)
|
||||
)
|
||||
|
||||
search_fields = super().get_search_fields(view, request)
|
||||
|
||||
@@ -56,7 +57,9 @@ class InvenTreeSearchFilter(filters.SearchFilter):
|
||||
|
||||
Depending on the request parameters, we may "augment" these somewhat
|
||||
"""
|
||||
whole = InvenTree.helpers.str2bool(request.query_params.get('search_whole', False))
|
||||
whole = InvenTree.helpers.str2bool(
|
||||
request.query_params.get('search_whole', False)
|
||||
)
|
||||
|
||||
terms = []
|
||||
|
||||
@@ -72,7 +75,7 @@ class InvenTreeSearchFilter(filters.SearchFilter):
|
||||
|
||||
if whole:
|
||||
# Wrap the search term to enable word-boundary matching
|
||||
term = r"\y" + term + r"\y"
|
||||
term = r'\y' + term + r'\y'
|
||||
|
||||
terms.append(term)
|
||||
|
||||
@@ -110,7 +113,6 @@ class InvenTreeOrderingFilter(filters.OrderingFilter):
|
||||
ordering = []
|
||||
|
||||
for field in ordering_initial:
|
||||
|
||||
reverse = field.startswith('-')
|
||||
|
||||
if reverse:
|
||||
@@ -164,7 +166,4 @@ SEARCH_ORDER_FILTER_ALIAS = [
|
||||
InvenTreeOrderingFilter,
|
||||
]
|
||||
|
||||
ORDER_FILTER = [
|
||||
rest_filters.DjangoFilterBackend,
|
||||
filters.OrderingFilter,
|
||||
]
|
||||
ORDER_FILTER = [rest_filters.DjangoFilterBackend, filters.OrderingFilter]
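`SEARCH_ORDER_FILTER_ALIAS` and `ORDER_FILTER` are convenience lists of filter backends for API views. A hedged sketch of how a view might consume them; the view classes and field names are hypothetical, and the `InvenTree.filters` import path is assumed:

```py
# Sketch: wiring the shared backend lists into DRF views (hypothetical views).
from rest_framework import generics

from InvenTree.filters import ORDER_FILTER, SEARCH_ORDER_FILTER_ALIAS


class ExampleSearchList(generics.ListAPIView):
    """Hypothetical endpoint with search, filtering and aliased ordering."""

    filter_backends = SEARCH_ORDER_FILTER_ALIAS
    search_fields = ['name', 'description']
    ordering_fields = ['name']


class ExampleOrderedList(generics.ListAPIView):
    """Hypothetical endpoint that only needs filtering and ordering."""

    filter_backends = ORDER_FILTER
    ordering_fields = ['name']
```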
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Custom string formatting functions and helpers"""
|
||||
"""Custom string formatting functions and helpers."""
|
||||
|
||||
import re
|
||||
import string
|
||||
@@ -36,16 +36,13 @@ def parse_format_string(fmt_string: str) -> dict:
|
||||
else:
|
||||
seen_groups.add(name)
|
||||
|
||||
info[group[1]] = {
|
||||
'format': group[1],
|
||||
'prefix': group[0],
|
||||
}
|
||||
info[group[1]] = {'format': group[1], 'prefix': group[0]}
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def construct_format_regex(fmt_string: str) -> str:
|
||||
r"""Construct a regular expression based on a provided format string
|
||||
r"""Construct a regular expression based on a provided format string.
|
||||
|
||||
This function turns a python format string into a regular expression,
|
||||
which can be used for two purposes:
|
||||
@@ -67,7 +64,7 @@ def construct_format_regex(fmt_string: str) -> str:
|
||||
Raises:
|
||||
ValueError: Format string is invalid
|
||||
"""
|
||||
pattern = "^"
|
||||
pattern = '^'
|
||||
|
||||
for group in string.Formatter().parse(fmt_string):
|
||||
prefix = group[0] # Prefix (literal text appearing before this group)
|
||||
@@ -75,9 +72,23 @@ def construct_format_regex(fmt_string: str) -> str:
|
||||
format = group[2] # Format specifier e.g :04d
|
||||
|
||||
rep = [
|
||||
'+', '-', '.',
|
||||
'{', '}', '(', ')',
|
||||
'^', '$', '~', '!', '@', ':', ';', '|', '\'', '"',
|
||||
'+',
|
||||
'-',
|
||||
'.',
|
||||
'{',
|
||||
'}',
|
||||
'(',
|
||||
')',
|
||||
'^',
|
||||
'$',
|
||||
'~',
|
||||
'!',
|
||||
'@',
|
||||
':',
|
||||
';',
|
||||
'|',
|
||||
"'",
|
||||
'"',
|
||||
]
|
||||
|
||||
# Escape any special regex characters
|
||||
@@ -94,7 +105,6 @@ def construct_format_regex(fmt_string: str) -> str:
|
||||
|
||||
# Add a named capture group for the format entry
|
||||
if name:
|
||||
|
||||
# Check if integer values are required
|
||||
if format.endswith('d'):
|
||||
chr = '\d'
|
||||
@@ -105,9 +115,9 @@ def construct_format_regex(fmt_string: str) -> str:
|
||||
# TODO: Introspect required width
|
||||
w = '+'
|
||||
|
||||
pattern += f"(?P<{name}>{chr}{w})"
|
||||
pattern += f'(?P<{name}>{chr}{w})'
|
||||
|
||||
pattern += "$"
|
||||
pattern += '$'
|
||||
|
||||
return pattern
|
||||
|
||||
@@ -133,7 +143,7 @@ def validate_string(value: str, fmt_string: str) -> str:
|
||||
|
||||
|
||||
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
|
||||
"""Extract a named value from the provided string, given the provided format string
|
||||
"""Extract a named value from the provided string, given the provided format string.
|
||||
|
||||
Args:
|
||||
name: Name of group to extract e.g. 'ref'
|
||||
@@ -161,19 +171,27 @@ def extract_named_group(name: str, value: str, fmt_string: str) -> str:
|
||||
result = re.match(pattern, value)
|
||||
|
||||
if not result:
|
||||
raise ValueError(_("Provided value does not match required pattern: ") + fmt_string)
|
||||
raise ValueError(
|
||||
_('Provided value does not match required pattern: ') + fmt_string
|
||||
)
|
||||
|
||||
# And return the value we are interested in
|
||||
# Note: This will raise an IndexError if the named group was not matched
|
||||
return result.group(name)
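Taken together, `construct_format_regex()` turns a Python format string into a regex with named capture groups, and `extract_named_group()` uses that regex to pull a single value back out. A short sketch of the expected behaviour (outputs are illustrative):

```py
# Sketch of the format-string helpers defined above.
from InvenTree.format import construct_format_regex, extract_named_group

fmt = 'PO-{ref:04d}'

# The ':d' specifier maps to a '\d+' pattern inside a named capture group
print(construct_format_regex(fmt))                  # e.g. ^PO\-(?P<ref>\d+)$

# Pull the 'ref' group back out of a concrete reference
print(extract_named_group('ref', 'PO-0123', fmt))   # '0123'

# A value that does not match the pattern raises ValueError
try:
    extract_named_group('ref', 'SO-0123', fmt)
except ValueError as exc:
    print('no match:', exc)
```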
|
||||
|
||||
|
||||
def format_money(money: Money, decimal_places: int = None, format: str = None) -> str:
|
||||
"""Format money object according to the currently set local
|
||||
def format_money(
|
||||
money: Money,
|
||||
decimal_places: int = None,
|
||||
format: str = None,
|
||||
include_symbol: bool = True,
|
||||
) -> str:
|
||||
"""Format money object according to the currently set local.
|
||||
|
||||
Args:
|
||||
decimal_places: Number of decimal places to use
|
||||
format: Format pattern according LDML / the babel format pattern syntax (https://babel.pocoo.org/en/latest/numbers.html)
|
||||
money (Money): The money object to format
|
||||
decimal_places (int): Number of decimal places to use
|
||||
format (str): Format pattern according LDML / the babel format pattern syntax (https://babel.pocoo.org/en/latest/numbers.html)
|
||||
|
||||
Returns:
|
||||
str: The formatted string
|
||||
@@ -186,14 +204,16 @@ def format_money(money: Money, decimal_places: int = None, format: str = None) -
|
||||
if format:
|
||||
pattern = parse_pattern(format)
|
||||
else:
|
||||
pattern = locale.currency_formats["standard"]
|
||||
pattern = locale.currency_formats['standard']
|
||||
if decimal_places is not None:
|
||||
pattern.frac_prec = (decimal_places, decimal_places)
|
||||
|
||||
return pattern.apply(
|
||||
result = pattern.apply(
|
||||
money.amount,
|
||||
locale,
|
||||
currency=money.currency.code,
|
||||
currency=money.currency.code if include_symbol else '',
|
||||
currency_digits=decimal_places is None,
|
||||
decimal_quantization=decimal_places is not None,
|
||||
)
|
||||
|
||||
return result
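`format_money()` gains an `include_symbol` flag which, when False, applies the pattern with an empty currency code so no symbol is rendered. A hedged usage sketch (exact output depends on the active locale; `Money` from `py-moneyed` is assumed):

```py
# Sketch only - output depends on the active locale.
from moneyed import Money

from InvenTree.format import format_money

price = Money('1234.5', 'USD')

print(format_money(price))                         # e.g. '$1,234.50'
print(format_money(price, decimal_places=3))       # e.g. '$1,234.500'
print(format_money(price, include_symbol=False))   # e.g. '1,234.50'
```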
|
||||
|
||||
@@ -6,24 +6,24 @@ from urllib.parse import urlencode
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import Group, User
|
||||
from django.contrib.sites.models import Site
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from allauth.account.adapter import DefaultAccountAdapter
|
||||
from allauth.account.forms import LoginForm, SignupForm, set_form_field_order
|
||||
from allauth.exceptions import ImmediateHttpResponse
|
||||
from allauth.core.exceptions import ImmediateHttpResponse
|
||||
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
|
||||
from allauth_2fa.adapter import OTPAdapter
|
||||
from allauth_2fa.utils import user_has_valid_totp_device
|
||||
from crispy_forms.bootstrap import (AppendedText, PrependedAppendedText,
|
||||
PrependedText)
|
||||
from crispy_forms.bootstrap import AppendedText, PrependedAppendedText, PrependedText
|
||||
from crispy_forms.helper import FormHelper
|
||||
from crispy_forms.layout import Field, Layout
|
||||
from dj_rest_auth.registration.serializers import RegisterSerializer
|
||||
from rest_framework import serializers
|
||||
|
||||
import InvenTree.helpers_model
|
||||
import InvenTree.sso
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.exceptions import log_error
|
||||
|
||||
@@ -79,31 +79,19 @@ class HelperForm(forms.ModelForm):
|
||||
field,
|
||||
prepended_text=prefix,
|
||||
appended_text=suffix,
|
||||
placeholder=placeholder
|
||||
placeholder=placeholder,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
elif prefix:
|
||||
layouts.append(
|
||||
Field(
|
||||
PrependedText(
|
||||
field,
|
||||
prefix,
|
||||
placeholder=placeholder
|
||||
)
|
||||
)
|
||||
Field(PrependedText(field, prefix, placeholder=placeholder))
|
||||
)
|
||||
|
||||
elif suffix:
|
||||
layouts.append(
|
||||
Field(
|
||||
AppendedText(
|
||||
field,
|
||||
suffix,
|
||||
placeholder=placeholder
|
||||
)
|
||||
)
|
||||
Field(AppendedText(field, suffix, placeholder=placeholder))
|
||||
)
|
||||
|
||||
else:
|
||||
@@ -119,10 +107,7 @@ class EditUserForm(HelperForm):
|
||||
"""Metaclass options."""
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
'first_name',
|
||||
'last_name',
|
||||
]
|
||||
fields = ['first_name', 'last_name']
|
||||
|
||||
|
||||
class SetPasswordForm(HelperForm):
|
||||
@@ -132,11 +117,7 @@ class SetPasswordForm(HelperForm):
|
||||
"""Metaclass options."""
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
'enter_password',
|
||||
'confirm_password',
|
||||
'old_password',
|
||||
]
|
||||
fields = ['enter_password', 'confirm_password', 'old_password']
|
||||
|
||||
enter_password = forms.CharField(
|
||||
max_length=100,
|
||||
@@ -145,7 +126,7 @@ class SetPasswordForm(HelperForm):
|
||||
initial='',
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
|
||||
label=_('Enter password'),
|
||||
help_text=_('Enter new password')
|
||||
help_text=_('Enter new password'),
|
||||
)
|
||||
|
||||
confirm_password = forms.CharField(
|
||||
@@ -155,20 +136,22 @@ class SetPasswordForm(HelperForm):
|
||||
initial='',
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
|
||||
label=_('Confirm password'),
|
||||
help_text=_('Confirm new password')
|
||||
help_text=_('Confirm new password'),
|
||||
)
|
||||
|
||||
old_password = forms.CharField(
|
||||
label=_("Old password"),
|
||||
label=_('Old password'),
|
||||
strip=False,
|
||||
required=False,
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'current-password', 'autofocus': True}),
|
||||
widget=forms.PasswordInput(
|
||||
attrs={'autocomplete': 'current-password', 'autofocus': True}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
# override allauth
|
||||
class CustomLoginForm(LoginForm):
|
||||
"""Custom login form to override default allauth behaviour"""
|
||||
"""Custom login form to override default allauth behaviour."""
|
||||
|
||||
def login(self, request, redirect_url=None):
|
||||
"""Perform login action.
|
||||
@@ -195,22 +178,24 @@ class CustomSignupForm(SignupForm):
|
||||
|
||||
# check for two mail fields
|
||||
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
|
||||
self.fields["email2"] = forms.EmailField(
|
||||
label=_("Email (again)"),
|
||||
self.fields['email2'] = forms.EmailField(
|
||||
label=_('Email (again)'),
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"type": "email",
|
||||
"placeholder": _("Email address confirmation"),
|
||||
'type': 'email',
|
||||
'placeholder': _('Email address confirmation'),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# check for two password fields
|
||||
if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
|
||||
self.fields.pop("password2")
|
||||
self.fields.pop('password2')
|
||||
|
||||
# reorder fields
|
||||
set_form_field_order(self, ["username", "email", "email2", "password1", "password2", ])
|
||||
set_form_field_order(
|
||||
self, ['username', 'email', 'email2', 'password1', 'password2']
|
||||
)
|
||||
|
||||
def clean(self):
|
||||
"""Make sure the supplied emails match if enabled in settings."""
|
||||
@@ -218,21 +203,26 @@ class CustomSignupForm(SignupForm):
|
||||
|
||||
# check for two mail fields
|
||||
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
|
||||
email = cleaned_data.get("email")
|
||||
email2 = cleaned_data.get("email2")
|
||||
email = cleaned_data.get('email')
|
||||
email2 = cleaned_data.get('email2')
|
||||
if (email and email2) and email != email2:
|
||||
self.add_error("email2", _("You must type the same email each time."))
|
||||
self.add_error('email2', _('You must type the same email each time.'))
|
||||
|
||||
return cleaned_data
|
||||
|
||||
|
||||
def registration_enabled():
|
||||
"""Determine whether user registration is enabled."""
|
||||
if InvenTreeSetting.get_setting('LOGIN_ENABLE_REG') or InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'):
|
||||
if (
|
||||
InvenTreeSetting.get_setting('LOGIN_ENABLE_REG')
|
||||
or InvenTree.sso.registration_enabled()
|
||||
):
|
||||
if settings.EMAIL_HOST:
|
||||
return True
|
||||
else:
|
||||
logger.error("Registration cannot be enabled, because EMAIL_HOST is not configured.")
|
||||
logger.error(
|
||||
'Registration cannot be enabled, because EMAIL_HOST is not configured.'
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
@@ -250,20 +240,26 @@ class RegistratonMixin:
|
||||
|
||||
def clean_email(self, email):
|
||||
"""Check if the mail is valid to the pattern in LOGIN_SIGNUP_MAIL_RESTRICTION (if enabled in settings)."""
|
||||
mail_restriction = InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_RESTRICTION', None)
|
||||
mail_restriction = InvenTreeSetting.get_setting(
|
||||
'LOGIN_SIGNUP_MAIL_RESTRICTION', None
|
||||
)
|
||||
if not mail_restriction:
|
||||
return super().clean_email(email)
|
||||
|
||||
split_email = email.split('@')
|
||||
if len(split_email) != 2:
|
||||
logger.error('The user %s has an invalid email address', email)
|
||||
raise forms.ValidationError(_('The provided primary email address is not valid.'))
|
||||
raise forms.ValidationError(
|
||||
_('The provided primary email address is not valid.')
|
||||
)
|
||||
|
||||
mailoptions = mail_restriction.split(',')
|
||||
for option in mailoptions:
|
||||
if not option.startswith('@'):
|
||||
log_error('LOGIN_SIGNUP_MAIL_RESTRICTION is not configured correctly')
|
||||
raise forms.ValidationError(_('The provided primary email address is not valid.'))
|
||||
raise forms.ValidationError(
|
||||
_('The provided primary email address is not valid.')
|
||||
)
|
||||
else:
|
||||
if split_email[1] == option[1:]:
|
||||
return super().clean_email(email)
|
||||
@@ -283,7 +279,10 @@ class RegistratonMixin:
|
||||
group = Group.objects.get(id=start_group)
|
||||
user.groups.add(group)
|
||||
except Group.DoesNotExist:
|
||||
logger.exception('The setting `SIGNUP_GROUP` contains a non-existent group', start_group)
|
||||
logger.exception(
|
||||
'The setting `SIGNUP_GROUP` contains a non-existent group',
|
||||
start_group,
|
||||
)
|
||||
user.save()
|
||||
return user
|
||||
|
||||
@@ -293,11 +292,14 @@ class CustomUrlMixin:
|
||||
|
||||
def get_email_confirmation_url(self, request, emailconfirmation):
|
||||
"""Custom email confirmation (activation) url."""
|
||||
url = reverse("account_confirm_email", args=[emailconfirmation.key])
|
||||
return Site.objects.get_current().domain + url
|
||||
url = reverse('account_confirm_email', args=[emailconfirmation.key])
|
||||
|
||||
return InvenTree.helpers_model.construct_absolute_url(url)
|
||||
|
||||
|
||||
class CustomAccountAdapter(CustomUrlMixin, RegistratonMixin, OTPAdapter, DefaultAccountAdapter):
|
||||
class CustomAccountAdapter(
|
||||
CustomUrlMixin, RegistratonMixin, OTPAdapter, DefaultAccountAdapter
|
||||
):
|
||||
"""Override of adapter to use dynamic settings."""
|
||||
|
||||
def send_mail(self, template_prefix, email, context):
|
||||
@@ -316,7 +318,7 @@ class CustomAccountAdapter(CustomUrlMixin, RegistratonMixin, OTPAdapter, Default
|
||||
return False
|
||||
|
||||
def get_email_confirmation_url(self, request, emailconfirmation):
|
||||
"""Construct the email confirmation url"""
|
||||
"""Construct the email confirmation url."""
|
||||
from InvenTree.helpers_model import construct_absolute_url
|
||||
|
||||
url = super().get_email_confirmation_url(request, emailconfirmation)
|
||||
@@ -324,7 +326,9 @@ class CustomAccountAdapter(CustomUrlMixin, RegistratonMixin, OTPAdapter, Default
|
||||
return url
|
||||
|
||||
|
||||
class CustomSocialAccountAdapter(CustomUrlMixin, RegistratonMixin, DefaultSocialAccountAdapter):
|
||||
class CustomSocialAccountAdapter(
|
||||
CustomUrlMixin, RegistratonMixin, DefaultSocialAccountAdapter
|
||||
):
|
||||
"""Override of adapter to use dynamic settings."""
|
||||
|
||||
def is_auto_signup_allowed(self, request, sociallogin):
|
||||
@@ -351,17 +355,32 @@ class CustomSocialAccountAdapter(CustomUrlMixin, RegistratonMixin, DefaultSocial
|
||||
if request.GET:
|
||||
redirect_url += '?' + urlencode(request.GET)
|
||||
|
||||
raise ImmediateHttpResponse(
|
||||
response=HttpResponseRedirect(redirect_url)
|
||||
)
|
||||
raise ImmediateHttpResponse(response=HttpResponseRedirect(redirect_url))
|
||||
|
||||
# Otherwise defer to the original allauth adapter.
|
||||
return super().login(request, user)
|
||||
|
||||
def authentication_error(
|
||||
self, request, provider_id, error=None, exception=None, extra_context=None
|
||||
):
|
||||
"""Callback method for authentication errors."""
|
||||
if not error:
|
||||
error = request.GET.get('error', None)
|
||||
|
||||
if not exception:
|
||||
exception = request.GET.get('error_description', None)
|
||||
|
||||
path = request.path or 'sso'
|
||||
|
||||
# Log the error to the database
|
||||
log_error(path, error_name=error, error_data=exception)
|
||||
logger.error("SSO error for provider '%s' - check admin error log", provider_id)
|
||||
|
||||
|
||||
# override dj-rest-auth
|
||||
class CustomRegisterSerializer(RegisterSerializer):
|
||||
"""Override of serializer to use dynamic settings."""
|
||||
|
||||
email = serializers.EmailField()
|
||||
|
||||
def __init__(self, instance=None, data=..., **kwargs):
|
||||
|
||||
@@ -8,6 +8,7 @@ import os
|
||||
import os.path
|
||||
import re
|
||||
from decimal import Decimal, InvalidOperation
|
||||
from typing import TypeVar
|
||||
from wsgiref.util import FileWrapper
|
||||
|
||||
from django.conf import settings
|
||||
@@ -30,16 +31,81 @@ from .settings import MEDIA_URL, STATIC_URL
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def generateTestKey(test_name):
|
||||
def extract_int(reference, clip=0x7FFFFFFF, allow_negative=False):
|
||||
"""Extract an integer out of reference."""
|
||||
# Default value if we cannot convert to an integer
|
||||
ref_int = 0
|
||||
|
||||
reference = str(reference).strip()
|
||||
|
||||
# Ignore empty string
|
||||
if len(reference) == 0:
|
||||
return 0
|
||||
|
||||
# Look at the start of the string - can it be "integerized"?
|
||||
result = re.match(r'^(\d+)', reference)
|
||||
|
||||
if result and len(result.groups()) == 1:
|
||||
ref = result.groups()[0]
|
||||
try:
|
||||
ref_int = int(ref)
|
||||
except Exception:
|
||||
ref_int = 0
|
||||
else:
|
||||
# Look at the "end" of the string
|
||||
result = re.search(r'(\d+)$', reference)
|
||||
|
||||
if result and len(result.groups()) == 1:
|
||||
ref = result.groups()[0]
|
||||
try:
|
||||
ref_int = int(ref)
|
||||
except Exception:
|
||||
ref_int = 0
|
||||
|
||||
# Ensure that the returned values are within the range that can be stored in an IntegerField
|
||||
# Note: This will result in large values being "clipped"
|
||||
if clip is not None:
|
||||
if ref_int > clip:
|
||||
ref_int = clip
|
||||
elif ref_int < -clip:
|
||||
ref_int = -clip
|
||||
|
||||
if not allow_negative and ref_int < 0:
|
||||
ref_int = abs(ref_int)
|
||||
|
||||
return ref_int
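`extract_int()` prefers digits at the start of the reference, falls back to digits at the end, clips the result into IntegerField range and strips the sign unless `allow_negative` is set. Expected behaviour, sketched from the logic above:

```py
# Sketch of the expected extract_int() behaviour.
from InvenTree.helpers import extract_int

print(extract_int('100-ABC'))      # 100 - leading digits win
print(extract_int('ABC-123'))      # 123 - falls back to trailing digits
print(extract_int('no digits'))    # 0   - default value
print(extract_int(99999999999))    # 2147483647 - clipped to 0x7FFFFFFF
```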
|
||||
|
||||
|
||||
def generateTestKey(test_name: str) -> str:
|
||||
"""Generate a test 'key' for a given test name. This must not have illegal chars as it will be used for dict lookup in a template.
|
||||
|
||||
Tests must be named such that they will have unique keys.
|
||||
"""
|
||||
if test_name is None:
|
||||
test_name = ''
|
||||
|
||||
key = test_name.strip().lower()
|
||||
key = key.replace(" ", "")
|
||||
key = key.replace(' ', '')
|
||||
|
||||
def valid_char(char: str):
|
||||
"""Determine if a particular character is valid for use in a test key."""
|
||||
if not char.isprintable():
|
||||
return False
|
||||
|
||||
if char.isidentifier():
|
||||
return True
|
||||
|
||||
if char.isalnum():
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# Remove any characters that cannot be used to represent a variable
|
||||
key = re.sub(r'[^a-zA-Z0-9]', '', key)
|
||||
key = ''.join([c for c in key if valid_char(c)])
|
||||
|
||||
# If the key starts with a non-identifier character, prefix with an underscore
|
||||
if len(key) > 0 and not key[0].isidentifier():
|
||||
key = '_' + key
|
||||
|
||||
return key
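`generateTestKey()` lower-cases the name, strips whitespace and non-alphanumeric characters, and prefixes an underscore if the result would not start like an identifier. Expected behaviour, sketched from the logic above:

```py
# Sketch of the expected generateTestKey() behaviour.
from InvenTree.helpers import generateTestKey

print(generateTestKey('Resistance Test'))   # 'resistancetest'
print(generateTestKey('Test 123!'))         # 'test123'
print(generateTestKey('123 Test'))          # '_123test' - leading digit gets a '_' prefix
print(generateTestKey(None))                # ''
```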
|
||||
|
||||
@@ -56,7 +122,7 @@ def constructPathString(path, max_chars=250):
|
||||
# Replace middle elements to limit the pathstring
|
||||
if len(pathstring) > max_chars:
|
||||
n = int(max_chars / 2 - 2)
|
||||
pathstring = pathstring[:n] + "..." + pathstring[-n:]
|
||||
pathstring = pathstring[:n] + '...' + pathstring[-n:]
|
||||
|
||||
return pathstring
|
||||
|
||||
@@ -82,19 +148,18 @@ def TestIfImage(img):
|
||||
|
||||
def getBlankImage():
|
||||
"""Return the qualified path for the 'blank image' placeholder."""
|
||||
return getStaticUrl("img/blank_image.png")
|
||||
return getStaticUrl('img/blank_image.png')
|
||||
|
||||
|
||||
def getBlankThumbnail():
|
||||
"""Return the qualified path for the 'blank image' thumbnail placeholder."""
|
||||
return getStaticUrl("img/blank_image.thumbnail.png")
|
||||
return getStaticUrl('img/blank_image.thumbnail.png')
|
||||
|
||||
|
||||
def getLogoImage(as_file=False, custom=True):
|
||||
"""Return the InvenTree logo image, or a custom logo if available."""
|
||||
"""Return the path to the logo-file."""
|
||||
if custom and settings.CUSTOM_LOGO:
|
||||
|
||||
static_storage = StaticFilesStorage()
|
||||
|
||||
if static_storage.exists(settings.CUSTOM_LOGO):
|
||||
@@ -106,27 +171,26 @@ def getLogoImage(as_file=False, custom=True):
|
||||
|
||||
if storage is not None:
|
||||
if as_file:
|
||||
return f"file://{storage.path(settings.CUSTOM_LOGO)}"
|
||||
return f'file://{storage.path(settings.CUSTOM_LOGO)}'
|
||||
return storage.url(settings.CUSTOM_LOGO)
|
||||
|
||||
# If we have got to this point, return the default logo
|
||||
if as_file:
|
||||
path = settings.STATIC_ROOT.joinpath('img/inventree.png')
|
||||
return f"file://{path}"
|
||||
return f'file://{path}'
|
||||
return getStaticUrl('img/inventree.png')
|
||||
|
||||
|
||||
def getSplashScreen(custom=True):
|
||||
"""Return the InvenTree splash screen, or a custom splash if available"""
|
||||
"""Return the InvenTree splash screen, or a custom splash if available."""
|
||||
static_storage = StaticFilesStorage()
|
||||
|
||||
if custom and settings.CUSTOM_SPLASH:
|
||||
|
||||
if static_storage.exists(settings.CUSTOM_SPLASH):
|
||||
return static_storage.url(settings.CUSTOM_SPLASH)
|
||||
|
||||
# No custom splash screen
|
||||
return static_storage.url("img/inventree_splash.jpg")
|
||||
return static_storage.url('img/inventree_splash.jpg')
|
||||
|
||||
|
||||
def TestIfImageURL(url):
|
||||
@@ -135,10 +199,15 @@ def TestIfImageURL(url):
|
||||
Simply tests the extension against a set of allowed values
|
||||
"""
|
||||
return os.path.splitext(os.path.basename(url))[-1].lower() in [
|
||||
'.jpg', '.jpeg', '.j2k',
|
||||
'.png', '.bmp',
|
||||
'.tif', '.tiff',
|
||||
'.webp', '.gif',
|
||||
'.jpg',
|
||||
'.jpeg',
|
||||
'.j2k',
|
||||
'.png',
|
||||
'.bmp',
|
||||
'.tif',
|
||||
'.tiff',
|
||||
'.webp',
|
||||
'.gif',
|
||||
]
|
||||
|
||||
|
||||
@@ -153,12 +222,12 @@ def str2bool(text, test=True):
|
||||
True if the text looks like the selected boolean value
|
||||
"""
|
||||
if test:
|
||||
return str(text).lower() in ['1', 'y', 'yes', 't', 'true', 'ok', 'on', ]
|
||||
return str(text).lower() in ['0', 'n', 'no', 'none', 'f', 'false', 'off', ]
|
||||
return str(text).lower() in ['1', 'y', 'yes', 't', 'true', 'ok', 'on']
|
||||
return str(text).lower() in ['0', 'n', 'no', 'none', 'f', 'false', 'off']
|
||||
|
||||
|
||||
def str2int(text, default=None):
|
||||
"""Convert a string to int if possible
|
||||
"""Convert a string to int if possible.
|
||||
|
||||
Args:
|
||||
text: Int like string
|
||||
@@ -191,7 +260,15 @@ def isNull(text):
|
||||
Returns:
|
||||
True if the text looks like a null value
|
||||
"""
|
||||
return str(text).strip().lower() in ['top', 'null', 'none', 'empty', 'false', '-1', '']
|
||||
return str(text).strip().lower() in [
|
||||
'top',
|
||||
'null',
|
||||
'none',
|
||||
'empty',
|
||||
'false',
|
||||
'-1',
|
||||
'',
|
||||
]
|
||||
|
||||
|
||||
def normalize(d):
|
||||
@@ -223,7 +300,7 @@ def increment(value):
|
||||
# Provide a default value if provided with a null input
|
||||
return '1'
|
||||
|
||||
pattern = r"(.*?)(\d+)?$"
|
||||
pattern = r'(.*?)(\d+)?$'
|
||||
|
||||
result = re.search(pattern, value)
|
||||
|
||||
@@ -282,7 +359,7 @@ def decimal2string(d):
|
||||
if '.' not in s:
|
||||
return s
|
||||
|
||||
return s.rstrip("0").rstrip(".")
|
||||
return s.rstrip('0').rstrip('.')
|
||||
|
||||
|
||||
def decimal2money(d, currency=None):
|
||||
@@ -323,10 +400,9 @@ def MakeBarcode(cls_name, object_pk: int, object_data=None, **kwargs):
|
||||
"""Generate a string for a barcode. Adds some global InvenTree parameters.
|
||||
|
||||
Args:
|
||||
object_type: string describing the object type e.g. 'StockItem'
|
||||
object_id: ID (Primary Key) of the object in the database
|
||||
object_url: url for JSON API detail view of the object
|
||||
data: Python dict object containing extra data which will be rendered to string (must only contain stringable values)
|
||||
cls_name: string describing the object type e.g. 'StockItem'
|
||||
object_pk (int): ID (Primary Key) of the object in the database
|
||||
object_data: Python dict object containing extra data which will be rendered to string (must only contain stringable values)
|
||||
|
||||
Returns:
|
||||
json string of the supplied data plus some other data
|
||||
@@ -354,17 +430,12 @@ def MakeBarcode(cls_name, object_pk: int, object_data=None, **kwargs):
|
||||
|
||||
def GetExportFormats():
|
||||
"""Return a list of allowable file formats for exporting data."""
|
||||
return [
|
||||
'csv',
|
||||
'tsv',
|
||||
'xls',
|
||||
'xlsx',
|
||||
'json',
|
||||
'yaml',
|
||||
]
|
||||
return ['csv', 'tsv', 'xls', 'xlsx', 'json', 'yaml']
|
||||
|
||||
|
||||
def DownloadFile(data, filename, content_type='application/text', inline=False) -> StreamingHttpResponse:
|
||||
def DownloadFile(
|
||||
data, filename, content_type='application/text', inline=False
|
||||
) -> StreamingHttpResponse:
|
||||
"""Create a dynamic file for the user to download.
|
||||
|
||||
Args:
|
||||
@@ -389,10 +460,12 @@ def DownloadFile(data, filename, content_type='application/text', inline=False)
|
||||
length = len(bytes(data, response.charset))
|
||||
response['Content-Length'] = length
|
||||
|
||||
disposition = "inline" if inline else "attachment"
|
||||
|
||||
response['Content-Disposition'] = f'{disposition}; filename={filename}'
|
||||
if inline:
|
||||
disposition = f'inline; filename={filename}'
|
||||
else:
|
||||
disposition = f'attachment; filename={filename}'
|
||||
|
||||
response['Content-Disposition'] = disposition
|
||||
return response
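`DownloadFile()` now builds the Content-Disposition header in two explicit branches. A hedged sketch of both call styles from a view; the view functions and data are hypothetical:

```py
# Sketch: returning generated data as a download from hypothetical views.
from InvenTree.helpers import DownloadFile


def export_report(request):
    """Hypothetical view returning a CSV export as an attachment."""
    csv_data = 'id,name\n1,Widget\n'

    # Response carries 'attachment; filename=parts.csv'
    return DownloadFile(csv_data, 'parts.csv', content_type='text/csv')


def preview_report(request):
    """Hypothetical view rendering the same data inline in the browser."""
    csv_data = 'id,name\n1,Widget\n'

    # Response carries 'inline; filename=parts.csv'
    return DownloadFile(csv_data, 'parts.csv', content_type='text/csv', inline=True)
```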
|
||||
|
||||
|
||||
@@ -449,7 +522,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
try:
|
||||
expected_quantity = int(expected_quantity)
|
||||
except ValueError:
|
||||
raise ValidationError([_("Invalid quantity provided")])
|
||||
raise ValidationError([_('Invalid quantity provided')])
|
||||
|
||||
if input_string:
|
||||
input_string = str(input_string).strip()
|
||||
@@ -457,7 +530,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
input_string = ''
|
||||
|
||||
if len(input_string) == 0:
|
||||
raise ValidationError([_("Empty serial number string")])
|
||||
raise ValidationError([_('Empty serial number string')])
|
||||
|
||||
next_value = increment_serial_number(starting_value)
|
||||
|
||||
@@ -467,19 +540,18 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
next_value = increment_serial_number(next_value)
|
||||
|
||||
# Split input string by whitespace or comma (,) characters
|
||||
groups = re.split(r"[\s,]+", input_string)
|
||||
groups = re.split(r'[\s,]+', input_string)
|
||||
|
||||
serials = []
|
||||
errors = []
|
||||
|
||||
def add_error(error: str):
|
||||
"""Helper function for adding an error message"""
|
||||
"""Helper function for adding an error message."""
|
||||
if error not in errors:
|
||||
errors.append(error)
|
||||
|
||||
def add_serial(serial):
|
||||
"""Helper function to check for duplicated values"""
|
||||
|
||||
"""Helper function to check for duplicated values."""
|
||||
serial = serial.strip()
|
||||
|
||||
# Ignore blank / empty serials
|
||||
@@ -487,7 +559,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
return
|
||||
|
||||
if serial in serials:
|
||||
add_error(_("Duplicate serial") + f": {serial}")
|
||||
add_error(_('Duplicate serial') + f': {serial}')
|
||||
else:
|
||||
serials.append(serial)
|
||||
|
||||
@@ -502,7 +574,6 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
return serials
|
||||
|
||||
for group in groups:
|
||||
|
||||
# Calculate the "remaining" quantity of serial numbers
|
||||
remaining = expected_quantity - len(serials)
|
||||
|
||||
@@ -520,7 +591,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
|
||||
if a == b:
|
||||
# Invalid group
|
||||
add_error(_(f"Invalid group range: {group}"))
|
||||
add_error(_(f'Invalid group range: {group}'))
|
||||
continue
|
||||
|
||||
group_items = []
|
||||
@@ -549,13 +620,21 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
break
|
||||
|
||||
if len(group_items) > remaining:
|
||||
add_error(_(f"Group range {group} exceeds allowed quantity ({expected_quantity})"))
|
||||
elif len(group_items) > 0 and group_items[0] == a and group_items[-1] == b:
|
||||
add_error(
|
||||
_(
|
||||
f'Group range {group} exceeds allowed quantity ({expected_quantity})'
|
||||
)
|
||||
)
|
||||
elif (
|
||||
len(group_items) > 0
|
||||
and group_items[0] == a
|
||||
and group_items[-1] == b
|
||||
):
|
||||
# In this case, the range extraction looks like it has worked
|
||||
for item in group_items:
|
||||
add_serial(item)
|
||||
else:
|
||||
add_error(_(f"Invalid group range: {group}"))
|
||||
add_error(_(f'Invalid group range: {group}'))
|
||||
|
||||
else:
|
||||
# In the case of a different number of hyphens, simply add the entire group
|
||||
@@ -573,20 +652,24 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
sequence_count = max(0, expected_quantity - len(serials))
|
||||
|
||||
if len(items) > 2 or len(items) == 0:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
continue
|
||||
elif len(items) == 2:
|
||||
try:
|
||||
if items[1]:
|
||||
sequence_count = int(items[1]) + 1
|
||||
except ValueError:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
continue
|
||||
|
||||
value = items[0]
|
||||
|
||||
# Keep incrementing up to the specified quantity
|
||||
while value is not None and value not in sequence_items and counter < sequence_count:
|
||||
while (
|
||||
value is not None
|
||||
and value not in sequence_items
|
||||
and counter < sequence_count
|
||||
):
|
||||
sequence_items.append(value)
|
||||
value = increment_serial_number(value)
|
||||
counter += 1
|
||||
@@ -595,7 +678,7 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
for item in sequence_items:
|
||||
add_serial(item)
|
||||
else:
|
||||
add_error(_(f"Invalid group sequence: {group}"))
|
||||
add_error(_(f'Invalid group sequence: {group}'))
|
||||
|
||||
else:
|
||||
# At this point, we assume that the 'group' is just a single serial value
|
||||
@@ -605,10 +688,14 @@ def extract_serial_numbers(input_string, expected_quantity: int, starting_value=
|
||||
raise ValidationError(errors)
|
||||
|
||||
if len(serials) == 0:
|
||||
raise ValidationError([_("No serial numbers found")])
|
||||
raise ValidationError([_('No serial numbers found')])
|
||||
|
||||
if len(errors) == 0 and len(serials) != expected_quantity:
|
||||
raise ValidationError([_(f"Number of unique serial numbers ({len(serials)}) must match quantity ({expected_quantity})")])
|
||||
raise ValidationError([
|
||||
_(
|
||||
f'Number of unique serial numbers ({len(serials)}) must match quantity ({expected_quantity})'
|
||||
)
|
||||
])
|
||||
|
||||
return serials
|
||||
|
||||
@@ -645,9 +732,7 @@ def validateFilterString(value, model=None):
|
||||
pair = group.split('=')
|
||||
|
||||
if len(pair) != 2:
|
||||
raise ValidationError(
|
||||
f"Invalid group: {group}"
|
||||
)
|
||||
raise ValidationError(f'Invalid group: {group}')
|
||||
|
||||
k, v = pair
|
||||
|
||||
@@ -655,9 +740,7 @@ def validateFilterString(value, model=None):
|
||||
v = v.strip()
|
||||
|
||||
if not k or not v:
|
||||
raise ValidationError(
|
||||
f"Invalid group: {group}"
|
||||
)
|
||||
raise ValidationError(f'Invalid group: {group}')
|
||||
|
||||
results[k] = v
|
||||
|
||||
@@ -666,9 +749,7 @@ def validateFilterString(value, model=None):
|
||||
try:
|
||||
model.objects.filter(**results)
|
||||
except FieldError as e:
|
||||
raise ValidationError(
|
||||
str(e),
|
||||
)
|
||||
raise ValidationError(str(e))
|
||||
|
||||
return results
|
||||
|
||||
@@ -706,7 +787,11 @@ def clean_decimal(number):
|
||||
# Number cannot be converted to Decimal (eg. a string containing letters)
|
||||
return Decimal(0)
|
||||
|
||||
return clean_number.quantize(Decimal(1)) if clean_number == clean_number.to_integral() else clean_number.normalize()
|
||||
return (
|
||||
clean_number.quantize(Decimal(1))
|
||||
if clean_number == clean_number.to_integral()
|
||||
else clean_number.normalize()
|
||||
)
|
||||
|
||||
|
||||
def strip_html_tags(value: str, raise_error=True, field_name=None):
|
||||
@@ -714,55 +799,45 @@ def strip_html_tags(value: str, raise_error=True, field_name=None):
|
||||
|
||||
If raise_error is True, a ValidationError will be thrown if HTML tags are detected
|
||||
"""
|
||||
cleaned = clean(
|
||||
value,
|
||||
strip=True,
|
||||
tags=[],
|
||||
attributes=[],
|
||||
)
|
||||
cleaned = clean(value, strip=True, tags=[], attributes=[])
|
||||
|
||||
# Add escaped characters back in
|
||||
replacements = {
|
||||
'>': '>',
|
||||
'<': '<',
|
||||
'&': '&',
|
||||
}
|
||||
replacements = {'>': '>', '<': '<', '&': '&'}
|
||||
|
||||
for o, r in replacements.items():
|
||||
cleaned = cleaned.replace(o, r)
|
||||
|
||||
# If the length changed, it means that HTML tags were removed!
|
||||
if len(cleaned) != len(value) and raise_error:
|
||||
|
||||
field = field_name or 'non_field_errors'
|
||||
|
||||
raise ValidationError({
|
||||
field: [_("Remove HTML tags from this value")]
|
||||
})
|
||||
raise ValidationError({field: [_('Remove HTML tags from this value')]})
|
||||
|
||||
return cleaned
|
||||
|
||||
|
||||
def remove_non_printable_characters(value: str, remove_newline=True, remove_ascii=True, remove_unicode=True):
|
||||
"""Remove non-printable / control characters from the provided string"""
|
||||
def remove_non_printable_characters(
|
||||
value: str, remove_newline=True, remove_ascii=True, remove_unicode=True
|
||||
):
|
||||
"""Remove non-printable / control characters from the provided string."""
|
||||
cleaned = value
|
||||
|
||||
if remove_ascii:
|
||||
# Remove ASCII control characters
|
||||
# Note that we do not sub out 0x0A (\n) here, it is done separately below
|
||||
cleaned = regex.sub(u'[\x00-\x09]+', '', cleaned)
|
||||
cleaned = regex.sub(u'[\x0b-\x1F\x7F]+', '', cleaned)
|
||||
cleaned = regex.sub('[\x00-\x09]+', '', cleaned)
|
||||
cleaned = regex.sub('[\x0b-\x1f\x7f]+', '', cleaned)
|
||||
|
||||
if remove_newline:
|
||||
cleaned = regex.sub(u'[\x0a]+', '', cleaned)
|
||||
cleaned = regex.sub('[\x0a]+', '', cleaned)
|
||||
|
||||
if remove_unicode:
|
||||
# Remove Unicode control characters
|
||||
if remove_newline:
|
||||
cleaned = regex.sub(u'[^\P{C}]+', '', cleaned)
|
||||
cleaned = regex.sub('[^\P{C}]+', '', cleaned)
|
||||
else:
|
||||
# Use 'negative-lookahead' to exclude newline character
|
||||
cleaned = regex.sub(u'(?![\x0A])[^\P{C}]+', '', cleaned)
|
||||
cleaned = regex.sub('(?![\x0a])[^\P{C}]+', '', cleaned)
|
||||
|
||||
return cleaned
|
||||
|
||||
@@ -783,7 +858,14 @@ def hash_barcode(barcode_data):
|
||||
return str(hash.hexdigest())
|
||||
|
||||
|
||||
def get_objectreference(obj, type_ref: str = 'content_type', object_ref: str = 'object_id'):
|
||||
def hash_file(filename: str):
|
||||
"""Return the MD5 hash of a file."""
|
||||
return hashlib.md5(open(filename, 'rb').read()).hexdigest()
|
||||
|
||||
|
||||
def get_objectreference(
|
||||
obj, type_ref: str = 'content_type', object_ref: str = 'object_id'
|
||||
):
|
||||
"""Lookup method for the GenericForeignKey fields.
|
||||
|
||||
Attributes:
|
||||
@@ -821,14 +903,13 @@ def get_objectreference(obj, type_ref: str = 'content_type', object_ref: str = '
|
||||
ret = {}
|
||||
if url_fnc:
|
||||
ret['link'] = url_fnc()
|
||||
return {
|
||||
'name': str(item),
|
||||
'model': str(model_cls._meta.verbose_name),
|
||||
**ret
|
||||
}
|
||||
return {'name': str(item), 'model': str(model_cls._meta.verbose_name), **ret}
|
||||
|
||||
|
||||
def inheritors(cls):
|
||||
Inheritors_T = TypeVar('Inheritors_T')
|
||||
|
||||
|
||||
def inheritors(cls: type[Inheritors_T]) -> set[type[Inheritors_T]]:
|
||||
"""Return all classes that are subclasses from the supplied cls."""
|
||||
subcls = set()
|
||||
work = [cls]
|
||||
@@ -840,3 +921,15 @@ def inheritors(cls):
|
||||
subcls.add(child)
|
||||
work.append(child)
|
||||
return subcls
|
||||
|
||||
|
||||
def is_ajax(request):
|
||||
"""Check if the current request is an AJAX request."""
|
||||
return request.headers.get('x-requested-with') == 'XMLHttpRequest'
|
||||
|
||||
|
||||
def pui_url(subpath: str) -> str:
|
||||
"""Return the URL for a PUI subpath."""
|
||||
if not subpath.startswith('/'):
|
||||
subpath = '/' + subpath
|
||||
return f'/{settings.FRONTEND_URL_BASE}{subpath}'
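Two small helpers round out the module: `is_ajax()` inspects the `X-Requested-With` header and `pui_url()` prefixes a path with the configured frontend base. A quick sketch (the default `FRONTEND_URL_BASE` value shown is an assumption):

```py
# Sketch of the two helpers added above.
from django.test import RequestFactory

from InvenTree.helpers import is_ajax, pui_url

request = RequestFactory().get('/', HTTP_X_REQUESTED_WITH='XMLHttpRequest')
print(is_ajax(request))      # True

# Assuming FRONTEND_URL_BASE is something like 'platform':
print(pui_url('part/1'))     # e.g. '/platform/part/1'
```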
|
||||
|
||||
InvenTree/InvenTree/helpers_mixin.py (new file, 106 lines)
@@ -0,0 +1,106 @@
|
||||
"""Provides helper mixins that are used throughout the InvenTree project."""
|
||||
|
||||
import inspect
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from plugin import registry as plg_registry
|
||||
|
||||
|
||||
class ClassValidationMixin:
|
||||
"""Mixin to validate class attributes and overrides.
|
||||
|
||||
Class attributes:
|
||||
required_attributes: List of class attributes that need to be defined
|
||||
required_overrides: List of functions that need to be overridden; a nested list means that any one of them needs an override
|
||||
|
||||
Example:
|
||||
```py
|
||||
class Parent(ClassValidationMixin):
|
||||
NAME: str
|
||||
def test(self):
|
||||
pass
|
||||
|
||||
required_attributes = ["NAME"]
|
||||
required_overrides = [test]
|
||||
|
||||
class MyClass(Parent):
|
||||
pass
|
||||
|
||||
myClass = MyClass()
|
||||
myClass.validate() # raises NotImplementedError
|
||||
```
|
||||
"""
|
||||
|
||||
required_attributes = []
|
||||
required_overrides = []
|
||||
|
||||
@classmethod
|
||||
def validate(cls):
|
||||
"""Validate the class against the required attributes/overrides."""
|
||||
|
||||
def attribute_missing(key):
|
||||
"""Check if attribute is missing."""
|
||||
return not hasattr(cls, key) or getattr(cls, key) == ''
|
||||
|
||||
def override_missing(base_implementation):
|
||||
"""Check if override is missing."""
|
||||
if isinstance(base_implementation, list):
|
||||
return all(override_missing(x) for x in base_implementation)
|
||||
|
||||
return base_implementation == getattr(
|
||||
cls, base_implementation.__name__, None
|
||||
)
|
||||
|
||||
missing_attributes = list(filter(attribute_missing, cls.required_attributes))
|
||||
missing_overrides = list(filter(override_missing, cls.required_overrides))
|
||||
|
||||
errors = []
|
||||
|
||||
if len(missing_attributes) > 0:
|
||||
errors.append(
|
||||
f"did not provide the following attributes: {', '.join(missing_attributes)}"
|
||||
)
|
||||
if len(missing_overrides) > 0:
|
||||
missing_overrides_list = []
|
||||
for base_implementation in missing_overrides:
|
||||
if isinstance(base_implementation, list):
|
||||
missing_overrides_list.append(
|
||||
'one of '
|
||||
+ ' or '.join(attr.__name__ for attr in base_implementation)
|
||||
)
|
||||
else:
|
||||
missing_overrides_list.append(base_implementation.__name__)
|
||||
errors.append(
|
||||
f"did not override the required attributes: {', '.join(missing_overrides_list)}"
|
||||
)
|
||||
|
||||
if len(errors) > 0:
|
||||
raise NotImplementedError(f"'{cls}' " + ' and '.join(errors))
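For contrast with the failing example in the docstring above, a sketch of a subclass that satisfies its parent's declared requirements and therefore passes `validate()` (assumes the InvenTree environment so the module imports cleanly):

```py
# Sketch: a subclass that satisfies required_attributes / required_overrides.
from InvenTree.helpers_mixin import ClassValidationMixin


class Parent(ClassValidationMixin):
    NAME: str

    def test(self):
        pass

    required_attributes = ['NAME']
    required_overrides = [test]


class GoodChild(Parent):
    NAME = 'good'

    def test(self):
        return 'overridden'


GoodChild.validate()   # passes silently - nothing is missing
```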
|
||||
|
||||
|
||||
class ClassProviderMixin:
|
||||
"""Mixin to get metadata about a class itself, e.g. the plugin that provided that class."""
|
||||
|
||||
@classmethod
|
||||
def get_provider_file(cls):
|
||||
"""File that contains the Class definition."""
|
||||
return inspect.getfile(cls)
|
||||
|
||||
@classmethod
|
||||
def get_provider_plugin(cls):
|
||||
"""Plugin that contains the Class definition, otherwise None."""
|
||||
for plg in plg_registry.plugins.values():
|
||||
if plg.package_path == cls.__module__:
|
||||
return plg
|
||||
|
||||
@classmethod
|
||||
def get_is_builtin(cls):
|
||||
"""Is this Class build in the Inventree source code?"""
|
||||
try:
|
||||
Path(cls.get_provider_file()).relative_to(settings.BASE_DIR)
|
||||
return True
|
||||
except ValueError:
|
||||
# Path(...).relative_to raises a ValueError if it is not relative to the InvenTree source base dir
|
||||
return False
|
||||
@@ -19,8 +19,11 @@ import common.models
|
||||
import InvenTree
|
||||
import InvenTree.helpers_model
|
||||
import InvenTree.version
|
||||
from common.notifications import (InvenTreeNotificationBodies,
|
||||
NotificationBody, trigger_notification)
|
||||
from common.notifications import (
|
||||
InvenTreeNotificationBodies,
|
||||
NotificationBody,
|
||||
trigger_notification,
|
||||
)
|
||||
from InvenTree.format import format_money
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
@@ -31,45 +34,59 @@ def getSetting(key, backup_value=None):
|
||||
return common.models.InvenTreeSetting.get_setting(key, backup_value=backup_value)
|
||||
|
||||
|
||||
def construct_absolute_url(*arg, **kwargs):
|
||||
def get_base_url(request=None):
|
||||
"""Return the base URL for the InvenTree server.
|
||||
|
||||
The base URL is determined in the following order of decreasing priority:
|
||||
|
||||
1. If a request object is provided, use the request URL
|
||||
2. Multi-site is enabled, and the current site has a valid URL
|
||||
3. If settings.SITE_URL is set (e.g. in the Django settings), use that
|
||||
4. If the InvenTree setting INVENTREE_BASE_URL is set, use that
|
||||
"""
|
||||
# Check if a request is provided
|
||||
if request:
|
||||
return request.build_absolute_uri('/')
|
||||
|
||||
# Check if multi-site is enabled
|
||||
try:
|
||||
from django.contrib.sites.models import Site
|
||||
|
||||
return Site.objects.get_current().domain
|
||||
except (ImportError, RuntimeError):
|
||||
pass
|
||||
|
||||
# Check if a global site URL is provided
|
||||
if site_url := getattr(settings, 'SITE_URL', None):
|
||||
return site_url
|
||||
|
||||
# Check if a global InvenTree setting is provided
|
||||
try:
|
||||
if site_url := common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_BASE_URL', create=False, cache=False
|
||||
):
|
||||
return site_url
|
||||
except (ProgrammingError, OperationalError):
|
||||
pass
|
||||
|
||||
# No base URL available
|
||||
return ''
|
||||
|
||||
|
||||
def construct_absolute_url(*arg, base_url=None, request=None):
|
||||
"""Construct (or attempt to construct) an absolute URL from a relative URL.
|
||||
|
||||
This is useful when (for example) sending an email to a user with a link
|
||||
to something in the InvenTree web framework.
|
||||
A URL is constructed in the following order:
|
||||
1. If settings.SITE_URL is set (e.g. in the Django settings), use that
|
||||
2. If the InvenTree setting INVENTREE_BASE_URL is set, use that
|
||||
3. Otherwise, use the current request URL (if available)
|
||||
Args:
|
||||
*arg: The relative URL to construct
|
||||
base_url: The base URL to use for the construction (if not provided, will attempt to determine from settings)
|
||||
request: The request object to use for the construction (optional)
|
||||
"""
|
||||
relative_url = '/'.join(arg)
|
||||
|
||||
# If a site URL is provided, use that
|
||||
site_url = getattr(settings, 'SITE_URL', None)
|
||||
if not base_url:
|
||||
base_url = get_base_url(request=request)
|
||||
|
||||
if not site_url:
|
||||
# Otherwise, try to use the InvenTree setting
|
||||
try:
|
||||
site_url = common.models.InvenTreeSetting.get_setting('INVENTREE_BASE_URL', create=False, cache=False)
|
||||
except (ProgrammingError, OperationalError):
|
||||
pass
|
||||
|
||||
if not site_url:
|
||||
# Otherwise, try to use the current request
|
||||
request = kwargs.get('request', None)
|
||||
|
||||
if request:
|
||||
site_url = request.build_absolute_uri('/')
|
||||
|
||||
if not site_url:
|
||||
# No site URL available, return the relative URL
|
||||
return relative_url
|
||||
|
||||
return urljoin(site_url, relative_url)
|
||||
|
||||
|
||||
def get_base_url(**kwargs):
|
||||
"""Return the base URL for the InvenTree server"""
|
||||
return construct_absolute_url('', **kwargs)
|
||||
return urljoin(base_url, relative_url)
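`construct_absolute_url()` now accepts explicit `base_url` and `request` keyword arguments and otherwise defers to `get_base_url()`, which resolves the base from the request, the sites framework, `settings.SITE_URL` or the `INVENTREE_BASE_URL` setting, in that order. A hedged usage sketch:

```py
# Sketch only - assumes a configured InvenTree environment.
from InvenTree.helpers_model import construct_absolute_url, get_base_url

# With an explicit base no settings lookup is needed
print(construct_absolute_url('api', 'part', '1', base_url='https://example.com'))
# -> 'https://example.com/api/part/1'

# Without a base the helper falls back to get_base_url(); if that returns ''
# the path is effectively returned as a relative URL
print(get_base_url() or '(no base URL configured)')
print(construct_absolute_url('/part/1/'))
```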
|
||||
|
||||
|
||||
def download_image_from_url(remote_url, timeout=2.5):
|
||||
@@ -100,12 +117,22 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
validator(remote_url)
|
||||
|
||||
# Calculate maximum allowable image size (in bytes)
|
||||
max_size = int(common.models.InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE')) * 1024 * 1024
|
||||
max_size = (
|
||||
int(
|
||||
common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE'
|
||||
)
|
||||
)
|
||||
* 1024
|
||||
* 1024
|
||||
)
|
||||
|
||||
# Add user specified user-agent to request (if specified)
|
||||
user_agent = common.models.InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_FROM_URL_USER_AGENT')
|
||||
user_agent = common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_DOWNLOAD_FROM_URL_USER_AGENT'
|
||||
)
|
||||
if user_agent:
|
||||
headers = {"User-Agent": user_agent}
|
||||
headers = {'User-Agent': user_agent}
|
||||
else:
|
||||
headers = None
|
||||
|
||||
@@ -120,24 +147,28 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
# Throw an error if anything goes wrong
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.ConnectionError as exc:
|
||||
raise Exception(_("Connection error") + f": {str(exc)}")
|
||||
raise Exception(_('Connection error') + f': {str(exc)}')
|
||||
except requests.exceptions.Timeout as exc:
|
||||
raise exc
|
||||
except requests.exceptions.HTTPError:
|
||||
raise requests.exceptions.HTTPError(_("Server responded with invalid status code") + f": {response.status_code}")
|
||||
raise requests.exceptions.HTTPError(
|
||||
_('Server responded with invalid status code') + f': {response.status_code}'
|
||||
)
|
||||
except Exception as exc:
|
||||
raise Exception(_("Exception occurred") + f": {str(exc)}")
|
||||
raise Exception(_('Exception occurred') + f': {str(exc)}')
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception(_("Server responded with invalid status code") + f": {response.status_code}")
|
||||
raise Exception(
|
||||
_('Server responded with invalid status code') + f': {response.status_code}'
|
||||
)
|
||||
|
||||
try:
|
||||
content_length = int(response.headers.get('Content-Length', 0))
|
||||
except ValueError:
|
||||
raise ValueError(_("Server responded with invalid Content-Length value"))
|
||||
raise ValueError(_('Server responded with invalid Content-Length value'))
|
||||
|
||||
if content_length > max_size:
|
||||
raise ValueError(_("Image size is too large"))
|
||||
raise ValueError(_('Image size is too large'))
|
||||
|
||||
# Download the file, ensuring we do not exceed the reported size
|
||||
file = io.BytesIO()
|
||||
@@ -149,12 +180,12 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
dl_size += len(chunk)
|
||||
|
||||
if dl_size > max_size:
|
||||
raise ValueError(_("Image download exceeded maximum size"))
|
||||
raise ValueError(_('Image download exceeded maximum size'))
|
||||
|
||||
file.write(chunk)
|
||||
|
||||
if dl_size == 0:
|
||||
raise ValueError(_("Remote server returned empty response"))
|
||||
raise ValueError(_('Remote server returned empty response'))
|
||||
|
||||
# Now, attempt to convert the downloaded data to a valid image file
|
||||
# img.verify() will throw an exception if the image is not valid
|
||||
@@ -162,13 +193,20 @@ def download_image_from_url(remote_url, timeout=2.5):
|
||||
img = Image.open(file).convert()
|
||||
img.verify()
|
||||
except Exception:
|
||||
raise TypeError(_("Supplied URL is not a valid image file"))
|
||||
raise TypeError(_('Supplied URL is not a valid image file'))
|
||||
|
||||
return img
|
||||
|
||||
|
||||
def render_currency(money, decimal_places=None, currency=None, min_decimal_places=None, max_decimal_places=None):
    """Render a currency / Money object to a formatted string (e.g. for reports)
def render_currency(
    money,
    decimal_places=None,
    currency=None,
    min_decimal_places=None,
    max_decimal_places=None,
    include_symbol=True,
):
    """Render a currency / Money object to a formatted string (e.g. for reports).

    Arguments:
        money: The Money instance to be rendered
@@ -176,6 +214,7 @@ def render_currency(money, decimal_places=None, currency=None, min_decimal_place
        currency: Optionally convert to the specified currency
        min_decimal_places: The minimum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES_MIN setting.
        max_decimal_places: The maximum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES setting.
        include_symbol: If True, include the currency symbol in the output
    """
    if money in [None, '']:
        return '-'

@@ -192,13 +231,19 @@ def render_currency(money, decimal_places=None, currency=None, min_decimal_place
        pass

    if decimal_places is None:
        decimal_places = common.models.InvenTreeSetting.get_setting('PRICING_DECIMAL_PLACES', 6)
        decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES', 6
        )

    if min_decimal_places is None:
        min_decimal_places = common.models.InvenTreeSetting.get_setting('PRICING_DECIMAL_PLACES_MIN', 0)
        min_decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES_MIN', 0
        )

    if max_decimal_places is None:
        max_decimal_places = common.models.InvenTreeSetting.get_setting('PRICING_DECIMAL_PLACES', 6)
        max_decimal_places = common.models.InvenTreeSetting.get_setting(
            'PRICING_DECIMAL_PLACES', 6
        )

    value = Decimal(str(money.amount)).normalize()
    value = str(value)

@@ -215,7 +260,9 @@ def render_currency(money, decimal_places=None, currency=None, min_decimal_place

    decimal_places = max(decimal_places, max_decimal_places)

    return format_money(money, decimal_places=decimal_places)
    return format_money(
        money, decimal_places=decimal_places, include_symbol=include_symbol
    )

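A short, hypothetical example of the new include_symbol argument (not part of this changeset; it assumes the helper lives in InvenTree.helpers_model and uses django-money's Money class, which InvenTree's pricing code is built around):

from djmoney.money import Money

from InvenTree.helpers_model import render_currency

price = Money('123.4560', 'USD')

# Render with the currency symbol included (the default behaviour)
print(render_currency(price))

# Clamp to two decimal places and drop the symbol, e.g. for CSV exports
print(render_currency(price, max_decimal_places=2, include_symbol=False))
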
def getModelsWithMixin(mixin_class) -> list:
@@ -229,7 +276,9 @@ def getModelsWithMixin(mixin_class) -> list:
    from django.contrib.contenttypes.models import ContentType

    try:
        db_models = [x.model_class() for x in ContentType.objects.all() if x is not None]
        db_models = [
            x.model_class() for x in ContentType.objects.all() if x is not None
        ]
    except (OperationalError, ProgrammingError):
        # Database is likely not yet ready
        db_models = []

@@ -237,7 +286,12 @@ def getModelsWithMixin(mixin_class) -> list:
    return [x for x in db_models if x is not None and issubclass(x, mixin_class)]

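A hypothetical call (not part of this changeset) showing how the helper is typically used to collect models carrying a given mixin:

# Sketch only - MetadataMixin is defined in InvenTree/InvenTree/models.py
# (see the model changes further down in this comparison)
from InvenTree.helpers_model import getModelsWithMixin
from InvenTree.models import MetadataMixin

for model in getModelsWithMixin(MetadataMixin):
    print(model.__name__)
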
def notify_responsible(instance, sender, content: NotificationBody = InvenTreeNotificationBodies.NewOrder, exclude=None):
def notify_responsible(
    instance,
    sender,
    content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
    exclude=None,
):
    """Notify all responsible parties of a change in an instance.

    Parses the supplied content with the provided instance and sender and sends a notification to all responsible users,
@@ -249,10 +303,23 @@ def notify_responsible(instance, sender, content: NotificationBody = InvenTreeNo
        content (NotificationBody, optional): _description_. Defaults to InvenTreeNotificationBodies.NewOrder.
        exclude (User, optional): User instance that should be excluded. Defaults to None.
    """
    notify_users([instance.responsible], instance, sender, content=content, exclude=exclude)
    import InvenTree.ready

    if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
        return

    notify_users(
        [instance.responsible], instance, sender, content=content, exclude=exclude
    )


def notify_users(users, instance, sender, content: NotificationBody = InvenTreeNotificationBodies.NewOrder, exclude=None):
def notify_users(
    users,
    instance,
    sender,
    content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
    exclude=None,
):
    """Notify all passed users or groups.

    Parses the supplied content with the provided instance and sender and sends a notification to all users,
@@ -278,10 +345,10 @@ def notify_users(users, instance, sender, content: NotificationBody = InvenTreeN
        'instance': instance,
        'name': content.name.format(**content_context),
        'message': content.message.format(**content_context),
        'link': InvenTree.helpers_model.construct_absolute_url(instance.get_absolute_url()),
        'template': {
            'subject': content.name.format(**content_context),
        }
        'link': InvenTree.helpers_model.construct_absolute_url(
            instance.get_absolute_url()
        ),
        'template': {'subject': content.name.format(**content_context)},
    }

    if content.template:
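For context, a hypothetical call site for the notification helpers above (not part of this changeset; it assumes the notification body definitions live in common.notifications, and 'order' / 'request' stand in for objects available at the call site):

# Sketch only - 'order' stands in for any model instance with a
# 'responsible' owner field, as notify_responsible() expects
import InvenTree.helpers_model
from common.notifications import InvenTreeNotificationBodies

InvenTree.helpers_model.notify_responsible(
    instance=order,
    sender=order.__class__,
    content=InvenTreeNotificationBodies.NewOrder,
    exclude=request.user,  # skip the user who triggered the change
)
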
InvenTree/InvenTree/locales.py (new file, 48 lines added)
@@ -0,0 +1,48 @@
"""Support translation locales for InvenTree.

If a new language translation is supported, it must be added here
After adding a new language, run the following command:
python manage.py makemessages -l <language_code> -e html,js,py --no-wrap
where <language_code> is the code for the new language
Additionally, update the following files with the new locale code:

- /src/frontend/.linguirc file
- /src/frontend/src/context/LanguageContext.tsx
"""

from django.utils.translation import gettext_lazy as _

LOCALES = [
    ('bg', _('Bulgarian')),
    ('cs', _('Czech')),
    ('da', _('Danish')),
    ('de', _('German')),
    ('el', _('Greek')),
    ('en', _('English')),
    ('es', _('Spanish')),
    ('es-mx', _('Spanish (Mexican)')),
    ('fa', _('Farsi / Persian')),
    ('fi', _('Finnish')),
    ('fr', _('French')),
    ('he', _('Hebrew')),
    ('hi', _('Hindi')),
    ('hu', _('Hungarian')),
    ('it', _('Italian')),
    ('ja', _('Japanese')),
    ('ko', _('Korean')),
    ('nl', _('Dutch')),
    ('no', _('Norwegian')),
    ('pl', _('Polish')),
    ('pt', _('Portuguese')),
    ('pt-br', _('Portuguese (Brazilian)')),
    ('ru', _('Russian')),
    ('sk', _('Slovak')),
    ('sl', _('Slovenian')),
    ('sr', _('Serbian')),
    ('sv', _('Swedish')),
    ('th', _('Thai')),
    ('tr', _('Turkish')),
    ('vi', _('Vietnamese')),
    ('zh-hans', _('Chinese (Simplified)')),
    ('zh-hant', _('Chinese (Traditional)')),
]

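A sketch (not part of the new file) of how the LOCALES list is expected to be consumed, for example from the Django settings module; the LANGUAGES assignment is an assumption based on how Django normally wires this up:

# Assumption: settings.py imports this list to drive Django's LANGUAGES setting
from InvenTree.locales import LOCALES

LANGUAGES = LOCALES

# The list also doubles as a lookup table from locale code to display name
language_names = dict(LOCALES)
print(language_names['de'])  # lazily-translated 'German'
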
@@ -1,7 +1,7 @@
|
||||
"""Functions for magic login."""
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.urls import reverse
|
||||
@@ -9,23 +9,23 @@ from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import sesame.utils
|
||||
from rest_framework import serializers
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
import InvenTree.version
|
||||
|
||||
|
||||
def send_simple_login_email(user, link):
|
||||
"""Send an email with the login link to this user."""
|
||||
site = Site.objects.get_current()
|
||||
site_name = InvenTree.version.inventreeInstanceName()
|
||||
|
||||
context = {
|
||||
"username": user.username,
|
||||
"site_name": site.name,
|
||||
"link": link,
|
||||
}
|
||||
email_plaintext_message = render_to_string("InvenTree/user_simple_login.txt", context)
|
||||
context = {'username': user.username, 'site_name': site_name, 'link': link}
|
||||
email_plaintext_message = render_to_string(
|
||||
'InvenTree/user_simple_login.txt', context
|
||||
)
|
||||
|
||||
send_mail(
|
||||
_(f"[{site.name}] Log in to the app"),
|
||||
_(f'[{site_name}] Log in to the app'),
|
||||
email_plaintext_message,
|
||||
settings.DEFAULT_FROM_EMAIL,
|
||||
[user.email],
|
||||
@@ -35,10 +35,10 @@ def send_simple_login_email(user, link):
|
||||
class GetSimpleLoginSerializer(serializers.Serializer):
|
||||
"""Serializer for the simple login view."""
|
||||
|
||||
email = serializers.CharField(label=_("Email"))
|
||||
email = serializers.CharField(label=_('Email'))
|
||||
|
||||
|
||||
class GetSimpleLoginView(APIView):
|
||||
class GetSimpleLoginView(GenericAPIView):
|
||||
"""View to send a simple login link."""
|
||||
|
||||
permission_classes = ()
|
||||
@@ -48,14 +48,14 @@ class GetSimpleLoginView(APIView):
|
||||
"""Get the token for the current user or fail."""
|
||||
serializer = self.serializer_class(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.email_submitted(email=serializer.data["email"])
|
||||
return Response({"status": "ok"})
|
||||
self.email_submitted(email=serializer.data['email'])
|
||||
return Response({'status': 'ok'})
|
||||
|
||||
def email_submitted(self, email):
|
||||
"""Notify user about link."""
|
||||
user = self.get_user(email)
|
||||
if user is None:
|
||||
print("user not found:", email)
|
||||
print('user not found:', email)
|
||||
return
|
||||
link = self.create_link(user)
|
||||
send_simple_login_email(user, link)
|
||||
@@ -69,7 +69,7 @@ class GetSimpleLoginView(APIView):
|
||||
|
||||
def create_link(self, user):
|
||||
"""Create a login link for this user."""
|
||||
link = reverse("sesame-login")
|
||||
link = reverse('sesame-login')
|
||||
link = self.request.build_absolute_uri(link)
|
||||
link += sesame.utils.get_query_string(user)
|
||||
return link
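For context (not part of this changeset), django-sesame provides both halves of the round trip used by create_link() above; a rough sketch, with the login path shown purely as a placeholder:

import sesame.utils

# Build the signed query string for a user, as create_link() does
query = sesame.utils.get_query_string(user)   # e.g. '?sesame=<signed token>'
login_url = f'https://inventree.example.com/placeholder-login-path/{query}'

# On the way back in, django-sesame resolves the token to a user (or None)
resolved_user = sesame.utils.get_user(request)
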
|
||||
|
||||
InvenTree/InvenTree/management/commands/check_migrations.py (new file, 19 lines added)
@@ -0,0 +1,19 @@
"""Check if there are any pending database migrations, and run them."""

import logging

from django.core.management.base import BaseCommand

from InvenTree.tasks import check_for_migrations

logger = logging.getLogger('inventree')


class Command(BaseCommand):
    """Check if there are any pending database migrations, and run them."""

    def handle(self, *args, **kwargs):
        """Check for any pending database migrations."""
        logger.info('Checking for pending database migrations')
        check_for_migrations(force=True, reload_registry=False)
        logger.info('Database migrations complete')

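The new command can also be triggered programmatically (a sketch, not part of the changeset), which is handy in container entrypoints or deployment scripts:

from django.core.management import call_command

# Equivalent to running `python manage.py check_migrations`
call_command('check_migrations')
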
@@ -12,7 +12,7 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Cleanup old (undefined) settings in the database."""
|
||||
logger.info("Collecting settings")
|
||||
logger.info('Collecting settings')
|
||||
from common.models import InvenTreeSetting, InvenTreeUserSetting
|
||||
|
||||
# general settings
|
||||
@@ -35,4 +35,4 @@ class Command(BaseCommand):
|
||||
setting.delete()
|
||||
logger.info("deleted user setting '%s'", setting.key)
|
||||
|
||||
logger.info("checked all settings")
|
||||
logger.info('checked all settings')
|
||||
|
||||
@@ -13,7 +13,6 @@ from django.utils.translation import override as lang_over
|
||||
def render_file(file_name, source, target, locales, ctx):
|
||||
"""Renders a file into all provided locales."""
|
||||
for locale in locales:
|
||||
|
||||
# Enforce lower-case for locale names
|
||||
locale = locale.lower()
|
||||
locale = locale.replace('_', '-')
|
||||
@@ -50,15 +49,18 @@ class Command(BaseCommand):
|
||||
# render!
|
||||
request = HttpRequest()
|
||||
ctx = {}
|
||||
processors = tuple(import_string(path) for path in settings.STATFILES_I18_PROCESSORS)
|
||||
processors = tuple(
|
||||
import_string(path) for path in settings.STATFILES_I18_PROCESSORS
|
||||
)
|
||||
for processor in processors:
|
||||
ctx.update(processor(request))
|
||||
|
||||
for file in os.listdir(SOURCE_DIR, ):
|
||||
for file in os.listdir(SOURCE_DIR):
|
||||
path = os.path.join(SOURCE_DIR, file)
|
||||
if os.path.exists(path) and os.path.isfile(path):
|
||||
print(f"render {file}")
|
||||
render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
|
||||
else:
|
||||
raise NotImplementedError('Using multi-level directories is not implemented at this point') # TODO multilevel dir if needed
|
||||
print(f"rendered all files in {SOURCE_DIR}")
|
||||
raise NotImplementedError(
|
||||
'Using multi-level directories is not implemented at this point'
|
||||
) # TODO multilevel dir if needed
|
||||
print(f'Rendered all files in {SOURCE_DIR}')
|
||||
|
||||
@@ -3,55 +3,73 @@
|
||||
- This is crucial after importing any fixtures, etc
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from maintenance_mode.core import maintenance_mode_on, set_maintenance_mode
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Rebuild all database models which leverage the MPTT structure."""
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Rebuild all database models which leverage the MPTT structure."""
|
||||
with maintenance_mode_on():
|
||||
self.rebuild_models()
|
||||
|
||||
set_maintenance_mode(False)
|
||||
|
||||
def rebuild_models(self):
|
||||
"""Rebuild all MPTT models in the database."""
|
||||
# Part model
|
||||
try:
|
||||
print("Rebuilding Part objects")
|
||||
logger.info('Rebuilding Part objects')
|
||||
|
||||
from part.models import Part
|
||||
|
||||
Part.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Part objects")
|
||||
logger.info('Error rebuilding Part objects')
|
||||
|
||||
# Part category
|
||||
try:
|
||||
print("Rebuilding PartCategory objects")
|
||||
logger.info('Rebuilding PartCategory objects')
|
||||
|
||||
from part.models import PartCategory
|
||||
|
||||
PartCategory.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding PartCategory objects")
|
||||
logger.info('Error rebuilding PartCategory objects')
|
||||
|
||||
# StockItem model
|
||||
try:
|
||||
print("Rebuilding StockItem objects")
|
||||
logger.info('Rebuilding StockItem objects')
|
||||
|
||||
from stock.models import StockItem
|
||||
|
||||
StockItem.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockItem objects")
|
||||
logger.info('Error rebuilding StockItem objects')
|
||||
|
||||
# StockLocation model
|
||||
try:
|
||||
print("Rebuilding StockLocation objects")
|
||||
logger.info('Rebuilding StockLocation objects')
|
||||
|
||||
from stock.models import StockLocation
|
||||
|
||||
StockLocation.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockLocation objects")
|
||||
logger.info('Error rebuilding StockLocation objects')
|
||||
|
||||
# Build model
|
||||
try:
|
||||
print("Rebuilding Build objects")
|
||||
logger.info('Rebuilding Build objects')
|
||||
|
||||
from build.models import Build
|
||||
|
||||
Build.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Build objects")
|
||||
logger.info('Error rebuilding Build objects')
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.utils import OperationalError, ProgrammingError
|
||||
@@ -26,6 +27,18 @@ class Command(BaseCommand):
|
||||
|
||||
img = model.image
|
||||
|
||||
# Check for image paths
|
||||
img_paths = []
|
||||
|
||||
for x in [model.image, model.image.thumbnail, model.image.preview]:
|
||||
if x and x.path:
|
||||
img_paths.append(x.path)
|
||||
|
||||
if len(img_paths) > 0:
|
||||
if all((os.path.exists(path) for path in img_paths)):
|
||||
# All images exist - skip further work
|
||||
return
|
||||
|
||||
logger.info("Generating thumbnail image for '%s'", img)
|
||||
|
||||
try:
|
||||
@@ -37,20 +50,20 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Rebuild all thumbnail images."""
|
||||
logger.info("Rebuilding Part thumbnails")
|
||||
logger.info('Rebuilding Part thumbnails')
|
||||
|
||||
for part in Part.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(part)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.exception("ERROR: Database read error.")
|
||||
logger.exception('ERROR: Database read error.')
|
||||
break
|
||||
|
||||
logger.info("Rebuilding Company thumbnails")
|
||||
logger.info('Rebuilding Company thumbnails')
|
||||
|
||||
for company in Company.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(company)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.exception("ERROR: abase read error.")
|
||||
logger.exception('ERROR: abase read error.')
|
||||
break
|
||||
|
||||
@@ -18,7 +18,12 @@ class Command(BaseCommand):
|
||||
if not mail:
|
||||
raise KeyError('A mail is required')
|
||||
user = get_user_model()
|
||||
mfa_user = [*set(user.objects.filter(email=mail) | user.objects.filter(emailaddress__email=mail))]
|
||||
mfa_user = [
|
||||
*set(
|
||||
user.objects.filter(email=mail)
|
||||
| user.objects.filter(emailaddress__email=mail)
|
||||
)
|
||||
]
|
||||
|
||||
if len(mfa_user) == 0:
|
||||
print('No user with this mail associated')
|
||||
|
||||
InvenTree/InvenTree/management/commands/runmigrations.py (new file, 19 lines added)
@@ -0,0 +1,19 @@
"""Check if there are any pending database migrations, and run them."""

import logging

from django.core.management.base import BaseCommand

from InvenTree.tasks import check_for_migrations

logger = logging.getLogger('inventree')


class Command(BaseCommand):
    """Check if there are any pending database migrations, and run them."""

    def handle(self, *args, **kwargs):
        """Check for any pending database migrations."""
        logger.info('Checking for pending database migrations')
        check_for_migrations(force=True, reload_registry=False)
        logger.info('Database migrations complete')

@@ -12,12 +12,11 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Wait till the database is ready."""
|
||||
self.stdout.write("Waiting for database...")
|
||||
self.stdout.write('Waiting for database...')
|
||||
|
||||
connected = False
|
||||
|
||||
while not connected:
|
||||
|
||||
time.sleep(2)
|
||||
|
||||
try:
|
||||
@@ -26,12 +25,12 @@ class Command(BaseCommand):
|
||||
connected = True
|
||||
|
||||
except OperationalError as e:
|
||||
self.stdout.write(f"Could not connect to database: {e}")
|
||||
self.stdout.write(f'Could not connect to database: {e}')
|
||||
except ImproperlyConfigured as e:
|
||||
self.stdout.write(f"Improperly configured: {e}")
|
||||
self.stdout.write(f'Improperly configured: {e}')
|
||||
else:
|
||||
if not connection.is_usable():
|
||||
self.stdout.write("Database configuration is not usable")
|
||||
self.stdout.write('Database configuration is not usable')
|
||||
|
||||
if connected:
|
||||
self.stdout.write("Database connection successful!")
|
||||
self.stdout.write('Database connection successful!')
|
||||
|
||||
@@ -7,6 +7,7 @@ from rest_framework.fields import empty
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework.utils import model_meta
|
||||
|
||||
import common.models
|
||||
import InvenTree.permissions
|
||||
import users.models
|
||||
from InvenTree.helpers import str2bool
|
||||
@@ -45,8 +46,9 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
context = {}
|
||||
|
||||
if str2bool(request.query_params.get('context', False)):
|
||||
|
||||
if hasattr(self, 'serializer') and hasattr(self.serializer, 'get_context_data'):
|
||||
if hasattr(self, 'serializer') and hasattr(
|
||||
self.serializer, 'get_context_data'
|
||||
):
|
||||
context = self.serializer.get_context_data()
|
||||
|
||||
metadata['context'] = context
|
||||
@@ -68,7 +70,7 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
metadata['model'] = tbl_label
|
||||
|
||||
table = f"{app_label}_{tbl_label}"
|
||||
table = f'{app_label}_{tbl_label}'
|
||||
|
||||
actions = metadata.get('actions', None)
|
||||
|
||||
@@ -86,12 +88,11 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
}
|
||||
|
||||
# let the view define a custom rolemap
|
||||
if hasattr(view, "rolemap"):
|
||||
if hasattr(view, 'rolemap'):
|
||||
rolemap.update(view.rolemap)
|
||||
|
||||
# Remove any HTTP methods that the user does not have permission for
|
||||
for method, permission in rolemap.items():
|
||||
|
||||
result = check(user, table, permission)
|
||||
|
||||
if method in actions and not result:
|
||||
@@ -123,10 +124,7 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
model_class = None
|
||||
|
||||
# Attributes to copy extra attributes from the model to the field (if they don't exist)
|
||||
extra_attributes = [
|
||||
'help_text',
|
||||
'max_length',
|
||||
]
|
||||
extra_attributes = ['help_text', 'max_length']
|
||||
|
||||
try:
|
||||
model_class = serializer.Meta.model
|
||||
@@ -142,11 +140,8 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
# Iterate through simple fields
|
||||
for name, field in model_fields.fields.items():
|
||||
|
||||
if name in serializer_info.keys():
|
||||
|
||||
if field.has_default():
|
||||
|
||||
default = field.default
|
||||
|
||||
if callable(default):
|
||||
@@ -162,13 +157,11 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
for attr in extra_attributes:
|
||||
if attr not in serializer_info[name]:
|
||||
|
||||
if hasattr(field, attr):
|
||||
serializer_info[name][attr] = getattr(field, attr)
|
||||
|
||||
# Iterate through relations
|
||||
for name, relation in model_fields.relations.items():
|
||||
|
||||
if name not in serializer_info.keys():
|
||||
# Skip relation not defined in serializer
|
||||
continue
|
||||
@@ -179,11 +172,17 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
# Extract and provide the "limit_choices_to" filters
|
||||
# This is used to automatically filter AJAX requests
|
||||
serializer_info[name]['filters'] = relation.model_field.get_limit_choices_to()
|
||||
serializer_info[name]['filters'] = (
|
||||
relation.model_field.get_limit_choices_to()
|
||||
)
|
||||
|
||||
for attr in extra_attributes:
|
||||
if attr not in serializer_info[name] and hasattr(relation.model_field, attr):
|
||||
serializer_info[name][attr] = getattr(relation.model_field, attr)
|
||||
if attr not in serializer_info[name] and hasattr(
|
||||
relation.model_field, attr
|
||||
):
|
||||
serializer_info[name][attr] = getattr(
|
||||
relation.model_field, attr
|
||||
)
|
||||
|
||||
if name in model_default_values:
|
||||
serializer_info[name]['default'] = model_default_values[name]
|
||||
@@ -210,7 +209,10 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
pk = kwargs[field]
|
||||
break
|
||||
|
||||
if pk is not None:
|
||||
if issubclass(model_class, common.models.BaseInvenTreeSetting):
|
||||
instance = model_class.get_setting_object(**kwargs, create=False)
|
||||
|
||||
elif pk is not None:
|
||||
try:
|
||||
instance = model_class.objects.get(pk=pk)
|
||||
except (ValueError, model_class.DoesNotExist):
|
||||
@@ -220,11 +222,9 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
"""If there is an instance associated with this API View, introspect that instance to find any specific API info."""
|
||||
|
||||
if hasattr(instance, 'api_instance_filters'):
|
||||
|
||||
instance_filters = instance.api_instance_filters()
|
||||
|
||||
for field_name, field_filters in instance_filters.items():
|
||||
|
||||
if field_name not in serializer_info.keys():
|
||||
# The field might be missing, but is added later on
|
||||
# This function seems to get called multiple times?
|
||||
@@ -256,17 +256,20 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
|
||||
# Force non-nullable fields to read as "required"
|
||||
# (even if there is a default value!)
|
||||
if not field.allow_null and not (hasattr(field, 'allow_blank') and field.allow_blank):
|
||||
if not field.allow_null and not (
|
||||
hasattr(field, 'allow_blank') and field.allow_blank
|
||||
):
|
||||
field_info['required'] = True
|
||||
|
||||
# Introspect writable related fields
|
||||
if field_info['type'] == 'field' and not field_info['read_only']:
|
||||
|
||||
# If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
|
||||
if isinstance(field, serializers.PrimaryKeyRelatedField):
|
||||
model = field.queryset.model
|
||||
else:
|
||||
logger.debug("Could not extract model for:", field_info.get('label'), '->', field)
|
||||
logger.debug(
|
||||
'Could not extract model for:', field_info.get('label'), '->', field
|
||||
)
|
||||
model = None
|
||||
|
||||
if model:
|
||||
@@ -287,4 +290,4 @@ class InvenTreeMetadata(SimpleMetadata):
|
||||
return field_info
|
||||
|
||||
|
||||
InvenTreeMetadata.label_lookup[DependentField] = "dependent field"
|
||||
InvenTreeMetadata.label_lookup[DependentField] = 'dependent field'
|
||||
|
||||
@@ -7,16 +7,32 @@ from django.conf import settings
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
from django.http import HttpResponse
from django.shortcuts import redirect
from django.urls import Resolver404, include, re_path, resolve, reverse_lazy
from django.urls import Resolver404, include, path, resolve, reverse_lazy

from allauth_2fa.middleware import (AllauthTwoFactorMiddleware,
                                    BaseRequire2FAMiddleware)
from allauth_2fa.middleware import AllauthTwoFactorMiddleware, BaseRequire2FAMiddleware
from error_report.middleware import ExceptionProcessor

from InvenTree.urls import frontendpatterns
from users.models import ApiToken

logger = logging.getLogger("inventree")
logger = logging.getLogger('inventree')


def get_token_from_request(request):
    """Extract token information from a request object."""
    auth_keys = ['Authorization', 'authorization']
    token_keys = ['token', 'bearer']

    for k in auth_keys:
        if auth_header := request.headers.get(k, None):
            auth_header = auth_header.strip().lower().split()

            if len(auth_header) > 1:
                if auth_header[0].strip().lower().replace(':', '') in token_keys:
                    token = auth_header[1]
                    return token

    return None


class AuthRequiredMiddleware(object):
@@ -26,6 +42,22 @@ class AuthRequiredMiddleware(object):
        """Save response object."""
        self.get_response = get_response

    def check_token(self, request) -> bool:
        """Check if the user is authenticated via token."""
        if token := get_token_from_request(request):
            # Does the provided token match a valid user?
            try:
                token = ApiToken.objects.get(key=token)

                if token.active and token.user:
                    # Provide the user information to the request
                    request.user = token.user
                    return True
            except ApiToken.DoesNotExist:
                logger.warning('Access denied for unknown token %s', token)

        return False

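A small sketch (not part of the changeset) of the header formats the new get_token_from_request() helper accepts; note that the whole header value is lower-cased before the token is returned:

class FakeRequest:
    """Stand-in for a Django HttpRequest in this sketch."""

    headers = {'Authorization': 'Token inv-0123456789abcdef'}

print(get_token_from_request(FakeRequest()))   # 'inv-0123456789abcdef'

FakeRequest.headers = {'authorization': 'Bearer inv-0123456789abcdef'}
print(get_token_from_request(FakeRequest()))   # same token, 'bearer' scheme
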
def __call__(self, request):
|
||||
"""Check if user needs to be authenticated and is.
|
||||
|
||||
@@ -64,27 +96,15 @@ class AuthRequiredMiddleware(object):
|
||||
elif request.path_info.startswith('/accounts/'):
|
||||
authorized = True
|
||||
|
||||
elif request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/') or request.path_info.startswith('/assets/') or request.path_info == f'/{settings.FRONTEND_URL_BASE}':
|
||||
elif (
|
||||
request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
|
||||
or request.path_info.startswith('/assets/')
|
||||
or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
|
||||
):
|
||||
authorized = True
|
||||
|
||||
elif 'Authorization' in request.headers.keys() or 'authorization' in request.headers.keys():
|
||||
auth = request.headers.get('Authorization', request.headers.get('authorization')).strip()
|
||||
|
||||
if auth.lower().startswith('token') and len(auth.split()) == 2:
|
||||
token_key = auth.split()[1]
|
||||
|
||||
# Does the provided token match a valid user?
|
||||
try:
|
||||
token = ApiToken.objects.get(key=token_key)
|
||||
|
||||
if token.active and token.user:
|
||||
|
||||
# Provide the user information to the request
|
||||
request.user = token.user
|
||||
authorized = True
|
||||
|
||||
except ApiToken.DoesNotExist:
|
||||
logger.warning("Access denied for unknown token %s", token_key)
|
||||
elif self.check_token(request):
|
||||
authorized = True
|
||||
|
||||
# No authorization was found for the request
|
||||
if not authorized:
|
||||
@@ -99,17 +119,16 @@ class AuthRequiredMiddleware(object):
|
||||
]
|
||||
|
||||
# Do not redirect requests to any of these paths
|
||||
paths_ignore = [
|
||||
'/api/',
|
||||
'/js/',
|
||||
'/media/',
|
||||
'/static/',
|
||||
]
|
||||
paths_ignore = ['/api/', '/js/', '/media/', '/static/']
|
||||
|
||||
if path not in urls and not any(path.startswith(p) for p in paths_ignore):
|
||||
if path not in urls and not any(
|
||||
path.startswith(p) for p in paths_ignore
|
||||
):
|
||||
# Save the 'next' parameter to pass through to the login view
|
||||
|
||||
return redirect(f'{reverse_lazy("account_login")}?next={request.path}')
|
||||
return redirect(
|
||||
f'{reverse_lazy("account_login")}?next={request.path}'
|
||||
)
|
||||
# Return a 401 (Unauthorized) response code for this request
|
||||
return HttpResponse('Unauthorized', status=401)
|
||||
|
||||
@@ -118,11 +137,12 @@ class AuthRequiredMiddleware(object):
|
||||
return response
|
||||
|
||||
|
||||
url_matcher = re_path('', include(frontendpatterns))
|
||||
url_matcher = path('', include(frontendpatterns))
|
||||
|
||||
|
||||
class Check2FAMiddleware(BaseRequire2FAMiddleware):
|
||||
"""Check if user is required to have MFA enabled."""
|
||||
|
||||
def require_2fa(self, request):
|
||||
"""Use setting to check if MFA should be enforced for frontend page."""
|
||||
from common.models import InvenTreeSetting
|
||||
@@ -137,6 +157,7 @@ class Check2FAMiddleware(BaseRequire2FAMiddleware):
|
||||
|
||||
class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
|
||||
"""This function ensures only frontend code triggers the MFA auth cycle."""
|
||||
|
||||
def process_request(self, request):
|
||||
"""Check if requested url is forntend and enforce MFA check."""
|
||||
try:
|
||||
@@ -148,6 +169,7 @@ class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
|
||||
|
||||
class InvenTreeRemoteUserMiddleware(PersistentRemoteUserMiddleware):
|
||||
"""Middleware to check if HTTP-header based auth is enabled and to set it up."""
|
||||
|
||||
header = settings.REMOTE_LOGIN_HEADER
|
||||
|
||||
def process_request(self, request):
|
||||
|
||||
@@ -9,60 +9,7 @@ from InvenTree.fields import InvenTreeNotesField
|
||||
from InvenTree.helpers import remove_non_printable_characters, strip_html_tags
|
||||
|
||||
|
||||
class DiffMixin:
|
||||
"""Mixin which can be used to determine which fields have changed, compared to the instance saved to the database."""
|
||||
|
||||
def get_db_instance(self):
|
||||
"""Return the instance of the object saved in the database.
|
||||
|
||||
Returns:
|
||||
object: Instance of the object saved in the database
|
||||
"""
|
||||
|
||||
if self.pk:
|
||||
try:
|
||||
return self.__class__.objects.get(pk=self.pk)
|
||||
except self.__class__.DoesNotExist:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def get_field_deltas(self):
|
||||
"""Return a dict of field deltas.
|
||||
|
||||
Compares the current instance with the instance saved in the database,
|
||||
and returns a dict of fields which have changed.
|
||||
|
||||
Returns:
|
||||
dict: Dict of field deltas
|
||||
"""
|
||||
|
||||
db_instance = self.get_db_instance()
|
||||
|
||||
if db_instance is None:
|
||||
return {}
|
||||
|
||||
deltas = {}
|
||||
|
||||
for field in self._meta.fields:
|
||||
if field.name == 'id':
|
||||
continue
|
||||
|
||||
if getattr(self, field.name) != getattr(db_instance, field.name):
|
||||
deltas[field.name] = {
|
||||
'old': getattr(db_instance, field.name),
|
||||
'new': getattr(self, field.name),
|
||||
}
|
||||
|
||||
return deltas
|
||||
|
||||
def has_field_changed(self, field_name):
|
||||
"""Determine if a particular field has changed."""
|
||||
|
||||
return field_name in self.get_field_deltas()
|
||||
|
||||
|
||||
class CleanMixin():
|
||||
class CleanMixin:
|
||||
"""Model mixin class which cleans inputs using the Mozilla bleach tools."""
|
||||
|
||||
# Define a list of field names which will *not* be cleaned
|
||||
@@ -74,13 +21,17 @@ class CleanMixin():
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_create(serializer)
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
return Response(
|
||||
serializer.data, status=status.HTTP_201_CREATED, headers=headers
|
||||
)
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
"""Override to clean data before processing it."""
|
||||
partial = kwargs.pop('partial', False)
|
||||
instance = self.get_object()
|
||||
serializer = self.get_serializer(instance, data=self.clean_data(request.data), partial=partial)
|
||||
serializer = self.get_serializer(
|
||||
instance, data=self.clean_data(request.data), partial=partial
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_update(serializer)
|
||||
|
||||
@@ -113,9 +64,7 @@ class CleanMixin():
|
||||
field = model._meta.get_field(field)
|
||||
|
||||
# The following field types allow newline characters
|
||||
allow_newline = [
|
||||
InvenTreeNotesField,
|
||||
]
|
||||
allow_newline = [InvenTreeNotesField]
|
||||
|
||||
for field_type in allow_newline:
|
||||
if issubclass(type(field), field_type):
|
||||
@@ -127,7 +76,9 @@ class CleanMixin():
|
||||
except FieldDoesNotExist:
|
||||
pass
|
||||
|
||||
cleaned = remove_non_printable_characters(cleaned, remove_newline=remove_newline)
|
||||
cleaned = remove_non_printable_characters(
|
||||
cleaned, remove_newline=remove_newline
|
||||
)
|
||||
|
||||
return cleaned
|
||||
|
||||
@@ -148,7 +99,6 @@ class CleanMixin():
|
||||
clean_data = {}
|
||||
|
||||
for k, v in data.items():
|
||||
|
||||
if k in self.SAFE_FIELDS:
|
||||
ret = v
|
||||
elif isinstance(v, str):
|
||||
@@ -177,16 +127,19 @@ class CreateAPI(CleanMixin, generics.CreateAPIView):
|
||||
|
||||
class RetrieveAPI(generics.RetrieveAPIView):
|
||||
"""View for retrieve API."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
|
||||
"""View for retrieve and update API."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class CustomDestroyModelMixin:
|
||||
"""This mixin was created pass the kwargs from the API to the models."""
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
"""Custom destroy method to pass kwargs."""
|
||||
instance = self.get_object()
|
||||
@@ -198,11 +151,14 @@ class CustomDestroyModelMixin:
|
||||
instance.delete(**kwargs)
|
||||
|
||||
|
||||
class CustomRetrieveUpdateDestroyAPIView(mixins.RetrieveModelMixin,
|
||||
mixins.UpdateModelMixin,
|
||||
CustomDestroyModelMixin,
|
||||
generics.GenericAPIView):
|
||||
class CustomRetrieveUpdateDestroyAPIView(
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.UpdateModelMixin,
|
||||
CustomDestroyModelMixin,
|
||||
generics.GenericAPIView,
|
||||
):
|
||||
"""This APIView was created pass the kwargs from the API to the models."""
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Custom get method to pass kwargs."""
|
||||
return self.retrieve(request, *args, **kwargs)
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
|
||||
@@ -30,18 +29,98 @@ from InvenTree.sanitizer import sanitize_svg
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def rename_attachment(instance, filename):
|
||||
"""Function for renaming an attachment file. The subdirectory for the uploaded file is determined by the implementing class.
|
||||
class DiffMixin:
|
||||
"""Mixin which can be used to determine which fields have changed, compared to the instance saved to the database."""
|
||||
|
||||
Args:
|
||||
instance: Instance of a PartAttachment object
|
||||
filename: name of uploaded file
|
||||
def get_db_instance(self):
|
||||
"""Return the instance of the object saved in the database.
|
||||
|
||||
Returns:
|
||||
path to store file, format: '<subdir>/<id>/filename'
|
||||
Returns:
|
||||
object: Instance of the object saved in the database
|
||||
"""
|
||||
if self.pk:
|
||||
try:
|
||||
return self.__class__.objects.get(pk=self.pk)
|
||||
except self.__class__.DoesNotExist:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def get_field_deltas(self):
|
||||
"""Return a dict of field deltas.
|
||||
|
||||
Compares the current instance with the instance saved in the database,
|
||||
and returns a dict of fields which have changed.
|
||||
|
||||
Returns:
|
||||
dict: Dict of field deltas
|
||||
"""
|
||||
db_instance = self.get_db_instance()
|
||||
|
||||
if db_instance is None:
|
||||
return {}
|
||||
|
||||
deltas = {}
|
||||
|
||||
for field in self._meta.fields:
|
||||
if field.name == 'id':
|
||||
continue
|
||||
|
||||
if getattr(self, field.name) != getattr(db_instance, field.name):
|
||||
deltas[field.name] = {
|
||||
'old': getattr(db_instance, field.name),
|
||||
'new': getattr(self, field.name),
|
||||
}
|
||||
|
||||
return deltas
|
||||
|
||||
def has_field_changed(self, field_name):
|
||||
"""Determine if a particular field has changed."""
|
||||
return field_name in self.get_field_deltas()
|
||||
|
||||
|
||||
class PluginValidationMixin(DiffMixin):
|
||||
"""Mixin class which exposes the model instance to plugin validation.
|
||||
|
||||
Any model class which inherits from this mixin will be exposed to the plugin validation system.
|
||||
"""
|
||||
# Construct a path to store a file attachment for a given model type
|
||||
return os.path.join(instance.getSubdir(), filename)
|
||||
|
||||
def run_plugin_validation(self):
|
||||
"""Throw this model against the plugin validation interface."""
|
||||
from plugin.registry import registry
|
||||
|
||||
deltas = self.get_field_deltas()
|
||||
|
||||
for plugin in registry.with_mixin('validation'):
|
||||
try:
|
||||
if plugin.validate_model_instance(self, deltas=deltas) is True:
|
||||
return
|
||||
except ValidationError as exc:
|
||||
raise exc
|
||||
except Exception as exc:
|
||||
# Log the exception to the database
|
||||
import InvenTree.exceptions
|
||||
|
||||
InvenTree.exceptions.log_error(
|
||||
f'plugins.{plugin.slug}.validate_model_instance'
|
||||
)
|
||||
raise ValidationError(_('Error running plugin validation'))
|
||||
|
||||
def full_clean(self, *args, **kwargs):
|
||||
"""Run plugin validation on full model clean.
|
||||
|
||||
Note that plugin validation is performed *after* super.full_clean()
|
||||
"""
|
||||
super().full_clean(*args, **kwargs)
|
||||
self.run_plugin_validation()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Run plugin validation on model save.
|
||||
|
||||
Note that plugin validation is performed *before* super.save()
|
||||
"""
|
||||
self.run_plugin_validation()
|
||||
super().save(*args, **kwargs)
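To illustrate the hook that run_plugin_validation() calls into, here is a hypothetical validation plugin. This is a sketch only: it assumes the ValidationMixin plugin API exposes a validate_model_instance(instance, deltas=None) method, matching the call made above, and the plugin name and behaviour are invented for illustration.

from django.core.exceptions import ValidationError

from plugin import InvenTreePlugin
from plugin.mixins import ValidationMixin


class NoRenamePlugin(ValidationMixin, InvenTreePlugin):
    """Example plugin which rejects changes to any model's 'name' field."""

    NAME = 'NoRenamePlugin'
    SLUG = 'norename'

    def validate_model_instance(self, instance, deltas=None):
        # 'deltas' is the DiffMixin dict of changed fields: {field: {'old': ..., 'new': ...}}
        if deltas and 'name' in deltas:
            raise ValidationError({'name': 'Renaming this object is not permitted'})
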
|
||||
|
||||
|
||||
class MetadataMixin(models.Model):
|
||||
@@ -58,6 +137,7 @@ class MetadataMixin(models.Model):
|
||||
|
||||
class Meta:
|
||||
"""Meta for MetadataMixin."""
|
||||
|
||||
abstract = True
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
@@ -78,10 +158,13 @@ class MetadataMixin(models.Model):
|
||||
self.metadata = {}
|
||||
|
||||
if type(self.metadata) is not dict:
|
||||
raise ValidationError({'metadata': _('Metadata must be a python dict object')})
|
||||
raise ValidationError({
|
||||
'metadata': _('Metadata must be a python dict object')
|
||||
})
|
||||
|
||||
metadata = models.JSONField(
|
||||
blank=True, null=True,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('Plugin Metadata'),
|
||||
help_text=_('JSON metadata field, for use by external plugins'),
|
||||
)
|
||||
@@ -91,6 +174,7 @@ class MetadataMixin(models.Model):
|
||||
|
||||
Args:
|
||||
key: String key for requesting metadata. e.g. if a plugin is accessing the metadata, the plugin slug should be used
|
||||
backup_value: Value that should be used if no value is found
|
||||
|
||||
Returns:
|
||||
Python dict object containing requested metadata. If no matching metadata is found, returns None
|
||||
@@ -100,7 +184,9 @@ class MetadataMixin(models.Model):
|
||||
|
||||
return self.metadata.get(key, backup_value)
|
||||
|
||||
def set_metadata(self, key: str, data, commit: bool = True, overwrite: bool = False):
|
||||
def set_metadata(
|
||||
self, key: str, data, commit: bool = True, overwrite: bool = False
|
||||
):
|
||||
"""Save the provided metadata under the provided key.
|
||||
|
||||
Args:
|
||||
@@ -137,7 +223,6 @@ class DataImportMixin(object):
|
||||
fields = cls.IMPORT_FIELDS
|
||||
|
||||
for name, field in fields.items():
|
||||
|
||||
# Attempt to extract base field information from the model
|
||||
base_field = None
|
||||
|
||||
@@ -207,7 +292,10 @@ class ReferenceIndexingMixin(models.Model):
|
||||
|
||||
# import at function level to prevent cyclic imports
|
||||
from common.models import InvenTreeSetting
|
||||
return InvenTreeSetting.get_setting(cls.REFERENCE_PATTERN_SETTING, create=False).strip()
|
||||
|
||||
return InvenTreeSetting.get_setting(
|
||||
cls.REFERENCE_PATTERN_SETTING, create=False
|
||||
).strip()
|
||||
|
||||
@classmethod
|
||||
def get_reference_context(cls):
|
||||
@@ -216,14 +304,11 @@ class ReferenceIndexingMixin(models.Model):
|
||||
- Returns a python dict object which contains the context data for formatting the reference string.
|
||||
- The default implementation provides some default context information
|
||||
"""
|
||||
return {
|
||||
'ref': cls.get_next_reference(),
|
||||
'date': datetime.now(),
|
||||
}
|
||||
return {'ref': cls.get_next_reference(), 'date': datetime.now()}
|
||||
|
||||
@classmethod
|
||||
def get_most_recent_item(cls):
|
||||
"""Return the item which is 'most recent'
|
||||
"""Return the item which is 'most recent'.
|
||||
|
||||
In practice, this means the item with the highest reference value
|
||||
"""
|
||||
@@ -246,7 +331,9 @@ class ReferenceIndexingMixin(models.Model):
|
||||
reference = latest.reference.strip
|
||||
|
||||
try:
|
||||
reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
|
||||
reference = InvenTree.format.extract_named_group(
|
||||
'ref', reference, cls.get_reference_pattern()
|
||||
)
|
||||
except Exception:
|
||||
# If reference cannot be extracted using the pattern, try just the integer value
|
||||
reference = str(latest.reference_int)
|
||||
@@ -263,7 +350,7 @@ class ReferenceIndexingMixin(models.Model):
|
||||
|
||||
@classmethod
|
||||
def generate_reference(cls):
|
||||
"""Generate the next 'reference' field based on specified pattern"""
|
||||
"""Generate the next 'reference' field based on specified pattern."""
|
||||
fmt = cls.get_reference_pattern()
|
||||
ctx = cls.get_reference_context()
|
||||
|
||||
@@ -296,51 +383,53 @@ class ReferenceIndexingMixin(models.Model):
|
||||
if recent:
|
||||
reference = recent.reference
|
||||
else:
|
||||
reference = ""
|
||||
reference = ''
|
||||
|
||||
return reference
|
||||
|
||||
@classmethod
|
||||
def validate_reference_pattern(cls, pattern):
|
||||
"""Ensure that the provided pattern is valid"""
|
||||
"""Ensure that the provided pattern is valid."""
|
||||
ctx = cls.get_reference_context()
|
||||
|
||||
try:
|
||||
info = InvenTree.format.parse_format_string(pattern)
|
||||
except Exception as exc:
|
||||
raise ValidationError({
|
||||
"value": _("Improperly formatted pattern") + ": " + str(exc)
|
||||
'value': _('Improperly formatted pattern') + ': ' + str(exc)
|
||||
})
|
||||
|
||||
# Check that only 'allowed' keys are provided
|
||||
for key in info.keys():
|
||||
if key not in ctx.keys():
|
||||
raise ValidationError({
|
||||
"value": _("Unknown format key specified") + f": '{key}'"
|
||||
'value': _('Unknown format key specified') + f": '{key}'"
|
||||
})
|
||||
|
||||
# Check that the 'ref' variable is specified
|
||||
if 'ref' not in info.keys():
|
||||
raise ValidationError({
|
||||
'value': _("Missing required format key") + ": 'ref'"
|
||||
'value': _('Missing required format key') + ": 'ref'"
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def validate_reference_field(cls, value):
|
||||
"""Check that the provided 'reference' value matches the requisite pattern"""
|
||||
"""Check that the provided 'reference' value matches the requisite pattern."""
|
||||
pattern = cls.get_reference_pattern()
|
||||
|
||||
value = str(value).strip()
|
||||
|
||||
if len(value) == 0:
|
||||
raise ValidationError(_("Reference field cannot be empty"))
|
||||
raise ValidationError(_('Reference field cannot be empty'))
|
||||
|
||||
# An 'empty' pattern means no further validation is required
|
||||
if not pattern:
|
||||
return
|
||||
|
||||
if not InvenTree.format.validate_string(value, pattern):
|
||||
raise ValidationError(_("Reference must match required pattern") + ": " + pattern)
|
||||
raise ValidationError(
|
||||
_('Reference must match required pattern') + ': ' + pattern
|
||||
)
|
||||
|
||||
# Check that the reference field can be rebuild
|
||||
cls.rebuild_reference_field(value, validate=True)
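A quick sketch of what this pattern validation accepts and rejects (illustrative only, intended for a Django shell; PurchaseOrder is one of the models built on ReferenceIndexingMixin, and the patterns shown are example values rather than project defaults):

from django.core.exceptions import ValidationError

from order.models import PurchaseOrder

# Accepted: contains the required 'ref' key (zero-padded to four digits)
PurchaseOrder.validate_reference_pattern('PO-{ref:04d}')

# Rejected: the required 'ref' key is missing
try:
    PurchaseOrder.validate_reference_pattern('PO-{date}')
except ValidationError as exc:
    print(exc.message_dict)   # {'value': ["Missing required format key: 'ref'"]}
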
|
||||
@@ -361,69 +450,61 @@ class ReferenceIndexingMixin(models.Model):
|
||||
"""
|
||||
try:
|
||||
# Extract named group based on provided pattern
|
||||
reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
|
||||
reference = InvenTree.format.extract_named_group(
|
||||
'ref', reference, cls.get_reference_pattern()
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
reference_int = extract_int(reference)
|
||||
reference_int = InvenTree.helpers.extract_int(reference)
|
||||
|
||||
if validate:
|
||||
if reference_int > models.BigIntegerField.MAX_BIGINT:
|
||||
raise ValidationError({
|
||||
"reference": _("Reference number is too large")
|
||||
})
|
||||
raise ValidationError({'reference': _('Reference number is too large')})
|
||||
|
||||
return reference_int
|
||||
|
||||
reference_int = models.BigIntegerField(default=0)
|
||||
|
||||
|
||||
def extract_int(reference, clip=0x7fffffff, allow_negative=False):
|
||||
"""Extract an integer out of reference."""
|
||||
# Default value if we cannot convert to an integer
|
||||
ref_int = 0
|
||||
class InvenTreeModel(PluginValidationMixin, models.Model):
|
||||
"""Base class for InvenTree models, which provides some common functionality.
|
||||
|
||||
reference = str(reference).strip()
|
||||
Includes the following mixins by default:
|
||||
|
||||
# Ignore empty string
|
||||
if len(reference) == 0:
|
||||
return 0
|
||||
- PluginValidationMixin: Provides a hook for plugins to validate model instances
|
||||
"""
|
||||
|
||||
# Look at the start of the string - can it be "integerized"?
|
||||
result = re.match(r"^(\d+)", reference)
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
if result and len(result.groups()) == 1:
|
||||
ref = result.groups()[0]
|
||||
try:
|
||||
ref_int = int(ref)
|
||||
except Exception:
|
||||
ref_int = 0
|
||||
else:
|
||||
# Look at the "end" of the string
|
||||
result = re.search(r'(\d+)$', reference)
|
||||
|
||||
if result and len(result.groups()) == 1:
|
||||
ref = result.groups()[0]
|
||||
try:
|
||||
ref_int = int(ref)
|
||||
except Exception:
|
||||
ref_int = 0
|
||||
|
||||
# Ensure that the returned values are within the range that can be stored in an IntegerField
|
||||
# Note: This will result in large values being "clipped"
|
||||
if clip is not None:
|
||||
if ref_int > clip:
|
||||
ref_int = clip
|
||||
elif ref_int < -clip:
|
||||
ref_int = -clip
|
||||
|
||||
if not allow_negative and ref_int < 0:
|
||||
ref_int = abs(ref_int)
|
||||
|
||||
return ref_int
|
||||
abstract = True
|
||||
|
||||
|
||||
class InvenTreeAttachment(models.Model):
|
||||
class InvenTreeMetadataModel(MetadataMixin, InvenTreeModel):
|
||||
"""Base class for an InvenTree model which includes a metadata field."""
|
||||
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
abstract = True
|
||||
|
||||
|
||||
def rename_attachment(instance, filename):
|
||||
"""Function for renaming an attachment file. The subdirectory for the uploaded file is determined by the implementing class.
|
||||
|
||||
Args:
|
||||
instance: Instance of a PartAttachment object
|
||||
filename: name of uploaded file
|
||||
|
||||
Returns:
|
||||
path to store file, format: '<subdir>/<id>/filename'
|
||||
"""
|
||||
# Construct a path to store a file attachment for a given model type
|
||||
return os.path.join(instance.getSubdir(), filename)
|
||||
|
||||
|
||||
class InvenTreeAttachment(InvenTreeModel):
|
||||
"""Provides an abstracted class for managing file attachments.
|
||||
|
||||
An attachment can be either an uploaded file, or an external URL
|
||||
@@ -438,6 +519,7 @@ class InvenTreeAttachment(models.Model):
|
||||
|
||||
class Meta:
|
||||
"""Metaclass options. Abstract ensures no database table is created."""
|
||||
|
||||
abstract = True
|
||||
|
||||
def getSubdir(self):
|
||||
@@ -445,7 +527,7 @@ class InvenTreeAttachment(models.Model):
|
||||
|
||||
Note: Re-implement this for each subclass of InvenTreeAttachment
|
||||
"""
|
||||
return "attachments"
|
||||
return 'attachments'
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Provide better validation error."""
|
||||
@@ -472,28 +554,40 @@ class InvenTreeAttachment(models.Model):
|
||||
return os.path.basename(self.attachment.name)
|
||||
return str(self.link)
|
||||
|
||||
attachment = models.FileField(upload_to=rename_attachment, verbose_name=_('Attachment'),
|
||||
help_text=_('Select file to attach'),
|
||||
blank=True, null=True
|
||||
)
|
||||
|
||||
link = InvenTree.fields.InvenTreeURLField(
|
||||
blank=True, null=True,
|
||||
verbose_name=_('Link'),
|
||||
help_text=_('Link to external URL')
|
||||
attachment = models.FileField(
|
||||
upload_to=rename_attachment,
|
||||
verbose_name=_('Attachment'),
|
||||
help_text=_('Select file to attach'),
|
||||
blank=True,
|
||||
null=True,
|
||||
)
|
||||
|
||||
comment = models.CharField(blank=True, max_length=100, verbose_name=_('Comment'), help_text=_('File comment'))
|
||||
link = InvenTree.fields.InvenTreeURLField(
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('Link'),
|
||||
help_text=_('Link to external URL'),
|
||||
)
|
||||
|
||||
comment = models.CharField(
|
||||
blank=True,
|
||||
max_length=100,
|
||||
verbose_name=_('Comment'),
|
||||
help_text=_('File comment'),
|
||||
)
|
||||
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.SET_NULL,
|
||||
blank=True, null=True,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('User'),
|
||||
help_text=_('User'),
|
||||
)
|
||||
|
||||
upload_date = models.DateField(auto_now_add=True, null=True, blank=True, verbose_name=_('upload date'))
|
||||
upload_date = models.DateField(
|
||||
auto_now_add=True, null=True, blank=True, verbose_name=_('upload date')
|
||||
)
|
||||
|
||||
@property
|
||||
def basename(self):
|
||||
@@ -522,35 +616,59 @@ class InvenTreeAttachment(models.Model):
|
||||
|
||||
# Check that there are no directory tricks going on...
|
||||
if new_file.parent != attachment_dir:
|
||||
logger.error("Attempted to rename attachment outside valid directory: '%s'", new_file)
|
||||
raise ValidationError(_("Invalid attachment directory"))
|
||||
logger.error(
|
||||
"Attempted to rename attachment outside valid directory: '%s'", new_file
|
||||
)
|
||||
raise ValidationError(_('Invalid attachment directory'))
|
||||
|
||||
# Ignore further checks if the filename is not actually being renamed
|
||||
if new_file == old_file:
|
||||
return
|
||||
|
||||
forbidden = ["'", '"', "#", "@", "!", "&", "^", "<", ">", ":", ";", "/", "\\", "|", "?", "*", "%", "~", "`"]
|
||||
forbidden = [
|
||||
"'",
|
||||
'"',
|
||||
'#',
|
||||
'@',
|
||||
'!',
|
||||
'&',
|
||||
'^',
|
||||
'<',
|
||||
'>',
|
||||
':',
|
||||
';',
|
||||
'/',
|
||||
'\\',
|
||||
'|',
|
||||
'?',
|
||||
'*',
|
||||
'%',
|
||||
'~',
|
||||
'`',
|
||||
]
|
||||
|
||||
for c in forbidden:
|
||||
if c in fn:
|
||||
raise ValidationError(_(f"Filename contains illegal character '{c}'"))
|
||||
|
||||
if len(fn.split('.')) < 2:
|
||||
raise ValidationError(_("Filename missing extension"))
|
||||
raise ValidationError(_('Filename missing extension'))
|
||||
|
||||
if not old_file.exists():
|
||||
logger.error("Trying to rename attachment '%s' which does not exist", old_file)
|
||||
logger.error(
|
||||
"Trying to rename attachment '%s' which does not exist", old_file
|
||||
)
|
||||
return
|
||||
|
||||
if new_file.exists():
|
||||
raise ValidationError(_("Attachment with this filename already exists"))
|
||||
raise ValidationError(_('Attachment with this filename already exists'))
|
||||
|
||||
try:
|
||||
os.rename(old_file, new_file)
|
||||
self.attachment.name = os.path.join(self.getSubdir(), fn)
|
||||
self.save()
|
||||
except Exception:
|
||||
raise ValidationError(_("Error renaming file"))
|
||||
raise ValidationError(_('Error renaming file'))
|
||||
|
||||
def fully_qualified_url(self):
|
||||
"""Return a 'fully qualified' URL for this attachment.
|
||||
@@ -568,7 +686,7 @@ class InvenTreeAttachment(models.Model):
|
||||
return ''
|
||||
|
||||
|
||||
class InvenTreeTree(MPTTModel):
|
||||
class InvenTreeTree(MetadataMixin, PluginValidationMixin, MPTTModel):
|
||||
"""Provides an abstracted self-referencing tree model for data categories.
|
||||
|
||||
- Each Category has one parent Category, which can be blank (for a top-level Category).
|
||||
@@ -586,10 +704,12 @@ class InvenTreeTree(MPTTModel):
|
||||
|
||||
class Meta:
|
||||
"""Metaclass defines extra model properties."""
|
||||
|
||||
abstract = True
|
||||
|
||||
class MPTTMeta:
|
||||
"""Set insert order."""
|
||||
|
||||
order_insertion_by = ['name']
|
||||
|
||||
def delete(self, delete_children=False, delete_items=False):
|
||||
@@ -607,13 +727,19 @@ class InvenTreeTree(MPTTModel):
|
||||
self.refresh_from_db()
|
||||
except self.__class__.DoesNotExist:
|
||||
# If the object no longer exists, raise a ValidationError
|
||||
raise ValidationError("Object %s of type %s no longer exists", str(self), str(self.__class__))
|
||||
raise ValidationError(
|
||||
'Object %s of type %s no longer exists', str(self), str(self.__class__)
|
||||
)
|
||||
|
||||
# Cache node ID values for lower nodes, before we delete this one
|
||||
lower_nodes = list(self.get_descendants(include_self=False).values_list('pk', flat=True))
|
||||
lower_nodes = list(
|
||||
self.get_descendants(include_self=False).values_list('pk', flat=True)
|
||||
)
|
||||
|
||||
# 1. Update nodes and items under the current node
|
||||
self.handle_tree_delete(delete_children=delete_children, delete_items=delete_items)
|
||||
self.handle_tree_delete(
|
||||
delete_children=delete_children, delete_items=delete_items
|
||||
)
|
||||
|
||||
# 2. Delete *this* node
|
||||
super().delete()
|
||||
@@ -659,7 +785,6 @@ class InvenTreeTree(MPTTModel):
|
||||
C) delete_children = False and delete_items = True
|
||||
D) delete_children = False and delete_items = False
|
||||
"""
|
||||
|
||||
child_nodes = self.get_descendants(include_self=False)
|
||||
|
||||
# Case A: Delete all child items, and all child nodes.
|
||||
@@ -673,9 +798,7 @@ class InvenTreeTree(MPTTModel):
|
||||
# - Move all items at any lower level to the parent of this item
|
||||
# - Delete all descendant nodes
|
||||
elif delete_children and not delete_items:
|
||||
self.get_items(cascade=True).update(**{
|
||||
self.ITEM_PARENT_KEY: self.parent
|
||||
})
|
||||
self.get_items(cascade=True).update(**{self.ITEM_PARENT_KEY: self.parent})
|
||||
|
||||
self.delete_nodes(child_nodes)
|
||||
|
||||
@@ -690,9 +813,7 @@ class InvenTreeTree(MPTTModel):
|
||||
# - Move all items directly associated with this node up one level
|
||||
# - Move any direct child nodes up one level
|
||||
elif not delete_children and not delete_items:
|
||||
self.get_items(cascade=False).update(**{
|
||||
self.ITEM_PARENT_KEY: self.parent
|
||||
})
|
||||
self.get_items(cascade=False).update(**{self.ITEM_PARENT_KEY: self.parent})
|
||||
self.get_children().update(parent=self.parent)
|
||||
|
||||
def delete_nodes(self, nodes):
|
||||
@@ -706,7 +827,6 @@ class InvenTreeTree(MPTTModel):
|
||||
Arguments:
|
||||
nodes: A queryset of nodes to delete
|
||||
"""
|
||||
|
||||
nodes.update(parent=None)
|
||||
nodes.delete()
|
||||
|
||||
@@ -719,8 +839,7 @@ class InvenTreeTree(MPTTModel):
|
||||
super().validate_unique(exclude)
|
||||
|
||||
results = self.__class__.objects.filter(
|
||||
name=self.name,
|
||||
parent=self.parent
|
||||
name=self.name, parent=self.parent
|
||||
).exclude(pk=self.pk)
|
||||
|
||||
if results.exists():
|
||||
@@ -730,33 +849,24 @@ class InvenTreeTree(MPTTModel):
|
||||
|
||||
def api_instance_filters(self):
|
||||
"""Instance filters for InvenTreeTree models."""
|
||||
return {
|
||||
'parent': {
|
||||
'exclude_tree': self.pk,
|
||||
}
|
||||
}
|
||||
return {'parent': {'exclude_tree': self.pk}}
|
||||
|
||||
def construct_pathstring(self):
|
||||
"""Construct the pathstring for this tree node"""
|
||||
return InvenTree.helpers.constructPathString(
|
||||
[item.name for item in self.path]
|
||||
)
|
||||
"""Construct the pathstring for this tree node."""
|
||||
return InvenTree.helpers.constructPathString([item.name for item in self.path])
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Custom save method for InvenTreeTree abstract model"""
|
||||
"""Custom save method for InvenTreeTree abstract model."""
|
||||
try:
|
||||
super().save(*args, **kwargs)
|
||||
except InvalidMove:
|
||||
# Provide better error for parent selection
|
||||
raise ValidationError({
|
||||
'parent': _("Invalid choice"),
|
||||
})
|
||||
raise ValidationError({'parent': _('Invalid choice')})
|
||||
|
||||
# Re-calculate the 'pathstring' field
|
||||
pathstring = self.construct_pathstring()
|
||||
|
||||
if pathstring != self.pathstring:
|
||||
|
||||
if 'force_insert' in kwargs:
|
||||
del kwargs['force_insert']
|
||||
|
||||
@@ -781,33 +891,29 @@ class InvenTreeTree(MPTTModel):
|
||||
self.__class__.objects.bulk_update(nodes_to_update, ['pathstring'])
|
||||
|
||||
name = models.CharField(
|
||||
blank=False,
|
||||
max_length=100,
|
||||
verbose_name=_("Name"),
|
||||
help_text=_("Name"),
|
||||
blank=False, max_length=100, verbose_name=_('Name'), help_text=_('Name')
|
||||
)
|
||||
|
||||
description = models.CharField(
|
||||
blank=True,
|
||||
max_length=250,
|
||||
verbose_name=_("Description"),
|
||||
help_text=_("Description (optional)")
|
||||
verbose_name=_('Description'),
|
||||
help_text=_('Description (optional)'),
|
||||
)
|
||||
|
||||
# When a category is deleted, graft the children onto its parent
|
||||
parent = TreeForeignKey('self',
|
||||
on_delete=models.DO_NOTHING,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_("parent"),
|
||||
related_name='children')
|
||||
parent = TreeForeignKey(
|
||||
'self',
|
||||
on_delete=models.DO_NOTHING,
|
||||
blank=True,
|
||||
null=True,
|
||||
verbose_name=_('parent'),
|
||||
related_name='children',
|
||||
)
|
||||
|
||||
# The 'pathstring' field is calculated each time the model is saved
|
||||
pathstring = models.CharField(
|
||||
blank=True,
|
||||
max_length=250,
|
||||
verbose_name=_('Path'),
|
||||
help_text=_('Path')
|
||||
blank=True, max_length=250, verbose_name=_('Path'), help_text=_('Path')
|
||||
)
|
||||
|
||||
def get_items(self, cascade=False):
|
||||
@@ -818,7 +924,7 @@ class InvenTreeTree(MPTTModel):
|
||||
|
||||
The default implementation returns an empty list
|
||||
"""
|
||||
raise NotImplementedError(f"items() method not implemented for {type(self)}")
|
||||
raise NotImplementedError(f'items() method not implemented for {type(self)}')
|
||||
|
||||
def getUniqueParents(self):
|
||||
"""Return a flat set of all parent items that exist above this node.
|
||||
@@ -889,16 +995,11 @@ class InvenTreeTree(MPTTModel):
|
||||
name: <name>,
|
||||
}
|
||||
"""
|
||||
return [
|
||||
{
|
||||
'pk': item.pk,
|
||||
'name': item.name
|
||||
} for item in self.path
|
||||
]
|
||||
return [{'pk': item.pk, 'name': item.name} for item in self.path]
|
||||
|
||||
def __str__(self):
|
||||
"""String representation of a category is the full path to that category."""
|
||||
return f"{self.pathstring} - {self.description}"
|
||||
return f'{self.pathstring} - {self.description}'
|
||||
|
||||
|
||||
class InvenTreeNotesMixin(models.Model):
|
||||
@@ -914,11 +1015,11 @@ class InvenTreeNotesMixin(models.Model):
|
||||
|
||||
Note: abstract must be true, as this is only a mixin, not a separate table
|
||||
"""
|
||||
|
||||
abstract = True
|
||||
|
||||
notes = InvenTree.fields.InvenTreeNotesField(
|
||||
verbose_name=_('Notes'),
|
||||
help_text=_('Markdown notes (optional)'),
|
||||
verbose_name=_('Notes'), help_text=_('Markdown notes (optional)')
|
||||
)
|
||||
|
||||
|
||||
@@ -941,18 +1042,21 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
|
||||
Note: abstract must be true, as this is only a mixin, not a separate table
|
||||
"""
|
||||
|
||||
abstract = True
|
||||
|
||||
barcode_data = models.CharField(
|
||||
blank=True, max_length=500,
|
||||
blank=True,
|
||||
max_length=500,
|
||||
verbose_name=_('Barcode Data'),
|
||||
help_text=_('Third party barcode data'),
|
||||
)
|
||||
|
||||
barcode_hash = models.CharField(
|
||||
blank=True, max_length=128,
|
||||
blank=True,
|
||||
max_length=128,
|
||||
verbose_name=_('Barcode Hash'),
|
||||
help_text=_('Unique hash of barcode data')
|
||||
help_text=_('Unique hash of barcode data'),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@@ -964,21 +1068,16 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
def format_barcode(self, **kwargs):
|
||||
"""Return a JSON string for formatting a QR code for this model instance."""
|
||||
return InvenTree.helpers.MakeBarcode(
|
||||
self.__class__.barcode_model_type(),
|
||||
self.pk,
|
||||
**kwargs
|
||||
self.__class__.barcode_model_type(), self.pk, **kwargs
|
||||
)
|
||||
|
||||
def format_matched_response(self):
|
||||
"""Format a standard response for a matched barcode."""
|
||||
|
||||
data = {
|
||||
'pk': self.pk,
|
||||
}
|
||||
data = {'pk': self.pk}
|
||||
|
||||
if hasattr(self, 'get_api_url'):
|
||||
api_url = self.get_api_url()
|
||||
data['api_url'] = f"{api_url}{self.pk}/"
|
||||
data['api_url'] = f'{api_url}{self.pk}/'
|
||||
|
||||
if hasattr(self, 'get_absolute_url'):
|
||||
data['web_url'] = self.get_absolute_url()
|
||||
@@ -987,7 +1086,7 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
|
||||
@property
|
||||
def barcode(self):
|
||||
"""Format a minimal barcode string (e.g. for label printing)"""
|
||||
"""Format a minimal barcode string (e.g. for label printing)."""
|
||||
return self.format_barcode(brief=True)
|
||||
|
||||
@classmethod
|
||||
@@ -995,7 +1094,9 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
"""Check if a model instance exists with the specified third-party barcode hash."""
|
||||
return cls.objects.filter(barcode_hash=barcode_hash).first()
|
||||
|
||||
def assign_barcode(self, barcode_hash=None, barcode_data=None, raise_error=True, save=True):
|
||||
def assign_barcode(
|
||||
self, barcode_hash=None, barcode_data=None, raise_error=True, save=True
|
||||
):
|
||||
"""Assign an external (third-party) barcode to this object."""
|
||||
# Must provide either barcode_hash or barcode_data
|
||||
if barcode_hash is None and barcode_data is None:
|
||||
@@ -1008,7 +1109,7 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
# Check for existing item
|
||||
if self.__class__.lookup_barcode(barcode_hash) is not None:
|
||||
if raise_error:
|
||||
raise ValidationError(_("Existing barcode found"))
|
||||
raise ValidationError(_('Existing barcode found'))
|
||||
else:
|
||||
return False
|
||||
|
||||
@@ -1023,7 +1124,7 @@ class InvenTreeBarcodeMixin(models.Model):
|
||||
return True
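A hedged usage sketch for the mixin: 'item' is any model instance that inherits InvenTreeBarcodeMixin (a StockItem, say), and the MD5 hash below is only a stand-in for whatever hashing helper the barcode machinery actually applies.

import hashlib

barcode_data = 'SUPPLIER-BOX-0001'  # raw third-party barcode content
barcode_hash = hashlib.md5(barcode_data.encode()).hexdigest()  # illustrative hash only

# Returns False (instead of raising) if another object already owns this barcode
assigned = item.assign_barcode(
    barcode_data=barcode_data,
    barcode_hash=barcode_hash,
    raise_error=False,
)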
|
||||
|
||||
def unassign_barcode(self):
|
||||
"""Unassign custom barcode from this model"""
|
||||
"""Unassign custom barcode from this model."""
|
||||
self.barcode_data = ''
|
||||
self.barcode_hash = ''
|
||||
|
||||
@@ -1044,20 +1145,24 @@ def after_error_logged(sender, instance: Error, created: bool, **kwargs):
|
||||
users = get_user_model().objects.filter(is_staff=True)
|
||||
|
||||
link = InvenTree.helpers_model.construct_absolute_url(
|
||||
reverse('admin:error_report_error_change', kwargs={'object_id': instance.pk})
|
||||
reverse(
|
||||
'admin:error_report_error_change', kwargs={'object_id': instance.pk}
|
||||
)
|
||||
)
|
||||
|
||||
context = {
|
||||
'error': instance,
|
||||
'name': _('Server Error'),
|
||||
'message': _('An error has been logged by the server.'),
|
||||
'link': link
|
||||
'link': link,
|
||||
}
|
||||
|
||||
target_users = []
|
||||
|
||||
for user in users:
|
||||
if common.models.InvenTreeUserSetting.get_setting('NOTIFICATION_ERROR_REPORT', True, user=user):
|
||||
if common.models.InvenTreeUserSetting.get_setting(
|
||||
'NOTIFICATION_ERROR_REPORT', True, user=user
|
||||
):
|
||||
target_users.append(user)
|
||||
|
||||
if len(target_users) > 0:
|
||||
@@ -1066,7 +1171,7 @@ def after_error_logged(sender, instance: Error, created: bool, **kwargs):
|
||||
'inventree.error_log',
|
||||
context=context,
|
||||
targets=target_users,
|
||||
delivery_methods={common.notifications.UIMessageNotification, },
|
||||
delivery_methods={common.notifications.UIMessageNotification},
|
||||
)
|
||||
|
||||
except Exception as exc:
|
||||
|
||||
@@ -8,7 +8,7 @@ import users.models
|
||||
|
||||
|
||||
def get_model_for_view(view, raise_error=True):
|
||||
"""Attempt to introspect the 'model' type for an API view"""
|
||||
"""Attempt to introspect the 'model' type for an API view."""
|
||||
if hasattr(view, 'get_permission_model'):
|
||||
return view.get_permission_model()
|
||||
|
||||
@@ -18,7 +18,7 @@ def get_model_for_view(view, raise_error=True):
|
||||
if hasattr(view, 'get_serializer_class'):
|
||||
return view.get_serializer_class().Meta.model
|
||||
|
||||
raise AttributeError(f"Serializer class not specified for {view.__class__}")
|
||||
raise AttributeError(f'Serializer class not specified for {view.__class__}')
|
||||
|
||||
|
||||
class RolePermission(permissions.BasePermission):
|
||||
@@ -62,13 +62,17 @@ class RolePermission(permissions.BasePermission):
|
||||
}
|
||||
|
||||
# let the view define a custom rolemap
|
||||
if hasattr(view, "rolemap"):
|
||||
if hasattr(view, 'rolemap'):
|
||||
rolemap.update(view.rolemap)
|
||||
|
||||
permission = rolemap[request.method]
|
||||
|
||||
# The required role may be defined for the view class
|
||||
if role := getattr(view, 'role_required', None):
|
||||
# If the role is specified as "role.permission", split it
|
||||
if '.' in role:
|
||||
role, permission = role.split('.')
|
||||
|
||||
return users.models.check_user_role(user, role, permission)
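To illustrate the role_required shortcut handled above, a view class might opt in like this (a sketch; the import path for RolePermission is assumed):

from rest_framework.views import APIView

from InvenTree.permissions import RolePermission  # assumed import path

class ExampleReportView(APIView):
    permission_classes = [RolePermission]

    # Interpreted as role='sales_order', permission='view' for every HTTP method
    role_required = 'sales_order.view'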
|
||||
|
||||
try:
|
||||
@@ -78,7 +82,7 @@ class RolePermission(permissions.BasePermission):
|
||||
app_label = model._meta.app_label
|
||||
model_name = model._meta.model_name
|
||||
|
||||
table = f"{app_label}_{model_name}"
|
||||
table = f'{app_label}_{model_name}'
|
||||
except AttributeError:
|
||||
# We will assume that if the serializer class does *not* have a Meta,
|
||||
# then we don't need a permission
|
||||
@@ -100,12 +104,18 @@ class IsStaffOrReadOnly(permissions.IsAdminUser):
|
||||
|
||||
def has_permission(self, request, view):
|
||||
"""Check if the user is a superuser."""
|
||||
return bool(request.user and request.user.is_staff or request.method in permissions.SAFE_METHODS)
|
||||
return bool(
|
||||
request.user
|
||||
and request.user.is_staff
|
||||
or request.method in permissions.SAFE_METHODS
|
||||
)
|
||||
|
||||
|
||||
def auth_exempt(view_func):
|
||||
"""Mark a view function as being exempt from auth requirements."""
|
||||
|
||||
def wrapped_view(*args, **kwargs):
|
||||
return view_func(*args, **kwargs)
|
||||
|
||||
wrapped_view.auth_exempt = True
|
||||
return wraps(view_func)(wrapped_view)
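Usage sketch for the decorator (the middleware that consumes the auth_exempt flag is assumed and not shown in this diff):

from django.http import JsonResponse

@auth_exempt
def healthcheck(request):
    return JsonResponse({'status': 'ok'})

# The flag survives functools.wraps(), so downstream middleware can inspect it
assert healthcheck.auth_exempt is True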
|
||||
|
||||
@@ -10,13 +10,64 @@ def isInTestMode():
|
||||
|
||||
|
||||
def isImportingData():
|
||||
"""Returns True if the database is currently importing data, e.g. 'loaddata' command is performed."""
|
||||
return 'loaddata' in sys.argv
|
||||
"""Returns True if the database is currently importing (or exporting) data, e.g. 'loaddata' command is performed."""
|
||||
return any((x in sys.argv for x in ['flush', 'loaddata', 'dumpdata']))
|
||||
|
||||
|
||||
def isRunningMigrations():
|
||||
"""Return True if the database is currently running migrations."""
|
||||
return 'migrate' in sys.argv or 'makemigrations' in sys.argv
|
||||
return any(
|
||||
(
|
||||
x in sys.argv
|
||||
for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def isRebuildingData():
|
||||
"""Return true if any of the rebuilding commands are being executed."""
|
||||
return any(
|
||||
(
|
||||
x in sys.argv
|
||||
for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def isRunningBackup():
|
||||
"""Return true if any of the backup commands are being executed."""
|
||||
return any(
|
||||
(
|
||||
x in sys.argv
|
||||
for x in [
|
||||
'backup',
|
||||
'restore',
|
||||
'dbbackup',
|
||||
'dbrestore',
|
||||
'mediabackup',
|
||||
'mediarestore',
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def isInWorkerThread():
|
||||
"""Returns True if the current thread is a background worker thread."""
|
||||
return 'qcluster' in sys.argv
|
||||
|
||||
|
||||
def isInServerThread():
|
||||
"""Returns True if the current thread is a server thread."""
|
||||
if isInWorkerThread():
|
||||
return False
|
||||
|
||||
if 'runserver' in sys.argv:
|
||||
return True
|
||||
|
||||
if 'gunicorn' in sys.argv[0]:
|
||||
return True
|
||||
|
||||
return False
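All of these helpers reduce to inspecting sys.argv; the pattern in isolation, independent of Django, is simply:

import sys

def command_in_argv(commands):
    """Return True if any of the given management commands appear in sys.argv."""
    return any(cmd in sys.argv for cmd in commands)

# e.g. a process started as 'python manage.py migrate'
sys.argv = ['manage.py', 'migrate']
print(command_in_argv(['migrate', 'makemigrations']))  # True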
|
||||
|
||||
|
||||
def isInMainThread():
|
||||
@@ -25,38 +76,44 @@ def isInMainThread():
|
||||
- The RUN_MAIN env is set in that case. However if --noreload is applied, this variable
|
||||
is not set because there are no different threads.
|
||||
"""
|
||||
if "runserver" in sys.argv and "--noreload" not in sys.argv:
|
||||
return os.environ.get('RUN_MAIN', None) == "true"
|
||||
if 'runserver' in sys.argv and '--noreload' not in sys.argv:
|
||||
return os.environ.get('RUN_MAIN', None) == 'true'
|
||||
|
||||
return True
|
||||
return not isInWorkerThread()
|
||||
|
||||
|
||||
def canAppAccessDatabase(allow_test: bool = False, allow_plugins: bool = False, allow_shell: bool = False):
|
||||
def canAppAccessDatabase(
|
||||
allow_test: bool = False, allow_plugins: bool = False, allow_shell: bool = False
|
||||
):
|
||||
"""Returns True if the apps.py file can access database records.
|
||||
|
||||
There are some circumstances where we don't want the ready function in apps.py
|
||||
to touch the database
|
||||
"""
|
||||
# Prevent database access if we are running backups
|
||||
if isRunningBackup():
|
||||
return False
|
||||
|
||||
# Prevent database access if we are importing data
|
||||
if isImportingData():
|
||||
return False
|
||||
|
||||
# Prevent database access if we are rebuilding data
|
||||
if isRebuildingData():
|
||||
return False
|
||||
|
||||
# Prevent database access if we are running migrations
|
||||
if not allow_plugins and isRunningMigrations():
|
||||
return False
|
||||
|
||||
# If any of the following management commands are being executed,
|
||||
# prevent custom "on load" code from running!
|
||||
excluded_commands = [
|
||||
'flush',
|
||||
'loaddata',
|
||||
'dumpdata',
|
||||
'check',
|
||||
'createsuperuser',
|
||||
'wait_for_db',
|
||||
'prerender',
|
||||
'rebuild_models',
|
||||
'rebuild_thumbnails',
|
||||
'makemessages',
|
||||
'compilemessages',
|
||||
'backup',
|
||||
'dbbackup',
|
||||
'mediabackup',
|
||||
'restore',
|
||||
'dbrestore',
|
||||
'mediarestore',
|
||||
]
|
||||
|
||||
if not allow_shell:
|
||||
@@ -67,12 +124,7 @@ def canAppAccessDatabase(allow_test: bool = False, allow_plugins: bool = False,
|
||||
excluded_commands.append('test')
|
||||
|
||||
if not allow_plugins:
|
||||
excluded_commands.extend([
|
||||
'makemigrations',
|
||||
'showmigrations',
|
||||
'migrate',
|
||||
'collectstatic',
|
||||
])
|
||||
excluded_commands.extend(['collectstatic'])
|
||||
|
||||
for cmd in excluded_commands:
|
||||
if cmd in sys.argv:
|
||||
|
||||
@@ -1,49 +1,196 @@
|
||||
"""Functions to sanitize user input files."""
|
||||
|
||||
from bleach import clean
|
||||
from bleach.css_sanitizer import CSSSanitizer
|
||||
|
||||
ALLOWED_ELEMENTS_SVG = [
|
||||
'a', 'animate', 'animateColor', 'animateMotion',
|
||||
'animateTransform', 'circle', 'defs', 'desc', 'ellipse', 'font-face',
|
||||
'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
|
||||
'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph',
|
||||
'mpath', 'path', 'polygon', 'polyline', 'radialGradient', 'rect',
|
||||
'set', 'stop', 'svg', 'switch', 'text', 'title', 'tspan', 'use'
|
||||
'a',
|
||||
'animate',
|
||||
'animateColor',
|
||||
'animateMotion',
|
||||
'animateTransform',
|
||||
'circle',
|
||||
'defs',
|
||||
'desc',
|
||||
'ellipse',
|
||||
'font-face',
|
||||
'font-face-name',
|
||||
'font-face-src',
|
||||
'g',
|
||||
'glyph',
|
||||
'hkern',
|
||||
'linearGradient',
|
||||
'line',
|
||||
'marker',
|
||||
'metadata',
|
||||
'missing-glyph',
|
||||
'mpath',
|
||||
'path',
|
||||
'polygon',
|
||||
'polyline',
|
||||
'radialGradient',
|
||||
'rect',
|
||||
'set',
|
||||
'stop',
|
||||
'svg',
|
||||
'switch',
|
||||
'text',
|
||||
'title',
|
||||
'tspan',
|
||||
'use',
|
||||
]
|
||||
|
||||
ALLOWED_ATTRIBUTES_SVG = [
|
||||
'accent-height', 'accumulate', 'additive', 'alphabetic',
|
||||
'arabic-form', 'ascent', 'attributeName', 'attributeType',
|
||||
'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
|
||||
'class', 'color', 'color-rendering', 'content', 'cx', 'cy', 'd', 'dx',
|
||||
'dy', 'descent', 'display', 'dur', 'end', 'fill', 'fill-opacity',
|
||||
'fill-rule', 'font-family', 'font-size', 'font-stretch', 'font-style',
|
||||
'font-variant', 'font-weight', 'from', 'fx', 'fy', 'g1', 'g2',
|
||||
'glyph-name', 'gradientUnits', 'hanging', 'height', 'horiz-adv-x',
|
||||
'horiz-origin-x', 'id', 'ideographic', 'k', 'keyPoints',
|
||||
'keySplines', 'keyTimes', 'lang', 'marker-end', 'marker-mid',
|
||||
'marker-start', 'markerHeight', 'markerUnits', 'markerWidth',
|
||||
'mathematical', 'max', 'min', 'name', 'offset', 'opacity', 'orient',
|
||||
'origin', 'overline-position', 'overline-thickness', 'panose-1',
|
||||
'path', 'pathLength', 'points', 'preserveAspectRatio', 'r', 'refX',
|
||||
'refY', 'repeatCount', 'repeatDur', 'requiredExtensions',
|
||||
'requiredFeatures', 'restart', 'rotate', 'rx', 'ry', 'slope',
|
||||
'stemh', 'stemv', 'stop-color', 'stop-opacity',
|
||||
'strikethrough-position', 'strikethrough-thickness', 'stroke',
|
||||
'stroke-dasharray', 'stroke-dashoffset', 'stroke-linecap',
|
||||
'stroke-linejoin', 'stroke-miterlimit', 'stroke-opacity',
|
||||
'stroke-width', 'systemLanguage', 'target', 'text-anchor', 'to',
|
||||
'transform', 'type', 'u1', 'u2', 'underline-position',
|
||||
'underline-thickness', 'unicode', 'unicode-range', 'units-per-em',
|
||||
'values', 'version', 'viewBox', 'visibility', 'width', 'widths', 'x',
|
||||
'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
|
||||
'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title',
|
||||
'xlink:type', 'xml:base', 'xml:lang', 'xml:space', 'xmlns',
|
||||
'xmlns:xlink', 'y', 'y1', 'y2', 'zoomAndPan', 'style'
|
||||
'accent-height',
|
||||
'accumulate',
|
||||
'additive',
|
||||
'alphabetic',
|
||||
'arabic-form',
|
||||
'ascent',
|
||||
'attributeName',
|
||||
'attributeType',
|
||||
'baseProfile',
|
||||
'bbox',
|
||||
'begin',
|
||||
'by',
|
||||
'calcMode',
|
||||
'cap-height',
|
||||
'class',
|
||||
'color',
|
||||
'color-rendering',
|
||||
'content',
|
||||
'cx',
|
||||
'cy',
|
||||
'd',
|
||||
'dx',
|
||||
'dy',
|
||||
'descent',
|
||||
'display',
|
||||
'dur',
|
||||
'end',
|
||||
'fill',
|
||||
'fill-opacity',
|
||||
'fill-rule',
|
||||
'font-family',
|
||||
'font-size',
|
||||
'font-stretch',
|
||||
'font-style',
|
||||
'font-variant',
|
||||
'font-weight',
|
||||
'from',
|
||||
'fx',
|
||||
'fy',
|
||||
'g1',
|
||||
'g2',
|
||||
'glyph-name',
|
||||
'gradientUnits',
|
||||
'hanging',
|
||||
'height',
|
||||
'horiz-adv-x',
|
||||
'horiz-origin-x',
|
||||
'id',
|
||||
'ideographic',
|
||||
'k',
|
||||
'keyPoints',
|
||||
'keySplines',
|
||||
'keyTimes',
|
||||
'lang',
|
||||
'marker-end',
|
||||
'marker-mid',
|
||||
'marker-start',
|
||||
'markerHeight',
|
||||
'markerUnits',
|
||||
'markerWidth',
|
||||
'mathematical',
|
||||
'max',
|
||||
'min',
|
||||
'name',
|
||||
'offset',
|
||||
'opacity',
|
||||
'orient',
|
||||
'origin',
|
||||
'overline-position',
|
||||
'overline-thickness',
|
||||
'panose-1',
|
||||
'path',
|
||||
'pathLength',
|
||||
'points',
|
||||
'preserveAspectRatio',
|
||||
'r',
|
||||
'refX',
|
||||
'refY',
|
||||
'repeatCount',
|
||||
'repeatDur',
|
||||
'requiredExtensions',
|
||||
'requiredFeatures',
|
||||
'restart',
|
||||
'rotate',
|
||||
'rx',
|
||||
'ry',
|
||||
'slope',
|
||||
'stemh',
|
||||
'stemv',
|
||||
'stop-color',
|
||||
'stop-opacity',
|
||||
'strikethrough-position',
|
||||
'strikethrough-thickness',
|
||||
'stroke',
|
||||
'stroke-dasharray',
|
||||
'stroke-dashoffset',
|
||||
'stroke-linecap',
|
||||
'stroke-linejoin',
|
||||
'stroke-miterlimit',
|
||||
'stroke-opacity',
|
||||
'stroke-width',
|
||||
'systemLanguage',
|
||||
'target',
|
||||
'text-anchor',
|
||||
'to',
|
||||
'transform',
|
||||
'type',
|
||||
'u1',
|
||||
'u2',
|
||||
'underline-position',
|
||||
'underline-thickness',
|
||||
'unicode',
|
||||
'unicode-range',
|
||||
'units-per-em',
|
||||
'values',
|
||||
'version',
|
||||
'viewBox',
|
||||
'visibility',
|
||||
'width',
|
||||
'widths',
|
||||
'x',
|
||||
'x-height',
|
||||
'x1',
|
||||
'x2',
|
||||
'xlink:actuate',
|
||||
'xlink:arcrole',
|
||||
'xlink:href',
|
||||
'xlink:role',
|
||||
'xlink:show',
|
||||
'xlink:title',
|
||||
'xlink:type',
|
||||
'xml:base',
|
||||
'xml:lang',
|
||||
'xml:space',
|
||||
'xmlns',
|
||||
'xmlns:xlink',
|
||||
'y',
|
||||
'y1',
|
||||
'y2',
|
||||
'zoomAndPan',
|
||||
'style',
|
||||
]
|
||||
|
||||
|
||||
def sanitize_svg(file_data, strip: bool = True, elements: str = ALLOWED_ELEMENTS_SVG, attributes: str = ALLOWED_ATTRIBUTES_SVG) -> str:
|
||||
def sanitize_svg(
|
||||
file_data,
|
||||
strip: bool = True,
|
||||
elements: str = ALLOWED_ELEMENTS_SVG,
|
||||
attributes: str = ALLOWED_ATTRIBUTES_SVG,
|
||||
) -> str:
|
||||
"""Sanitize a SVG file.
|
||||
|
||||
Args:
|
||||
@@ -65,7 +212,7 @@ def sanitize_svg(file_data, strip: bool = True, elements: str = ALLOWED_ELEMENTS
|
||||
attributes=attributes,
|
||||
strip=strip,
|
||||
strip_comments=strip,
|
||||
css_sanitizer=CSSSanitizer()
|
||||
css_sanitizer=CSSSanitizer(),
|
||||
)
|
||||
|
||||
return cleaned
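Usage sketch: sanitizing an uploaded SVG string with the defaults above. The <script> tag is not in ALLOWED_ELEMENTS_SVG, so it is stripped and can no longer execute; the <rect> and its geometry attributes survive.

dirty = (
    '<svg xmlns="http://www.w3.org/2000/svg">'
    '<script>alert(1)</script>'
    '<rect width="10" height="10"/>'
    '</svg>'
)

clean_svg = sanitize_svg(dirty)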
|
||||
|
||||
@@ -16,7 +16,7 @@ logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def default_sentry_dsn():
|
||||
"""Return the default Sentry.io DSN for InvenTree"""
|
||||
"""Return the default Sentry.io DSN for InvenTree."""
|
||||
return 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600'
|
||||
|
||||
|
||||
@@ -36,8 +36,8 @@ def sentry_ignore_errors():
|
||||
|
||||
|
||||
def init_sentry(dsn, sample_rate, tags):
|
||||
"""Initialize sentry.io error reporting"""
|
||||
logger.info("Initializing sentry.io integration")
|
||||
"""Initialize sentry.io error reporting."""
|
||||
logger.info('Initializing sentry.io integration')
|
||||
|
||||
sentry_sdk.init(
|
||||
dsn=dsn,
|
||||
@@ -46,7 +46,9 @@ def init_sentry(dsn, sample_rate, tags):
|
||||
send_default_pii=True,
|
||||
ignore_errors=sentry_ignore_errors(),
|
||||
release=InvenTree.version.INVENTREE_SW_VERSION,
|
||||
environment='development' if InvenTree.version.isInvenTreeDevelopmentVersion() else 'production'
|
||||
environment='development'
|
||||
if InvenTree.version.isInvenTreeDevelopmentVersion()
|
||||
else 'production',
|
||||
)
|
||||
|
||||
for key, val in tags.items():
|
||||
@@ -60,13 +62,12 @@ def init_sentry(dsn, sample_rate, tags):
|
||||
|
||||
|
||||
def report_exception(exc):
|
||||
"""Report an exception to sentry.io"""
|
||||
"""Report an exception to sentry.io."""
|
||||
if settings.SENTRY_ENABLED and settings.SENTRY_DSN:
|
||||
|
||||
if not any(isinstance(exc, e) for e in sentry_ignore_errors()):
|
||||
logger.info("Reporting exception to sentry.io: %s", exc)
|
||||
logger.info('Reporting exception to sentry.io: %s', exc)
|
||||
|
||||
try:
|
||||
sentry_sdk.capture_exception(exc)
|
||||
except Exception:
|
||||
logger.warning("Failed to report exception to sentry.io")
|
||||
logger.warning('Failed to report exception to sentry.io')
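A sketch of how the two helpers are expected to be wired together (the DSN here is a placeholder, not a real project key):

init_sentry(
    dsn='https://examplePublicKey@o0.ingest.sentry.io/0',  # placeholder DSN
    sample_rate=0.1,
    tags={'instance': 'demo'},
)

try:
    1 / 0
except Exception as exc:
    # Forwarded only if SENTRY_ENABLED is set and the type is not in sentry_ignore_errors()
    report_exception(exc)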
|
||||
|
||||
@@ -7,7 +7,6 @@ from decimal import Decimal
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.sites.models import Site
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -26,7 +25,10 @@ from taggit.serializers import TaggitSerializer
|
||||
import common.models as common_models
|
||||
from common.settings import currency_code_default, currency_code_mappings
|
||||
from InvenTree.fields import InvenTreeRestURLField, InvenTreeURLField
|
||||
from InvenTree.helpers_model import download_image_from_url
|
||||
|
||||
|
||||
class EmptySerializer(serializers.Serializer):
|
||||
"""Empty serializer for use in testing."""
|
||||
|
||||
|
||||
class InvenTreeMoneySerializer(MoneyField):
|
||||
@@ -37,9 +39,9 @@ class InvenTreeMoneySerializer(MoneyField):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Override default values."""
|
||||
kwargs["max_digits"] = kwargs.get("max_digits", 19)
|
||||
self.decimal_places = kwargs["decimal_places"] = kwargs.get("decimal_places", 6)
|
||||
kwargs["required"] = kwargs.get("required", False)
|
||||
kwargs['max_digits'] = kwargs.get('max_digits', 19)
|
||||
self.decimal_places = kwargs['decimal_places'] = kwargs.get('decimal_places', 6)
|
||||
kwargs['required'] = kwargs.get('required', False)
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@@ -57,26 +59,33 @@ class InvenTreeMoneySerializer(MoneyField):
|
||||
amount = Decimal(amount)
|
||||
amount = round(amount, self.decimal_places)
|
||||
except Exception:
|
||||
raise ValidationError({
|
||||
self.field_name: [_("Must be a valid number")],
|
||||
})
|
||||
raise ValidationError({self.field_name: [_('Must be a valid number')]})
|
||||
|
||||
currency = data.get(get_currency_field_name(self.field_name), self.default_currency)
|
||||
currency = data.get(
|
||||
get_currency_field_name(self.field_name), self.default_currency
|
||||
)
|
||||
|
||||
if currency and amount is not None and not isinstance(amount, MONEY_CLASSES) and amount is not empty:
|
||||
if (
|
||||
currency
|
||||
and amount is not None
|
||||
and not isinstance(amount, MONEY_CLASSES)
|
||||
and amount is not empty
|
||||
):
|
||||
return Money(amount, currency)
|
||||
|
||||
return amount
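Shown standalone, the rounding and wrapping performed above amounts to the following (a sketch with plain Decimal/Money values, not a test of the serializer itself):

from decimal import Decimal

from djmoney.money import Money  # the Money class the serializer ultimately returns

amount = round(Decimal('12.3456789'), 6)   # 6 decimal places, the default set above
price = Money(amount, 'USD')
assert price.amount == Decimal('12.345679')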
|
||||
|
||||
|
||||
class InvenTreeCurrencySerializer(serializers.ChoiceField):
|
||||
"""Custom serializers for selecting currency option"""
|
||||
"""Custom serializers for selecting currency option."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize the currency serializer"""
|
||||
"""Initialize the currency serializer."""
|
||||
choices = currency_code_mappings()
|
||||
|
||||
allow_blank = kwargs.get('allow_blank', False) or kwargs.get('allow_null', False)
|
||||
allow_blank = kwargs.get('allow_blank', False) or kwargs.get(
|
||||
'allow_null', False
|
||||
)
|
||||
|
||||
if allow_blank:
|
||||
choices = [('', '---------')] + choices
|
||||
@@ -97,6 +106,7 @@ class InvenTreeCurrencySerializer(serializers.ChoiceField):
|
||||
|
||||
class DependentField(serializers.Field):
|
||||
"""A dependent field can be used to dynamically return child fields based on the value of other fields."""
|
||||
|
||||
child = None
|
||||
|
||||
def __init__(self, *args, depends_on, field_serializer, **kwargs):
|
||||
@@ -126,7 +136,7 @@ class DependentField(serializers.Field):
|
||||
|
||||
def get_child(self, raise_exception=False):
|
||||
"""This method tries to extract the child based on the provided data in the request by the client."""
|
||||
data = deepcopy(self.context["request"].data)
|
||||
data = deepcopy(self.context['request'].data)
|
||||
|
||||
def visit_parent(node):
|
||||
"""Recursively extract the data for the parent field/serializer in reverse."""
|
||||
@@ -136,8 +146,9 @@ class DependentField(serializers.Field):
|
||||
visit_parent(node.parent)
|
||||
|
||||
# only do for composite fields and stop right before the current field
|
||||
if hasattr(node, "child") and node is not self and isinstance(data, dict):
|
||||
if hasattr(node, 'child') and node is not self and isinstance(data, dict):
|
||||
data = data.get(node.field_name, None)
|
||||
|
||||
visit_parent(self)
|
||||
|
||||
# ensure that data is a dictionary and that a parent exists
|
||||
@@ -146,13 +157,22 @@ class DependentField(serializers.Field):
|
||||
|
||||
# check if the request data contains the dependent fields, otherwise skip getting the child
|
||||
for f in self.depends_on:
|
||||
if not data.get(f, None):
|
||||
return
|
||||
if data.get(f, None) is None:
|
||||
if (
|
||||
self.parent
|
||||
and (v := getattr(self.parent.fields[f], 'default', None))
|
||||
is not None
|
||||
):
|
||||
data[f] = v
|
||||
else:
|
||||
return
|
||||
|
||||
# partially validate the data for options requests that set raise_exception while calling .get_child(...)
|
||||
if raise_exception:
|
||||
validation_data = {k: v for k, v in data.items() if k in self.depends_on}
|
||||
serializer = self.parent.__class__(context=self.context, data=validation_data, partial=True)
|
||||
serializer = self.parent.__class__(
|
||||
context=self.context, data=validation_data, partial=True
|
||||
)
|
||||
serializer.is_valid(raise_exception=raise_exception)
|
||||
|
||||
# try to get the field serializer
|
||||
@@ -196,7 +216,6 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
"""Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
|
||||
# If instance is None, we are creating a new instance
|
||||
if instance is None and data is not empty:
|
||||
|
||||
if data is None:
|
||||
data = OrderedDict()
|
||||
else:
|
||||
@@ -211,7 +230,6 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
fields = model_meta.get_field_info(ModelClass)
|
||||
|
||||
for field_name, field in fields.fields.items():
|
||||
|
||||
"""
|
||||
Update the field IF (and ONLY IF):
|
||||
|
||||
@@ -219,7 +237,6 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
- The field does not already have a value set
|
||||
"""
|
||||
if field.has_default() and field_name not in data:
|
||||
|
||||
value = field.default
|
||||
|
||||
# Account for callable functions
|
||||
@@ -247,9 +264,7 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
fields = model_meta.get_field_info(ModelClass)
|
||||
|
||||
for field_name, field in fields.fields.items():
|
||||
|
||||
if field.has_default() and field_name not in initials:
|
||||
|
||||
value = field.default
|
||||
|
||||
# Account for callable functions
|
||||
@@ -283,7 +298,7 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
return self.instance
|
||||
|
||||
def create(self, validated_data):
|
||||
"""Custom create method which supports field adjustment"""
|
||||
"""Custom create method which supports field adjustment."""
|
||||
initial_data = validated_data.copy()
|
||||
|
||||
# Remove any fields which do not exist on the model
|
||||
@@ -337,8 +352,12 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
|
||||
try:
|
||||
instance.full_clean()
|
||||
except (ValidationError, DjangoValidationError) as exc:
|
||||
|
||||
data = exc.message_dict
|
||||
if hasattr(exc, 'message_dict'):
|
||||
data = exc.message_dict
|
||||
elif hasattr(exc, 'message'):
|
||||
data = {'non_field_errors': [str(exc.message)]}
|
||||
else:
|
||||
data = {'non_field_errors': [str(exc)]}
|
||||
|
||||
# Change '__all__' key (django style) to 'non_field_errors' (DRF style)
|
||||
if '__all__' in data:
|
||||
@@ -369,6 +388,7 @@ class InvenTreeTaggitSerializer(TaggitSerializer):
|
||||
|
||||
class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
|
||||
"""Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -377,38 +397,31 @@ class UserSerializer(InvenTreeModelSerializer):
|
||||
|
||||
class Meta:
|
||||
"""Metaclass defines serializer fields."""
|
||||
model = User
|
||||
fields = [
|
||||
'pk',
|
||||
'username',
|
||||
'first_name',
|
||||
'last_name',
|
||||
'email',
|
||||
]
|
||||
|
||||
read_only_fields = [
|
||||
'username',
|
||||
]
|
||||
model = User
|
||||
fields = ['pk', 'username', 'first_name', 'last_name', 'email']
|
||||
|
||||
read_only_fields = ['username']
|
||||
|
||||
|
||||
class ExendedUserSerializer(UserSerializer):
|
||||
"""Serializer for a User with a bit more info."""
|
||||
|
||||
from users.serializers import GroupSerializer
|
||||
|
||||
groups = GroupSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta(UserSerializer.Meta):
|
||||
"""Metaclass defines serializer fields."""
|
||||
|
||||
fields = UserSerializer.Meta.fields + [
|
||||
'groups',
|
||||
'is_staff',
|
||||
'is_superuser',
|
||||
'is_active'
|
||||
'is_active',
|
||||
]
|
||||
|
||||
read_only_fields = UserSerializer.Meta.read_only_fields + [
|
||||
'groups',
|
||||
]
|
||||
read_only_fields = UserSerializer.Meta.read_only_fields + ['groups']
|
||||
|
||||
def validate(self, attrs):
|
||||
"""Expanded validation for changing user role."""
|
||||
@@ -424,17 +437,20 @@ class ExendedUserSerializer(UserSerializer):
|
||||
# Staff can change any role except is_superuser
|
||||
pass
|
||||
else:
|
||||
raise PermissionDenied(_("You do not have permission to change this user role."))
|
||||
raise PermissionDenied(
|
||||
_('You do not have permission to change this user role.')
|
||||
)
|
||||
return super().validate(attrs)
|
||||
|
||||
|
||||
class UserCreateSerializer(ExendedUserSerializer):
|
||||
"""Serializer for creating a new User."""
|
||||
|
||||
def validate(self, attrs):
|
||||
"""Expanded valiadation for auth."""
|
||||
# Check that the user trying to create a new user is a superuser
|
||||
if not self.context['request'].user.is_superuser:
|
||||
raise serializers.ValidationError(_("Only superusers can create new users"))
|
||||
raise serializers.ValidationError(_('Only superusers can create new users'))
|
||||
|
||||
# Generate a random password
|
||||
password = User.objects.make_random_password(length=14)
|
||||
@@ -443,17 +459,27 @@ class UserCreateSerializer(ExendedUserSerializer):
|
||||
|
||||
def create(self, validated_data):
|
||||
"""Send an e email to the user after creation."""
|
||||
from InvenTree.helpers_model import get_base_url
|
||||
|
||||
base_url = get_base_url()
|
||||
|
||||
instance = super().create(validated_data)
|
||||
|
||||
# Make sure the user cannot login until they have set a password
|
||||
instance.set_unusable_password()
|
||||
# Send the user an onboarding email (from current site)
|
||||
current_site = Site.objects.get_current()
|
||||
domain = current_site.domain
|
||||
instance.email_user(
|
||||
subject=_(f"Welcome to {current_site.name}"),
|
||||
message=_(f"Your account has been created.\n\nPlease use the password reset function to get access (at https://{domain})."),
|
||||
|
||||
message = (
|
||||
_('Your account has been created.')
|
||||
+ '\n\n'
|
||||
+ _('Please use the password reset function to login')
|
||||
)
|
||||
|
||||
if base_url:
|
||||
message += f'\n\nURL: {base_url}'
|
||||
|
||||
# Send the user an onboarding email (from current site)
|
||||
instance.email_user(subject=_('Welcome to InvenTree'), message=message)
|
||||
|
||||
return instance
|
||||
|
||||
|
||||
@@ -490,7 +516,7 @@ class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
|
||||
|
||||
@staticmethod
|
||||
def attachment_fields(extra_fields=None):
|
||||
"""Default set of fields for an attachment serializer"""
|
||||
"""Default set of fields for an attachment serializer."""
|
||||
fields = [
|
||||
'pk',
|
||||
'attachment',
|
||||
@@ -509,17 +535,11 @@ class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
|
||||
|
||||
user_detail = UserSerializer(source='user', read_only=True, many=False)
|
||||
|
||||
attachment = InvenTreeAttachmentSerializerField(
|
||||
required=False,
|
||||
allow_null=False,
|
||||
)
|
||||
attachment = InvenTreeAttachmentSerializerField(required=False, allow_null=False)
|
||||
|
||||
# The 'filename' field must be present in the serializer
|
||||
filename = serializers.CharField(
|
||||
label=_('Filename'),
|
||||
required=False,
|
||||
source='basename',
|
||||
allow_blank=False,
|
||||
label=_('Filename'), required=False, source='basename', allow_blank=False
|
||||
)
|
||||
|
||||
upload_date = serializers.DateField(read_only=True)
|
||||
@@ -553,7 +573,7 @@ class InvenTreeDecimalField(serializers.FloatField):
|
||||
try:
|
||||
return Decimal(str(data))
|
||||
except Exception:
|
||||
raise serializers.ValidationError(_("Invalid value"))
|
||||
raise serializers.ValidationError(_('Invalid value'))
|
||||
|
||||
|
||||
class DataFileUploadSerializer(serializers.Serializer):
|
||||
@@ -570,13 +590,11 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
fields = [
|
||||
'data_file',
|
||||
]
|
||||
fields = ['data_file']
|
||||
|
||||
data_file = serializers.FileField(
|
||||
label=_("Data File"),
|
||||
help_text=_("Select data file for upload"),
|
||||
label=_('Data File'),
|
||||
help_text=_('Select data file for upload'),
|
||||
required=True,
|
||||
allow_empty_file=False,
|
||||
)
|
||||
@@ -590,20 +608,16 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
# Remove the leading . from the extension
|
||||
ext = ext[1:]
|
||||
|
||||
accepted_file_types = [
|
||||
'xls', 'xlsx',
|
||||
'csv', 'tsv',
|
||||
'xml',
|
||||
]
|
||||
accepted_file_types = ['xls', 'xlsx', 'csv', 'tsv', 'xml']
|
||||
|
||||
if ext not in accepted_file_types:
|
||||
raise serializers.ValidationError(_("Unsupported file type"))
|
||||
raise serializers.ValidationError(_('Unsupported file type'))
|
||||
|
||||
# Impose a 50MB limit on uploaded BOM files
|
||||
max_upload_file_size = 50 * 1024 * 1024
|
||||
|
||||
if data_file.size > max_upload_file_size:
|
||||
raise serializers.ValidationError(_("File is too large"))
|
||||
raise serializers.ValidationError(_('File is too large'))
|
||||
|
||||
# Read file data into memory (bytes object)
|
||||
try:
|
||||
@@ -624,10 +638,10 @@ class DataFileUploadSerializer(serializers.Serializer):
|
||||
raise serializers.ValidationError(str(e))
|
||||
|
||||
if len(self.dataset.headers) == 0:
|
||||
raise serializers.ValidationError(_("No columns found in file"))
|
||||
raise serializers.ValidationError(_('No columns found in file'))
|
||||
|
||||
if len(self.dataset) == 0:
|
||||
raise serializers.ValidationError(_("No data rows found in file"))
|
||||
raise serializers.ValidationError(_('No data rows found in file'))
|
||||
|
||||
return data_file
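The upload validation above boils down to an extension whitelist plus a size cap; the same rules in a standalone sketch:

ACCEPTED_TYPES = {'xls', 'xlsx', 'csv', 'tsv', 'xml'}
MAX_UPLOAD_SIZE = 50 * 1024 * 1024  # 50MB, matching the limit above

def check_upload(filename: str, size: int) -> None:
    """Raise ValueError for any file the serializer above would reject."""
    ext = filename.rsplit('.', 1)[-1].lower()
    if ext not in ACCEPTED_TYPES:
        raise ValueError('Unsupported file type')
    if size > MAX_UPLOAD_SIZE:
        raise ValueError('File is too large')

check_upload('bom.csv', 1024)      # passes silently
# check_upload('bom.pdf', 1024)    # ValueError: Unsupported file type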
|
||||
|
||||
@@ -721,24 +735,14 @@ class DataFileExtractSerializer(serializers.Serializer):
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
fields = [
|
||||
'columns',
|
||||
'rows',
|
||||
]
|
||||
fields = ['columns', 'rows']
|
||||
|
||||
# Mapping of columns
|
||||
columns = serializers.ListField(
|
||||
child=serializers.CharField(
|
||||
allow_blank=True,
|
||||
),
|
||||
)
|
||||
columns = serializers.ListField(child=serializers.CharField(allow_blank=True))
|
||||
|
||||
rows = serializers.ListField(
|
||||
child=serializers.ListField(
|
||||
child=serializers.CharField(
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
),
|
||||
child=serializers.CharField(allow_blank=True, allow_null=True)
|
||||
)
|
||||
)
|
||||
|
||||
@@ -750,10 +754,10 @@ class DataFileExtractSerializer(serializers.Serializer):
|
||||
self.rows = data.get('rows', [])
|
||||
|
||||
if len(self.rows) == 0:
|
||||
raise serializers.ValidationError(_("No data rows provided"))
|
||||
raise serializers.ValidationError(_('No data rows provided'))
|
||||
|
||||
if len(self.columns) == 0:
|
||||
raise serializers.ValidationError(_("No data columns supplied"))
|
||||
raise serializers.ValidationError(_('No data columns supplied'))
|
||||
|
||||
self.validate_extracted_columns()
|
||||
|
||||
@@ -776,16 +780,9 @@ class DataFileExtractSerializer(serializers.Serializer):
|
||||
processed_row = self.process_row(self.row_to_dict(row))
|
||||
|
||||
if processed_row:
|
||||
rows.append({
|
||||
"original": row,
|
||||
"data": processed_row,
|
||||
})
|
||||
rows.append({'original': row, 'data': processed_row})
|
||||
|
||||
return {
|
||||
'fields': model_fields,
|
||||
'columns': self.columns,
|
||||
'rows': rows,
|
||||
}
|
||||
return {'fields': model_fields, 'columns': self.columns, 'rows': rows}
|
||||
|
||||
def process_row(self, row):
|
||||
"""Process a 'row' of data, which is a mapped column:value dict.
|
||||
@@ -799,12 +796,9 @@ class DataFileExtractSerializer(serializers.Serializer):
|
||||
|
||||
def row_to_dict(self, row):
|
||||
"""Convert a "row" to a named data dict."""
|
||||
row_dict = {
|
||||
'errors': {},
|
||||
}
|
||||
row_dict = {'errors': {}}
|
||||
|
||||
for idx, value in enumerate(row):
|
||||
|
||||
if idx < len(self.columns):
|
||||
col = self.columns[idx]
|
||||
|
||||
@@ -824,16 +818,16 @@ class DataFileExtractSerializer(serializers.Serializer):
|
||||
cols_seen = set()
|
||||
|
||||
for name, field in model_fields.items():
|
||||
|
||||
required = field.get('required', False)
|
||||
|
||||
# Check for missing required columns
|
||||
if required:
|
||||
if name not in self.columns:
|
||||
raise serializers.ValidationError(_(f"Missing required column: '{name}'"))
|
||||
raise serializers.ValidationError(
|
||||
_(f"Missing required column: '{name}'")
|
||||
)
|
||||
|
||||
for col in self.columns:
|
||||
|
||||
if not col:
|
||||
continue
|
||||
|
||||
@@ -855,17 +849,15 @@ class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
|
||||
"""
|
||||
|
||||
def skip_create_fields(self):
|
||||
"""Ensure the 'remote_image' field is skipped when creating a new instance"""
|
||||
return [
|
||||
'remote_image',
|
||||
]
|
||||
"""Ensure the 'remote_image' field is skipped when creating a new instance."""
|
||||
return ['remote_image']
|
||||
|
||||
remote_image = serializers.URLField(
|
||||
required=False,
|
||||
allow_blank=False,
|
||||
write_only=True,
|
||||
label=_("Remote Image"),
|
||||
help_text=_("URL of remote image file"),
|
||||
label=_('Remote Image'),
|
||||
help_text=_('URL of remote image file'),
|
||||
)
|
||||
|
||||
def validate_remote_image(self, url):
|
||||
@@ -874,11 +866,17 @@ class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
|
||||
- Attempt to download the image and store it against this object instance
|
||||
- Catches and re-throws any errors
|
||||
"""
|
||||
from InvenTree.helpers_model import download_image_from_url
|
||||
|
||||
if not url:
|
||||
return
|
||||
|
||||
if not common_models.InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_FROM_URL'):
|
||||
raise ValidationError(_("Downloading images from remote URL is not enabled"))
|
||||
if not common_models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_DOWNLOAD_FROM_URL'
|
||||
):
|
||||
raise ValidationError(
|
||||
_('Downloading images from remote URL is not enabled')
|
||||
)
|
||||
|
||||
try:
|
||||
self.remote_image_file = download_image_from_url(url)
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,37 +1,37 @@
|
||||
"""API endpoints for social authentication with allauth."""
|
||||
|
||||
import logging
|
||||
from importlib import import_module
|
||||
|
||||
from django.urls import include, path, reverse
|
||||
from django.urls import NoReverseMatch, include, path, reverse
|
||||
|
||||
from allauth.account.models import EmailAddress
|
||||
from allauth.socialaccount import providers
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from allauth.socialaccount.providers.keycloak.views import \
|
||||
KeycloakOAuth2Adapter
|
||||
from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter,
|
||||
OAuth2LoginView)
|
||||
from allauth.socialaccount.providers.oauth2.views import OAuth2Adapter, OAuth2LoginView
|
||||
from drf_spectacular.utils import OpenApiResponse, extend_schema
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import NotFound
|
||||
from rest_framework.permissions import AllowAny, IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
|
||||
import InvenTree.sso
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.mixins import CreateAPI, ListAPI, ListCreateAPI
|
||||
from InvenTree.serializers import InvenTreeModelSerializer
|
||||
from InvenTree.serializers import EmptySerializer, InvenTreeModelSerializer
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class GenericOAuth2ApiLoginView(OAuth2LoginView):
|
||||
"""Api view to login a user with a social account"""
|
||||
"""Api view to login a user with a social account."""
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
"""Dispatch the regular login view directly."""
|
||||
return self.login(request, *args, **kwargs)
|
||||
|
||||
|
||||
class GenericOAuth2ApiConnectView(GenericOAuth2ApiLoginView):
|
||||
"""Api view to connect a social account to the current user"""
|
||||
"""Api view to connect a social account to the current user."""
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
"""Dispatch the connect request directly."""
|
||||
@@ -46,16 +46,16 @@ class GenericOAuth2ApiConnectView(GenericOAuth2ApiLoginView):
|
||||
def handle_oauth2(adapter: OAuth2Adapter):
|
||||
"""Define urls for oauth2 endpoints."""
|
||||
return [
|
||||
path('login/', GenericOAuth2ApiLoginView.adapter_view(adapter), name=f'{provider.id}_api_login'),
|
||||
path('connect/', GenericOAuth2ApiConnectView.adapter_view(adapter), name=f'{provider.id}_api_connect'),
|
||||
]
|
||||
|
||||
|
||||
def handle_keycloak():
|
||||
"""Define urls for keycloak."""
|
||||
return [
|
||||
path('login/', GenericOAuth2ApiLoginView.adapter_view(KeycloakOAuth2Adapter), name='keycloak_api_login'),
|
||||
path('connect/', GenericOAuth2ApiConnectView.adapter_view(KeycloakOAuth2Adapter), name='keycloak_api_connet'),
|
||||
path(
|
||||
'login/',
|
||||
GenericOAuth2ApiLoginView.adapter_view(adapter),
|
||||
name=f'{provider.id}_api_login',
|
||||
),
|
||||
path(
|
||||
'connect/',
|
||||
GenericOAuth2ApiConnectView.adapter_view(adapter),
|
||||
name=f'{provider.id}_api_connect',
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -72,14 +72,22 @@ legacy = {
|
||||
social_auth_urlpatterns = []
|
||||
|
||||
provider_urlpatterns = []
|
||||
for provider in providers.registry.get_list():
|
||||
|
||||
for name, provider in providers.registry.provider_map.items():
|
||||
try:
|
||||
prov_mod = import_module(provider.get_package() + ".views")
|
||||
prov_mod = import_module(provider.get_package() + '.views')
|
||||
except ImportError:
|
||||
logger.exception('Could not import authentication provider %s', name)
|
||||
continue
|
||||
|
||||
# Try to extract the adapter class
|
||||
adapters = [cls for cls in prov_mod.__dict__.values() if isinstance(cls, type) and not cls == OAuth2Adapter and issubclass(cls, OAuth2Adapter)]
|
||||
adapters = [
|
||||
cls
|
||||
for cls in prov_mod.__dict__.values()
|
||||
if isinstance(cls, type)
|
||||
and not cls == OAuth2Adapter
|
||||
and issubclass(cls, OAuth2Adapter)
|
||||
]
|
||||
|
||||
# Get urls
|
||||
urls = []
|
||||
@@ -87,12 +95,17 @@ for provider in providers.registry.get_list():
|
||||
urls = handle_oauth2(adapter=adapters[0])
|
||||
else:
|
||||
if provider.id in legacy:
|
||||
logger.warning('`%s` is not supported on platform UI. Use `%s` instead.', provider.id, legacy[provider.id])
|
||||
logger.warning(
|
||||
'`%s` is not supported on platform UI. Use `%s` instead.',
|
||||
provider.id,
|
||||
legacy[provider.id],
|
||||
)
|
||||
continue
|
||||
elif provider.id == 'keycloak':
|
||||
urls = handle_keycloak()
|
||||
else:
|
||||
logger.error('Found handler that is not yet ready for platform UI: `%s`. Open a feature request on GitHub if you need it implemented.', provider.id)
|
||||
logger.error(
|
||||
'Found handler that is not yet ready for platform UI: `%s`. Open a feature request on GitHub if you need it implemented.',
|
||||
provider.id,
|
||||
)
|
||||
continue
|
||||
provider_urlpatterns += [path(f'{provider.id}/', include(urls))]
|
||||
|
||||
@@ -100,35 +113,76 @@ for provider in providers.registry.get_list():
|
||||
social_auth_urlpatterns += provider_urlpatterns
|
||||
|
||||
|
||||
class SocialProviderListResponseSerializer(serializers.Serializer):
|
||||
"""Serializer for the SocialProviderListView."""
|
||||
|
||||
class SocialProvider(serializers.Serializer):
|
||||
"""Serializer for the SocialProviderListResponseSerializer."""
|
||||
|
||||
id = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
configured = serializers.BooleanField()
|
||||
login = serializers.URLField()
|
||||
connect = serializers.URLField()
|
||||
display_name = serializers.CharField()
|
||||
|
||||
sso_enabled = serializers.BooleanField()
|
||||
sso_registration = serializers.BooleanField()
|
||||
mfa_required = serializers.BooleanField()
|
||||
providers = SocialProvider(many=True)
|
||||
registration_enabled = serializers.BooleanField()
|
||||
password_forgotten_enabled = serializers.BooleanField()
|
||||
|
||||
|
||||
class SocialProviderListView(ListAPI):
|
||||
"""List of available social providers."""
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
permission_classes = (AllowAny,)
|
||||
serializer_class = EmptySerializer
|
||||
|
||||
@extend_schema(
|
||||
responses={200: OpenApiResponse(response=SocialProviderListResponseSerializer)}
|
||||
)
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Get the list of providers."""
|
||||
provider_list = []
|
||||
for provider in providers.registry.get_list():
|
||||
for provider in providers.registry.provider_map.values():
|
||||
provider_data = {
|
||||
'id': provider.id,
|
||||
'name': provider.name,
|
||||
'login': request.build_absolute_uri(reverse(f'{provider.id}_api_login')),
|
||||
'connect': request.build_absolute_uri(reverse(f'{provider.id}_api_connect')),
|
||||
'configured': False
|
||||
'configured': False,
|
||||
}
|
||||
|
||||
try:
|
||||
provider_app = provider.get_app(request)
|
||||
provider_data['display_name'] = provider_app.name
|
||||
provider_data['configured'] = True
|
||||
except SocialApp.DoesNotExist:
|
||||
provider_data['display_name'] = provider.name
|
||||
provider_data['login'] = request.build_absolute_uri(
|
||||
reverse(f'{provider.id}_api_login')
|
||||
)
|
||||
except NoReverseMatch:
|
||||
provider_data['login'] = None
|
||||
|
||||
try:
|
||||
provider_data['connect'] = request.build_absolute_uri(
|
||||
reverse(f'{provider.id}_api_connect')
|
||||
)
|
||||
except NoReverseMatch:
|
||||
provider_data['connect'] = None
|
||||
|
||||
provider_data['configured'] = InvenTree.sso.check_provider(provider)
|
||||
provider_data['display_name'] = InvenTree.sso.provider_display_name(
|
||||
provider
|
||||
)
|
||||
|
||||
provider_list.append(provider_data)
|
||||
|
||||
data = {
|
||||
'sso_enabled': InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO'),
|
||||
'sso_registration': InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'),
|
||||
'sso_enabled': InvenTree.sso.login_enabled(),
|
||||
'sso_registration': InvenTree.sso.registration_enabled(),
|
||||
'mfa_required': InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA'),
|
||||
'providers': provider_list
|
||||
'providers': provider_list,
|
||||
'registration_enabled': InvenTreeSetting.get_setting('LOGIN_ENABLE_REG'),
|
||||
'password_forgotten_enabled': InvenTreeSetting.get_setting(
|
||||
'LOGIN_ENABLE_PWD_FORGOT'
|
||||
),
|
||||
}
|
||||
return Response(data)
|
||||
|
||||
@@ -155,6 +209,7 @@ class EmptyEmailAddressSerializer(InvenTreeModelSerializer):
|
||||
|
||||
class EmailListView(ListCreateAPI):
|
||||
"""List of registered email addresses for current users."""
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
serializer_class = EmailAddressSerializer
|
||||
|
||||
@@ -165,12 +220,15 @@ class EmailListView(ListCreateAPI):
|
||||
|
||||
class EmailActionMixin(CreateAPI):
|
||||
"""Mixin to modify email addresses for current users."""
|
||||
|
||||
serializer_class = EmptyEmailAddressSerializer
|
||||
permission_classes = (IsAuthenticated,)
|
||||
|
||||
def get_queryset(self):
|
||||
"""Filter queryset for current user."""
|
||||
return EmailAddress.objects.filter(user=self.request.user, pk=self.kwargs['pk']).first()
|
||||
return EmailAddress.objects.filter(
|
||||
user=self.request.user, pk=self.kwargs['pk']
|
||||
).first()
|
||||
|
||||
@extend_schema(responses={200: OpenApiResponse(response=EmailAddressSerializer)})
|
||||
def post(self, request, *args, **kwargs):
|
||||
|
||||
InvenTree/InvenTree/sso.py (new file, 77 additions)
@@ -0,0 +1,77 @@
|
||||
"""Helper functions for Single Sign On functionality."""
|
||||
|
||||
import logging
|
||||
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.helpers import str2bool
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def get_provider_app(provider):
|
||||
"""Return the SocialApp object for the given provider."""
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
|
||||
try:
|
||||
apps = SocialApp.objects.filter(provider__iexact=provider.id)
|
||||
except SocialApp.DoesNotExist:
|
||||
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
|
||||
return None
|
||||
|
||||
if apps.count() > 1:
|
||||
logger.warning("Multiple SocialApps found for provider '%s'", provider.id)
|
||||
|
||||
if apps.count() == 0:
|
||||
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
|
||||
|
||||
return apps.first()
|
||||
|
||||
|
||||
def check_provider(provider, raise_error=False):
|
||||
"""Check if the given provider is correctly configured.
|
||||
|
||||
To be correctly configured, the following must be true:
|
||||
|
||||
- Provider must either have a registered SocialApp
|
||||
- Must have at least one site enabled
|
||||
"""
|
||||
import allauth.app_settings
|
||||
|
||||
# First, check that the provider is enabled
|
||||
app = get_provider_app(provider)
|
||||
|
||||
if not app:
|
||||
return False
|
||||
|
||||
if allauth.app_settings.SITES_ENABLED:
|
||||
# At least one matching site must be specified
|
||||
if not app.sites.exists():
|
||||
logger.error('SocialApp %s has no sites configured', app)
|
||||
return False
|
||||
|
||||
# At this point, we assume that the provider is correctly configured
|
||||
return True
|
||||
|
||||
|
||||
def provider_display_name(provider):
|
||||
"""Return the 'display name' for the given provider."""
|
||||
if app := get_provider_app(provider):
|
||||
return app.name
|
||||
|
||||
# Fallback value if app not found
|
||||
return provider.name
|
||||
|
||||
|
||||
def login_enabled() -> bool:
|
||||
"""Return True if SSO login is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO'))
|
||||
|
||||
|
||||
def registration_enabled() -> bool:
|
||||
"""Return True if SSO registration is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'))
|
||||
|
||||
|
||||
def auto_registration_enabled() -> bool:
|
||||
"""Return True if SSO auto-registration is enabled."""
|
||||
return str2bool(InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO'))
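Consumption sketch for the new helpers, mirroring how SocialProviderListView uses them (assumes a configured allauth installation):

from allauth.socialaccount import providers

import InvenTree.sso

for provider in providers.registry.provider_map.values():
    configured = InvenTree.sso.check_provider(provider)       # SocialApp + site present?
    display = InvenTree.sso.provider_display_name(provider)   # SocialApp name, else provider name
    print(provider.id, configured, display)

if InvenTree.sso.login_enabled() and InvenTree.sso.registration_enabled():
    pass  # safe to offer "sign up with ..." buttons in the UI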
|
||||
@@ -13,7 +13,7 @@ from django_q.status import Stat
|
||||
import InvenTree.email
|
||||
import InvenTree.ready
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def is_worker_running(**kwargs):
|
||||
@@ -33,9 +33,7 @@ def is_worker_running(**kwargs):
|
||||
now = timezone.now()
|
||||
past = now - timedelta(minutes=10)
|
||||
|
||||
results = Success.objects.filter(
|
||||
started__gte=past
|
||||
)
|
||||
results = Success.objects.filter(started__gte=past)
|
||||
|
||||
# If any results are returned, then the background worker is running!
|
||||
try:
|
||||
@@ -65,13 +63,13 @@ def check_system_health(**kwargs):
|
||||
|
||||
if not is_worker_running(**kwargs): # pragma: no cover
|
||||
result = False
|
||||
logger.warning(_("Background worker check failed"))
|
||||
logger.warning(_('Background worker check failed'))
|
||||
|
||||
if not InvenTree.email.is_email_configured(): # pragma: no cover
|
||||
result = False
|
||||
logger.warning(_("Email backend not configured"))
|
||||
logger.warning(_('Email backend not configured'))
|
||||
|
||||
if not result: # pragma: no cover
|
||||
logger.warning(_("InvenTree system health checks failed"))
|
||||
logger.warning(_('InvenTree system health checks failed'))
|
||||
|
||||
return result
|
||||
|
||||
@@ -9,68 +9,68 @@ class PurchaseOrderStatus(StatusCode):
|
||||
"""Defines a set of status codes for a PurchaseOrder."""
|
||||
|
||||
# Order status codes
|
||||
PENDING = 10, _("Pending"), 'secondary' # Order is pending (not yet placed)
|
||||
PLACED = 20, _("Placed"), 'primary' # Order has been placed with supplier
|
||||
COMPLETE = 30, _("Complete"), 'success' # Order has been completed
|
||||
CANCELLED = 40, _("Cancelled"), 'danger' # Order was cancelled
|
||||
LOST = 50, _("Lost"), 'warning' # Order was lost
|
||||
RETURNED = 60, _("Returned"), 'warning' # Order was returned
|
||||
PENDING = 10, _('Pending'), 'secondary' # Order is pending (not yet placed)
|
||||
PLACED = 20, _('Placed'), 'primary' # Order has been placed with supplier
|
||||
COMPLETE = 30, _('Complete'), 'success' # Order has been completed
|
||||
CANCELLED = 40, _('Cancelled'), 'danger' # Order was cancelled
|
||||
LOST = 50, _('Lost'), 'warning' # Order was lost
|
||||
RETURNED = 60, _('Returned'), 'warning' # Order was returned
|
||||
|
||||
|
||||
class PurchaseOrderStatusGroups:
|
||||
"""Groups for PurchaseOrderStatus codes."""
|
||||
|
||||
# Open orders
|
||||
OPEN = [
|
||||
PurchaseOrderStatus.PENDING.value,
|
||||
PurchaseOrderStatus.PLACED.value,
|
||||
]
|
||||
OPEN = [PurchaseOrderStatus.PENDING.value, PurchaseOrderStatus.PLACED.value]
|
||||
|
||||
# Failed orders
|
||||
FAILED = [
|
||||
PurchaseOrderStatus.CANCELLED.value,
|
||||
PurchaseOrderStatus.LOST.value,
|
||||
PurchaseOrderStatus.RETURNED.value
|
||||
PurchaseOrderStatus.RETURNED.value,
|
||||
]
|
||||
|
||||
|
||||
class SalesOrderStatus(StatusCode):
|
||||
"""Defines a set of status codes for a SalesOrder."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary' # Order is pending
|
||||
IN_PROGRESS = 15, _("In Progress"), 'primary' # Order has been issued, and is in progress
|
||||
SHIPPED = 20, _("Shipped"), 'success' # Order has been shipped to customer
|
||||
CANCELLED = 40, _("Cancelled"), 'danger' # Order has been cancelled
|
||||
LOST = 50, _("Lost"), 'warning' # Order was lost
|
||||
RETURNED = 60, _("Returned"), 'warning' # Order was returned
|
||||
PENDING = 10, _('Pending'), 'secondary' # Order is pending
|
||||
IN_PROGRESS = (
|
||||
15,
|
||||
_('In Progress'),
|
||||
'primary',
|
||||
) # Order has been issued, and is in progress
|
||||
SHIPPED = 20, _('Shipped'), 'success' # Order has been shipped to customer
|
||||
CANCELLED = 40, _('Cancelled'), 'danger' # Order has been cancelled
|
||||
LOST = 50, _('Lost'), 'warning' # Order was lost
|
||||
RETURNED = 60, _('Returned'), 'warning' # Order was returned
|
||||
|
||||
|
||||
class SalesOrderStatusGroups:
|
||||
"""Groups for SalesOrderStatus codes."""
|
||||
|
||||
# Open orders
|
||||
OPEN = [
|
||||
SalesOrderStatus.PENDING.value,
|
||||
SalesOrderStatus.IN_PROGRESS.value,
|
||||
]
|
||||
OPEN = [SalesOrderStatus.PENDING.value, SalesOrderStatus.IN_PROGRESS.value]
|
||||
|
||||
# Completed orders
|
||||
COMPLETE = [
|
||||
SalesOrderStatus.SHIPPED.value,
|
||||
]
|
||||
COMPLETE = [SalesOrderStatus.SHIPPED.value]
|
||||
|
||||
|
||||
class StockStatus(StatusCode):
|
||||
"""Status codes for Stock."""
|
||||
|
||||
OK = 10, _("OK"), 'success' # Item is OK
|
||||
ATTENTION = 50, _("Attention needed"), 'warning' # Item requires attention
|
||||
DAMAGED = 55, _("Damaged"), 'warning' # Item is damaged
|
||||
DESTROYED = 60, _("Destroyed"), 'danger' # Item is destroyed
|
||||
REJECTED = 65, _("Rejected"), 'danger' # Item is rejected
|
||||
LOST = 70, _("Lost"), 'dark' # Item has been lost
|
||||
QUARANTINED = 75, _("Quarantined"), 'info' # Item has been quarantined and is unavailable
|
||||
RETURNED = 85, _("Returned"), 'warning' # Item has been returned from a customer
|
||||
OK = 10, _('OK'), 'success' # Item is OK
|
||||
ATTENTION = 50, _('Attention needed'), 'warning' # Item requires attention
|
||||
DAMAGED = 55, _('Damaged'), 'warning' # Item is damaged
|
||||
DESTROYED = 60, _('Destroyed'), 'danger' # Item is destroyed
|
||||
REJECTED = 65, _('Rejected'), 'danger' # Item is rejected
|
||||
LOST = 70, _('Lost'), 'dark' # Item has been lost
|
||||
QUARANTINED = (
|
||||
75,
|
||||
_('Quarantined'),
|
||||
'info',
|
||||
) # Item has been quarantined and is unavailable
|
||||
RETURNED = 85, _('Returned'), 'warning' # Item has been returned from a customer
|
||||
|
||||
|
||||
class StockStatusGroups:
|
||||
@@ -129,7 +129,7 @@ class StockHistoryCode(StatusCode):
|
||||
BUILD_CONSUMED = 57, _('Consumed by build order')
|
||||
|
||||
# Sales order codes
|
||||
SHIPPED_AGAINST_SALES_ORDER = 60, _("Shipped against Sales Order")
|
||||
SHIPPED_AGAINST_SALES_ORDER = 60, _('Shipped against Sales Order')
|
||||
|
||||
# Purchase order codes
|
||||
RECEIVED_AGAINST_PURCHASE_ORDER = 70, _('Received against Purchase Order')
|
||||
@@ -145,59 +145,53 @@ class StockHistoryCode(StatusCode):
|
||||
class BuildStatus(StatusCode):
|
||||
"""Build status codes."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary' # Build is pending / active
|
||||
PRODUCTION = 20, _("Production"), 'primary' # BuildOrder is in production
|
||||
CANCELLED = 30, _("Cancelled"), 'danger' # Build was cancelled
|
||||
COMPLETE = 40, _("Complete"), 'success' # Build is complete
|
||||
PENDING = 10, _('Pending'), 'secondary' # Build is pending / active
|
||||
PRODUCTION = 20, _('Production'), 'primary' # BuildOrder is in production
|
||||
CANCELLED = 30, _('Cancelled'), 'danger' # Build was cancelled
|
||||
COMPLETE = 40, _('Complete'), 'success' # Build is complete
|
||||
|
||||
|
||||
class BuildStatusGroups:
|
||||
"""Groups for BuildStatus codes."""
|
||||
|
||||
ACTIVE_CODES = [
|
||||
BuildStatus.PENDING.value,
|
||||
BuildStatus.PRODUCTION.value,
|
||||
]
|
||||
ACTIVE_CODES = [BuildStatus.PENDING.value, BuildStatus.PRODUCTION.value]
|
||||
|
||||
|
||||
class ReturnOrderStatus(StatusCode):
|
||||
"""Defines a set of status codes for a ReturnOrder"""
|
||||
"""Defines a set of status codes for a ReturnOrder."""
|
||||
|
||||
# Order is pending, waiting for receipt of items
|
||||
PENDING = 10, _("Pending"), 'secondary'
|
||||
PENDING = 10, _('Pending'), 'secondary'
|
||||
|
||||
# Items have been received, and are being inspected
|
||||
IN_PROGRESS = 20, _("In Progress"), 'primary'
|
||||
IN_PROGRESS = 20, _('In Progress'), 'primary'
|
||||
|
||||
COMPLETE = 30, _("Complete"), 'success'
|
||||
CANCELLED = 40, _("Cancelled"), 'danger'
|
||||
COMPLETE = 30, _('Complete'), 'success'
|
||||
CANCELLED = 40, _('Cancelled'), 'danger'
|
||||
|
||||
|
||||
class ReturnOrderStatusGroups:
|
||||
"""Groups for ReturnOrderStatus codes."""
|
||||
|
||||
OPEN = [
|
||||
ReturnOrderStatus.PENDING.value,
|
||||
ReturnOrderStatus.IN_PROGRESS.value,
|
||||
]
|
||||
OPEN = [ReturnOrderStatus.PENDING.value, ReturnOrderStatus.IN_PROGRESS.value]
|
||||
|
||||
|
||||
class ReturnOrderLineStatus(StatusCode):
|
||||
"""Defines a set of status codes for a ReturnOrderLineItem"""
|
||||
"""Defines a set of status codes for a ReturnOrderLineItem."""
|
||||
|
||||
PENDING = 10, _("Pending"), 'secondary'
|
||||
PENDING = 10, _('Pending'), 'secondary'
|
||||
|
||||
# Item is to be returned to customer, no other action
|
||||
RETURN = 20, _("Return"), 'success'
|
||||
RETURN = 20, _('Return'), 'success'
|
||||
|
||||
# Item is to be repaired, and returned to customer
|
||||
REPAIR = 30, _("Repair"), 'primary'
|
||||
REPAIR = 30, _('Repair'), 'primary'
|
||||
|
||||
# Item is to be replaced (new item shipped)
|
||||
REPLACE = 40, _("Replace"), 'warning'
|
||||
REPLACE = 40, _('Replace'), 'warning'
|
||||
|
||||
# Item is to be refunded (cannot be repaired)
|
||||
REFUND = 50, _("Refund"), 'info'
|
||||
REFUND = 50, _('Refund'), 'info'
|
||||
|
||||
# Item is rejected
|
||||
REJECT = 60, _("Reject"), 'danger'
|
||||
REJECT = 60, _('Reject'), 'danger'
|
||||
|
||||
@@ -9,24 +9,29 @@ import time
|
||||
import warnings
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, List
|
||||
from typing import Callable
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import AppRegistryNotReady
|
||||
from django.core.management import call_command
|
||||
from django.db import DEFAULT_DB_ALIAS, connections
|
||||
from django.db.migrations.executor import MigrationExecutor
|
||||
from django.db.utils import (NotSupportedError, OperationalError,
|
||||
ProgrammingError)
|
||||
from django.db.utils import NotSupportedError, OperationalError, ProgrammingError
|
||||
from django.utils import timezone
|
||||
|
||||
import requests
|
||||
from maintenance_mode.core import (get_maintenance_mode, maintenance_mode_on,
|
||||
set_maintenance_mode)
|
||||
from maintenance_mode.core import (
|
||||
get_maintenance_mode,
|
||||
maintenance_mode_on,
|
||||
set_maintenance_mode,
|
||||
)
|
||||
|
||||
from InvenTree.config import get_setting
|
||||
from plugin import registry
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
from .version import isInvenTreeUpToDate
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def schedule_task(taskname, **kwargs):
|
||||
@@ -41,7 +46,7 @@ def schedule_task(taskname, **kwargs):
|
||||
try:
|
||||
from django_q.models import Schedule
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.info("Could not start background tasks - App registry not ready")
|
||||
logger.info('Could not start background tasks - App registry not ready')
|
||||
return
|
||||
|
||||
try:
|
||||
@@ -54,11 +59,7 @@ def schedule_task(taskname, **kwargs):
|
||||
else:
|
||||
logger.info("Creating scheduled task '%s'", taskname)
|
||||
|
||||
Schedule.objects.create(
|
||||
name=taskname,
|
||||
func=taskname,
|
||||
**kwargs
|
||||
)
|
||||
Schedule.objects.create(name=taskname, func=taskname, **kwargs)
|
||||
except (OperationalError, ProgrammingError): # pragma: no cover
|
||||
# Required if the DB is not ready yet
|
||||
pass
|
||||
@@ -77,8 +78,8 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
|
||||
"""Check if a periodic task should be run, based on the provided setting name.
|
||||
|
||||
Arguments:
|
||||
task_name: The name of the task being run, e.g. 'dummy_task'
|
||||
setting_name: The name of the global setting, e.g. 'INVENTREE_DUMMY_TASK_INTERVAL'
|
||||
task_name (str): The name of the task being run, e.g. 'dummy_task'
|
||||
n_days (int): The number of days between task runs (default = 1)
|
||||
|
||||
Returns:
|
||||
bool: If the task should be run *now*, or wait another day
|
||||
@@ -93,7 +94,9 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
|
||||
from InvenTree.ready import isInTestMode
|
||||
|
||||
if n_days <= 0:
|
||||
logger.info("Specified interval for task '%s' < 1 - task will not run", task_name)
|
||||
logger.info(
|
||||
"Specified interval for task '%s' < 1 - task will not run", task_name
|
||||
)
|
||||
return False
|
||||
|
||||
# Sleep a random number of seconds to prevent worker conflict
|
||||
@@ -116,7 +119,9 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
|
||||
threshold = datetime.now() - timedelta(days=n_days)
|
||||
|
||||
if last_success > threshold:
|
||||
logger.info("Last successful run for '%s' was too recent - skipping task", task_name)
|
||||
logger.info(
|
||||
"Last successful run for '%s' was too recent - skipping task", task_name
|
||||
)
|
||||
return False
|
||||
|
||||
# Check for any information we have about this task
|
||||
@@ -133,7 +138,9 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
|
||||
threshold = datetime.now() - timedelta(hours=12)
|
||||
|
||||
if last_attempt > threshold:
|
||||
logger.info("Last attempt for '%s' was too recent - skipping task", task_name)
|
||||
logger.info(
|
||||
"Last attempt for '%s' was too recent - skipping task", task_name
|
||||
)
|
||||
return False
|
||||
|
||||
# Record this attempt
|
||||
@@ -144,22 +151,28 @@ def check_daily_holdoff(task_name: str, n_days: int = 1) -> bool:
|
||||
|
||||
|
||||
def record_task_attempt(task_name: str):
|
||||
"""Record that a multi-day task has been attempted *now*"""
|
||||
"""Record that a multi-day task has been attempted *now*."""
|
||||
from common.models import InvenTreeSetting
|
||||
|
||||
logger.info("Logging task attempt for '%s'", task_name)
|
||||
|
||||
InvenTreeSetting.set_setting(f'_{task_name}_ATTEMPT', datetime.now().isoformat(), None)
|
||||
InvenTreeSetting.set_setting(
|
||||
f'_{task_name}_ATTEMPT', datetime.now().isoformat(), None
|
||||
)
|
||||
|
||||
|
||||
def record_task_success(task_name: str):
|
||||
"""Record that a multi-day task was successful *now*"""
|
||||
"""Record that a multi-day task was successful *now*."""
|
||||
from common.models import InvenTreeSetting
|
||||
|
||||
InvenTreeSetting.set_setting(f'_{task_name}_SUCCESS', datetime.now().isoformat(), None)
|
||||
InvenTreeSetting.set_setting(
|
||||
f'_{task_name}_SUCCESS', datetime.now().isoformat(), None
|
||||
)
|
||||
|
||||
|
||||
def offload_task(taskname, *args, force_async=False, force_sync=False, **kwargs) -> bool:
|
||||
def offload_task(
|
||||
taskname, *args, force_async=False, force_sync=False, **kwargs
|
||||
) -> bool:
|
||||
"""Create an AsyncTask if workers are running. This is different to a 'scheduled' task, in that it only runs once!
|
||||
|
||||
If workers are not running or force_sync flag, is set then the task is ran synchronously.
|
||||
@@ -202,7 +215,6 @@ def offload_task(taskname, *args, force_async=False, force_sync=False, **kwargs)
|
||||
raise_warning(f"WARNING: '{taskname}' not offloaded due to {str(exc)}")
|
||||
return False
|
||||
else:
|
||||
|
||||
if callable(taskname):
|
||||
# function was passed - use that
|
||||
_func = taskname
|
||||
@@ -212,14 +224,18 @@ def offload_task(taskname, *args, force_async=False, force_sync=False, **kwargs)
|
||||
app, mod, func = taskname.split('.')
|
||||
app_mod = app + '.' + mod
|
||||
except ValueError:
|
||||
raise_warning(f"WARNING: '{taskname}' not started - Malformed function path")
|
||||
raise_warning(
|
||||
f"WARNING: '{taskname}' not started - Malformed function path"
|
||||
)
|
||||
return False
|
||||
|
||||
# Import module from app
|
||||
try:
|
||||
_mod = importlib.import_module(app_mod)
|
||||
except ModuleNotFoundError:
|
||||
raise_warning(f"WARNING: '{taskname}' not started - No module named '{app_mod}'")
|
||||
raise_warning(
|
||||
f"WARNING: '{taskname}' not started - No module named '{app_mod}'"
|
||||
)
|
||||
return False
|
||||
|
||||
# Retrieve function
|
||||
@@ -233,7 +249,9 @@ def offload_task(taskname, *args, force_async=False, force_sync=False, **kwargs)
|
||||
if not _func:
|
||||
_func = eval(func) # pragma: no cover
|
||||
except NameError:
|
||||
raise_warning(f"WARNING: '{taskname}' not started - No function named '{func}'")
|
||||
raise_warning(
|
||||
f"WARNING: '{taskname}' not started - No function named '{func}'"
|
||||
)
|
||||
return False
|
||||
|
||||
# Workers are not running: run it as synchronous task
|
||||
@@ -260,19 +278,20 @@ class ScheduledTask:
|
||||
interval: str
|
||||
minutes: int = None
|
||||
|
||||
MINUTES = "I"
|
||||
HOURLY = "H"
|
||||
DAILY = "D"
|
||||
WEEKLY = "W"
|
||||
MONTHLY = "M"
|
||||
QUARTERLY = "Q"
|
||||
YEARLY = "Y"
|
||||
MINUTES = 'I'
|
||||
HOURLY = 'H'
|
||||
DAILY = 'D'
|
||||
WEEKLY = 'W'
|
||||
MONTHLY = 'M'
|
||||
QUARTERLY = 'Q'
|
||||
YEARLY = 'Y'
|
||||
TYPE = [MINUTES, HOURLY, DAILY, WEEKLY, MONTHLY, QUARTERLY, YEARLY]
|
||||
|
||||
|
||||
class TaskRegister:
|
||||
"""Registry for periodic tasks."""
|
||||
task_list: List[ScheduledTask] = []
|
||||
|
||||
task_list: list[ScheduledTask] = []
|
||||
|
||||
def register(self, task, schedule, minutes: int = None):
|
||||
"""Register a task with the que."""
|
||||
@@ -317,6 +336,7 @@ def scheduled_task(interval: str, minutes: int = None, tasklist: TaskRegister =
|
||||
_tasks.register(admin_class, interval, minutes=minutes)
|
||||
|
||||
return admin_class
|
||||
|
||||
return _task_wrapper
|
||||
|
||||
|
||||
@@ -327,9 +347,9 @@ def heartbeat():
|
||||
(There is probably a less "hacky" way of achieving this)?
|
||||
"""
|
||||
try:
|
||||
from django_q.models import Success
|
||||
from django_q.models import OrmQ, Success
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.info("Could not perform heartbeat task - App registry not ready")
|
||||
logger.info('Could not perform heartbeat task - App registry not ready')
|
||||
return
|
||||
|
||||
threshold = timezone.now() - timedelta(minutes=30)
|
||||
@@ -337,16 +357,20 @@ def heartbeat():
|
||||
# Delete heartbeat results more than half an hour old,
|
||||
# otherwise they just create extra noise
|
||||
heartbeats = Success.objects.filter(
|
||||
func='InvenTree.tasks.heartbeat',
|
||||
started__lte=threshold
|
||||
func='InvenTree.tasks.heartbeat', started__lte=threshold
|
||||
)
|
||||
|
||||
heartbeats.delete()
|
||||
|
||||
# Clear out any other pending heartbeat tasks
|
||||
for task in OrmQ.objects.all():
|
||||
if task.func() == 'InvenTree.tasks.heartbeat':
|
||||
task.delete()
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
def delete_successful_tasks():
|
||||
"""Delete successful task logs which are older than a specified period"""
|
||||
"""Delete successful task logs which are older than a specified period."""
|
||||
try:
|
||||
from django_q.models import Success
|
||||
|
||||
@@ -356,21 +380,21 @@ def delete_successful_tasks():
|
||||
threshold = timezone.now() - timedelta(days=days)
|
||||
|
||||
# Delete successful tasks
|
||||
results = Success.objects.filter(
|
||||
started__lte=threshold
|
||||
)
|
||||
results = Success.objects.filter(started__lte=threshold)
|
||||
|
||||
if results.count() > 0:
|
||||
logger.info("Deleting %s successful task records", results.count())
|
||||
logger.info('Deleting %s successful task records', results.count())
|
||||
results.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.info("Could not perform 'delete_successful_tasks' - App registry not ready")
|
||||
logger.info(
|
||||
"Could not perform 'delete_successful_tasks' - App registry not ready"
|
||||
)
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
def delete_failed_tasks():
|
||||
"""Delete failed task logs which are older than a specified period"""
|
||||
"""Delete failed task logs which are older than a specified period."""
|
||||
try:
|
||||
from django_q.models import Failure
|
||||
|
||||
@@ -380,12 +404,10 @@ def delete_failed_tasks():
|
||||
threshold = timezone.now() - timedelta(days=days)
|
||||
|
||||
# Delete failed tasks
|
||||
results = Failure.objects.filter(
|
||||
started__lte=threshold
|
||||
)
|
||||
results = Failure.objects.filter(started__lte=threshold)
|
||||
|
||||
if results.count() > 0:
|
||||
logger.info("Deleting %s failed task records", results.count())
|
||||
logger.info('Deleting %s failed task records', results.count())
|
||||
results.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
@@ -403,47 +425,48 @@ def delete_old_error_logs():
|
||||
days = InvenTreeSetting.get_setting('INVENTREE_DELETE_ERRORS_DAYS', 30)
|
||||
threshold = timezone.now() - timedelta(days=days)
|
||||
|
||||
errors = Error.objects.filter(
|
||||
when__lte=threshold,
|
||||
)
|
||||
errors = Error.objects.filter(when__lte=threshold)
|
||||
|
||||
if errors.count() > 0:
|
||||
logger.info("Deleting %s old error logs", errors.count())
|
||||
logger.info('Deleting %s old error logs', errors.count())
|
||||
errors.delete()
|
||||
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
# Apps not yet loaded
|
||||
logger.info("Could not perform 'delete_old_error_logs' - App registry not ready")
|
||||
logger.info(
|
||||
"Could not perform 'delete_old_error_logs' - App registry not ready"
|
||||
)
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
def delete_old_notifications():
|
||||
"""Delete old notification logs"""
|
||||
"""Delete old notification logs."""
|
||||
try:
|
||||
from common.models import (InvenTreeSetting, NotificationEntry,
|
||||
NotificationMessage)
|
||||
from common.models import (
|
||||
InvenTreeSetting,
|
||||
NotificationEntry,
|
||||
NotificationMessage,
|
||||
)
|
||||
|
||||
days = InvenTreeSetting.get_setting('INVENTREE_DELETE_NOTIFICATIONS_DAYS', 30)
|
||||
threshold = timezone.now() - timedelta(days=days)
|
||||
|
||||
items = NotificationEntry.objects.filter(
|
||||
updated__lte=threshold
|
||||
)
|
||||
items = NotificationEntry.objects.filter(updated__lte=threshold)
|
||||
|
||||
if items.count() > 0:
|
||||
logger.info("Deleted %s old notification entries", items.count())
|
||||
logger.info('Deleted %s old notification entries', items.count())
|
||||
items.delete()
|
||||
|
||||
items = NotificationMessage.objects.filter(
|
||||
creation__lte=threshold
|
||||
)
|
||||
items = NotificationMessage.objects.filter(creation__lte=threshold)
|
||||
|
||||
if items.count() > 0:
|
||||
logger.info("Deleted %s old notification messages", items.count())
|
||||
logger.info('Deleted %s old notification messages', items.count())
|
||||
items.delete()
|
||||
|
||||
except AppRegistryNotReady:
|
||||
logger.info("Could not perform 'delete_old_notifications' - App registry not ready")
|
||||
logger.info(
|
||||
"Could not perform 'delete_old_notifications' - App registry not ready"
|
||||
)
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
@@ -451,18 +474,23 @@ def check_for_updates():
|
||||
"""Check if there is an update for InvenTree."""
|
||||
try:
|
||||
import common.models
|
||||
from common.notifications import trigger_superuser_notification
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
# Apps not yet loaded!
|
||||
logger.info("Could not perform 'check_for_updates' - App registry not ready")
|
||||
return
|
||||
|
||||
interval = int(common.models.InvenTreeSetting.get_setting('INVENTREE_UPDATE_CHECK_INTERVAL', 7, cache=False))
|
||||
interval = int(
|
||||
common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_UPDATE_CHECK_INTERVAL', 7, cache=False
|
||||
)
|
||||
)
|
||||
|
||||
# Check if we should check for updates *today*
|
||||
if not check_daily_holdoff('check_for_updates', interval):
|
||||
return
|
||||
|
||||
logger.info("Checking for InvenTree software updates")
|
||||
logger.info('Checking for InvenTree software updates')
|
||||
|
||||
headers = {}
|
||||
|
||||
@@ -471,15 +499,17 @@ def check_for_updates():
|
||||
token = os.getenv('GITHUB_TOKEN', None)
|
||||
|
||||
if token:
|
||||
headers['Authorization'] = f"Bearer {token}"
|
||||
headers['Authorization'] = f'Bearer {token}'
|
||||
|
||||
response = requests.get(
|
||||
'https://api.github.com/repos/inventree/inventree/releases/latest',
|
||||
headers=headers
|
||||
headers=headers,
|
||||
)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise ValueError(f'Unexpected status code from GitHub API: {response.status_code}') # pragma: no cover
|
||||
raise ValueError(
|
||||
f'Unexpected status code from GitHub API: {response.status_code}'
|
||||
) # pragma: no cover
|
||||
|
||||
data = json.loads(response.text)
|
||||
|
||||
@@ -488,7 +518,7 @@ def check_for_updates():
|
||||
if not tag:
|
||||
raise ValueError("'tag_name' missing from GitHub response") # pragma: no cover
|
||||
|
||||
match = re.match(r"^.*(\d+)\.(\d+)\.(\d+).*$", tag)
|
||||
match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', tag)
|
||||
|
||||
if len(match.groups()) != 3: # pragma: no cover
|
||||
logger.warning("Version '%s' did not match expected pattern", tag)
|
||||
@@ -502,19 +532,32 @@ def check_for_updates():
|
||||
logger.info("Latest InvenTree version: '%s'", tag)
|
||||
|
||||
# Save the version to the database
|
||||
common.models.InvenTreeSetting.set_setting(
|
||||
'_INVENTREE_LATEST_VERSION',
|
||||
tag,
|
||||
None
|
||||
)
|
||||
common.models.InvenTreeSetting.set_setting('_INVENTREE_LATEST_VERSION', tag, None)
|
||||
|
||||
# Record that this task was successful
|
||||
record_task_success('check_for_updates')
|
||||
|
||||
# Send notification if there is a new version
|
||||
if not isInvenTreeUpToDate():
|
||||
logger.warning('InvenTree is not up-to-date, sending notification')
|
||||
|
||||
plg = registry.get_plugin('InvenTreeCoreNotificationsPlugin')
|
||||
if not plg:
|
||||
logger.warning('Cannot send notification - plugin not found')
|
||||
return
|
||||
plg = plg.plugin_config()
|
||||
if not plg:
|
||||
logger.warning('Cannot send notification - plugin config not found')
|
||||
return
|
||||
# Send notification
|
||||
trigger_superuser_notification(
|
||||
plg, f'An update for InvenTree to version {tag} is available'
|
||||
)
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
def update_exchange_rates(force: bool = False):
|
||||
"""Update currency exchange rates
|
||||
"""Update currency exchange rates.
|
||||
|
||||
Arguments:
|
||||
force: If True, force the update to run regardless of the last update time
|
||||
@@ -527,17 +570,21 @@ def update_exchange_rates(force: bool = False):
|
||||
from InvenTree.exchange import InvenTreeExchange
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
# Apps not yet loaded!
|
||||
logger.info("Could not perform 'update_exchange_rates' - App registry not ready")
|
||||
logger.info(
|
||||
"Could not perform 'update_exchange_rates' - App registry not ready"
|
||||
)
|
||||
return
|
||||
except Exception as exc: # pragma: no cover
|
||||
logger.info("Could not perform 'update_exchange_rates' - %s", exc)
|
||||
return
|
||||
|
||||
if not force:
|
||||
interval = int(InvenTreeSetting.get_setting('CURRENCY_UPDATE_INTERVAL', 1, cache=False))
|
||||
interval = int(
|
||||
InvenTreeSetting.get_setting('CURRENCY_UPDATE_INTERVAL', 1, cache=False)
|
||||
)
|
||||
|
||||
if not check_daily_holdoff('update_exchange_rates', interval):
|
||||
logger.info("Skipping exchange rate update (interval not reached)")
|
||||
logger.info('Skipping exchange rate update (interval not reached)')
|
||||
return
|
||||
|
||||
backend = InvenTreeExchange()
|
||||
@@ -548,15 +595,17 @@ def update_exchange_rates(force: bool = False):
|
||||
backend.update_rates(base_currency=base)
|
||||
|
||||
# Remove any exchange rates which are not in the provided currencies
|
||||
Rate.objects.filter(backend="InvenTreeExchange").exclude(currency__in=currency_codes()).delete()
|
||||
Rate.objects.filter(backend='InvenTreeExchange').exclude(
|
||||
currency__in=currency_codes()
|
||||
).delete()
|
||||
|
||||
# Record successful task execution
|
||||
record_task_success('update_exchange_rates')
|
||||
|
||||
except (AppRegistryNotReady, OperationalError, ProgrammingError):
|
||||
logger.warning("Could not update exchange rates - database not ready")
|
||||
logger.warning('Could not update exchange rates - database not ready')
|
||||
except Exception as e: # pragma: no cover
|
||||
logger.exception("Error updating exchange rates: %s", str(type(e)))
|
||||
logger.exception('Error updating exchange rates: %s', str(type(e)))
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
@@ -568,16 +617,20 @@ def run_backup():
|
||||
# Backups are not enabled - exit early
|
||||
return
|
||||
|
||||
interval = int(InvenTreeSetting.get_setting('INVENTREE_BACKUP_DAYS', 1, cache=False))
|
||||
interval = int(
|
||||
InvenTreeSetting.get_setting('INVENTREE_BACKUP_DAYS', 1, cache=False)
|
||||
)
|
||||
|
||||
# Check if should run this task *today*
|
||||
if not check_daily_holdoff('run_backup', interval):
|
||||
return
|
||||
|
||||
logger.info("Performing automated database backup task")
|
||||
logger.info('Performing automated database backup task')
|
||||
|
||||
call_command("dbbackup", noinput=True, clean=True, compress=True, interactive=False)
|
||||
call_command("mediabackup", noinput=True, clean=True, compress=True, interactive=False)
|
||||
call_command('dbbackup', noinput=True, clean=True, compress=True, interactive=False)
|
||||
call_command(
|
||||
'mediabackup', noinput=True, clean=True, compress=True, interactive=False
|
||||
)
|
||||
|
||||
# Record that this task was successful
|
||||
record_task_success('run_backup')
|
||||
@@ -591,7 +644,7 @@ def get_migration_plan():
|
||||
|
||||
|
||||
@scheduled_task(ScheduledTask.DAILY)
|
||||
def check_for_migrations():
|
||||
def check_for_migrations(force: bool = False, reload_registry: bool = True):
|
||||
"""Checks if migrations are needed.
|
||||
|
||||
If the setting auto_update is enabled we will start updating.
|
||||
@@ -600,15 +653,15 @@ def check_for_migrations():
|
||||
from plugin import registry
|
||||
|
||||
def set_pending_migrations(n: int):
|
||||
"""Helper function to inform the user about pending migrations"""
|
||||
|
||||
"""Helper function to inform the user about pending migrations."""
|
||||
logger.info('There are %s pending migrations', n)
|
||||
InvenTreeSetting.set_setting('_PENDING_MIGRATIONS', n, None)
|
||||
|
||||
logger.info("Checking for pending database migrations")
|
||||
logger.info('Checking for pending database migrations')
|
||||
|
||||
# Force plugin registry reload
|
||||
registry.check_reload()
|
||||
if reload_registry:
|
||||
# Force plugin registry reload
|
||||
registry.check_reload()
|
||||
|
||||
plan = get_migration_plan()
|
||||
|
||||
@@ -622,13 +675,13 @@ def check_for_migrations():
|
||||
set_pending_migrations(n)
|
||||
|
||||
# Test if auto-updates are enabled
|
||||
if not get_setting('INVENTREE_AUTO_UPDATE', 'auto_update'):
|
||||
logger.info("Auto-update is disabled - skipping migrations")
|
||||
if not force and not get_setting('INVENTREE_AUTO_UPDATE', 'auto_update'):
|
||||
logger.info('Auto-update is disabled - skipping migrations')
|
||||
return
|
||||
|
||||
# Log open migrations
|
||||
for migration in plan:
|
||||
logger.info("- %s", str(migration[0]))
|
||||
logger.info('- %s', str(migration[0]))
|
||||
|
||||
# Set the application to maintenance mode - no access from now on.
|
||||
set_maintenance_mode(True)
|
||||
@@ -646,14 +699,15 @@ def check_for_migrations():
|
||||
else:
|
||||
set_pending_migrations(0)
|
||||
|
||||
logger.info("Completed %s migrations", n)
|
||||
logger.info('Completed %s migrations', n)
|
||||
|
||||
# Make sure we are out of maintenance mode
|
||||
if get_maintenance_mode():
|
||||
logger.warning("Maintenance mode was not disabled - forcing it now")
|
||||
logger.warning('Maintenance mode was not disabled - forcing it now')
|
||||
set_maintenance_mode(False)
|
||||
logger.info("Manually released maintenance mode")
|
||||
logger.info('Manually released maintenance mode')
|
||||
|
||||
# We should be current now - triggering full reload to make sure all models
|
||||
# are loaded fully in their new state.
|
||||
registry.reload_plugins(full_reload=True, force_reload=True, collect=True)
|
||||
if reload_registry:
|
||||
# We should be current now - triggering full reload to make sure all models
|
||||
# are loaded fully in their new state.
|
||||
registry.reload_plugins(full_reload=True, force_reload=True, collect=True)
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
"""Custom template loader for InvenTree"""
|
||||
"""Custom template loader for InvenTree."""
|
||||
|
||||
import os
|
||||
|
||||
@@ -8,7 +8,7 @@ from django.template.loaders.cached import Loader as CachedLoader
|
||||
|
||||
|
||||
class InvenTreeTemplateLoader(CachedLoader):
|
||||
"""Custom template loader which bypasses cache for PDF export"""
|
||||
"""Custom template loader which bypasses cache for PDF export."""
|
||||
|
||||
def get_template(self, template_name, skip=None):
|
||||
"""Return a template object for the given template name.
|
||||
|
||||
1
InvenTree/InvenTree/templatetags/__init__.py
Normal file
1
InvenTree/InvenTree/templatetags/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""Custom InvenTree template tags for HTML template rendering."""
|
||||
@@ -17,7 +17,7 @@ register = template.Library()
|
||||
|
||||
@register.simple_tag()
|
||||
def translation_stats(lang_code):
|
||||
"""Return the translation percentage for the given language code"""
|
||||
"""Return the translation percentage for the given language code."""
|
||||
if lang_code is None:
|
||||
return None
|
||||
|
||||
@@ -25,10 +25,18 @@ def translation_stats(lang_code):
|
||||
|
||||
|
||||
class CustomTranslateNode(TranslateNode):
|
||||
"""Custom translation node class, which sanitizes the translated strings for javascript use"""
|
||||
"""Custom translation node class, which sanitizes the translated strings for javascript use."""
|
||||
|
||||
def __init__(self, filter_expression, noop, asvar, message_context, escape=False):
|
||||
"""Custom constructor for TranslateNode class.
|
||||
|
||||
- Adds an 'escape' argument, which is passed to the render function
|
||||
"""
|
||||
super().__init__(filter_expression, noop, asvar, message_context)
|
||||
self.escape = escape
|
||||
|
||||
def render(self, context):
|
||||
"""Custom render function overrides / extends default behaviour"""
|
||||
"""Custom render function overrides / extends default behaviour."""
|
||||
result = super().render(context)
|
||||
|
||||
result = bleach.clean(result)
|
||||
@@ -41,20 +49,35 @@ class CustomTranslateNode(TranslateNode):
|
||||
for c in ['\\', '`', ';', '|', '&']:
|
||||
result = result.replace(c, '')
|
||||
|
||||
# Escape any quotes contained in the string
|
||||
result = result.replace("'", r"\'")
|
||||
result = result.replace('"', r'\"')
|
||||
# Escape any quotes contained in the string, if the request is for a javascript file
|
||||
request = context.get('request', None)
|
||||
|
||||
template = getattr(context, 'template_name', None)
|
||||
request = context.get('request', None)
|
||||
|
||||
escape = self.escape
|
||||
|
||||
if template and str(template).endswith('.js'):
|
||||
escape = True
|
||||
|
||||
if request and str(request.path).endswith('.js'):
|
||||
escape = True
|
||||
|
||||
if escape:
|
||||
result = result.replace("'", r'\'')
|
||||
result = result.replace('"', r'\"')
|
||||
|
||||
# Return the 'clean' resulting string
|
||||
return result
|
||||
|
||||
|
||||
@register.tag("translate")
|
||||
@register.tag("trans")
|
||||
@register.tag('translate')
|
||||
@register.tag('trans')
|
||||
def do_translate(parser, token):
|
||||
"""Custom translation function, lifted from https://github.com/django/django/blob/main/django/templatetags/i18n.py
|
||||
"""Custom translation function.
|
||||
|
||||
The only difference is that we pass this to our custom rendering node class
|
||||
- Lifted from https://github.com/django/django/blob/main/django/templatetags/i18n.py.
|
||||
- The only difference is that we pass this to our custom rendering node class
|
||||
"""
|
||||
bits = token.split_contents()
|
||||
if len(bits) < 2:
|
||||
@@ -62,21 +85,22 @@ def do_translate(parser, token):
|
||||
message_string = parser.compile_filter(bits[1])
|
||||
remaining = bits[2:]
|
||||
|
||||
escape = False
|
||||
noop = False
|
||||
asvar = None
|
||||
message_context = None
|
||||
seen = set()
|
||||
invalid_context = {"as", "noop"}
|
||||
invalid_context = {'as', 'noop'}
|
||||
|
||||
while remaining:
|
||||
option = remaining.pop(0)
|
||||
if option in seen:
|
||||
raise TemplateSyntaxError(
|
||||
"The '%s' option was specified more than once." % option,
|
||||
"The '%s' option was specified more than once." % option
|
||||
)
|
||||
elif option == "noop":
|
||||
elif option == 'noop':
|
||||
noop = True
|
||||
elif option == "context":
|
||||
elif option == 'context':
|
||||
try:
|
||||
value = remaining.pop(0)
|
||||
except IndexError:
|
||||
@@ -87,10 +111,10 @@ def do_translate(parser, token):
|
||||
if value in invalid_context:
|
||||
raise TemplateSyntaxError(
|
||||
"Invalid argument '%s' provided to the '%s' tag for the context "
|
||||
"option" % (value, bits[0]),
|
||||
'option' % (value, bits[0])
|
||||
)
|
||||
message_context = parser.compile_filter(value)
|
||||
elif option == "as":
|
||||
elif option == 'as':
|
||||
try:
|
||||
value = remaining.pop(0)
|
||||
except IndexError:
|
||||
@@ -98,33 +122,42 @@ def do_translate(parser, token):
|
||||
"No argument provided to the '%s' tag for the as option." % bits[0]
|
||||
)
|
||||
asvar = value
|
||||
elif option == 'escape':
|
||||
escape = True
|
||||
else:
|
||||
raise TemplateSyntaxError(
|
||||
"Unknown argument for '%s' tag: '%s'. The only options "
|
||||
"available are 'noop', 'context' \"xxx\", and 'as VAR'."
|
||||
% (
|
||||
bits[0],
|
||||
option,
|
||||
)
|
||||
% (bits[0], option)
|
||||
)
|
||||
seen.add(option)
|
||||
|
||||
return CustomTranslateNode(message_string, noop, asvar, message_context)
|
||||
return CustomTranslateNode(
|
||||
message_string, noop, asvar, message_context, escape=escape
|
||||
)
|
||||
|
||||
|
||||
# Re-register tags which we have not explicitly overridden
|
||||
register.tag("blocktrans", django.templatetags.i18n.do_block_translate)
|
||||
register.tag("blocktranslate", django.templatetags.i18n.do_block_translate)
|
||||
register.tag('blocktrans', django.templatetags.i18n.do_block_translate)
|
||||
register.tag('blocktranslate', django.templatetags.i18n.do_block_translate)
|
||||
|
||||
register.tag("language", django.templatetags.i18n.language)
|
||||
register.tag('language', django.templatetags.i18n.language)
|
||||
|
||||
register.tag("get_available_languages", django.templatetags.i18n.do_get_available_languages)
|
||||
register.tag("get_language_info", django.templatetags.i18n.do_get_language_info)
|
||||
register.tag("get_language_info_list", django.templatetags.i18n.do_get_language_info_list)
|
||||
register.tag("get_current_language", django.templatetags.i18n.do_get_current_language)
|
||||
register.tag("get_current_language_bidi", django.templatetags.i18n.do_get_current_language_bidi)
|
||||
register.tag(
|
||||
'get_available_languages', django.templatetags.i18n.do_get_available_languages
|
||||
)
|
||||
register.tag('get_language_info', django.templatetags.i18n.do_get_language_info)
|
||||
register.tag(
|
||||
'get_language_info_list', django.templatetags.i18n.do_get_language_info_list
|
||||
)
|
||||
register.tag('get_current_language', django.templatetags.i18n.do_get_current_language)
|
||||
register.tag(
|
||||
'get_current_language_bidi', django.templatetags.i18n.do_get_current_language_bidi
|
||||
)
|
||||
|
||||
register.filter("language_name", django.templatetags.i18n.language_name)
|
||||
register.filter("language_name_translated", django.templatetags.i18n.language_name_translated)
|
||||
register.filter("language_name_local", django.templatetags.i18n.language_name_local)
|
||||
register.filter("language_bidi", django.templatetags.i18n.language_bidi)
|
||||
register.filter('language_name', django.templatetags.i18n.language_name)
|
||||
register.filter(
|
||||
'language_name_translated', django.templatetags.i18n.language_name_translated
|
||||
)
|
||||
register.filter('language_name_local', django.templatetags.i18n.language_name_local)
|
||||
register.filter('language_bidi', django.templatetags.i18n.language_bidi)
|
||||
@@ -55,7 +55,6 @@ def render_date(context, date_object):
|
||||
return None
|
||||
|
||||
if isinstance(date_object, str):
|
||||
|
||||
date_object = date_object.strip()
|
||||
|
||||
# Check for empty string
|
||||
@@ -66,29 +65,25 @@ def render_date(context, date_object):
|
||||
try:
|
||||
date_object = date.fromisoformat(date_object)
|
||||
except ValueError:
|
||||
logger.warning("Tried to convert invalid date string: %s", date_object)
|
||||
logger.warning('Tried to convert invalid date string: %s', date_object)
|
||||
return None
|
||||
|
||||
# We may have already pre-cached the date format by calling this already!
|
||||
user_date_format = context.get('user_date_format', None)
|
||||
|
||||
if user_date_format is None:
|
||||
|
||||
user = context.get('user', None)
|
||||
|
||||
if user and user.is_authenticated:
|
||||
# User is specified - look for their date display preference
|
||||
user_date_format = common.models.InvenTreeUserSetting.get_setting('DATE_DISPLAY_FORMAT', user=user)
|
||||
user_date_format = common.models.InvenTreeUserSetting.get_setting(
|
||||
'DATE_DISPLAY_FORMAT', user=user
|
||||
)
|
||||
else:
|
||||
user_date_format = 'YYYY-MM-DD'
|
||||
|
||||
# Convert the format string to Pythonic equivalent
|
||||
replacements = [
|
||||
('YYYY', '%Y'),
|
||||
('MMM', '%b'),
|
||||
('MM', '%m'),
|
||||
('DD', '%d'),
|
||||
]
|
||||
replacements = [('YYYY', '%Y'), ('MMM', '%b'), ('MM', '%m'), ('DD', '%d')]
|
||||
|
||||
for o, n in replacements:
|
||||
user_date_format = user_date_format.replace(o, n)
|
||||
@@ -103,7 +98,7 @@ def render_date(context, date_object):
|
||||
|
||||
@register.simple_tag
|
||||
def render_currency(money, **kwargs):
|
||||
"""Render a currency / Money object"""
|
||||
"""Render a currency / Money object."""
|
||||
return InvenTree.helpers_model.render_currency(money, **kwargs)
|
||||
|
||||
|
||||
@@ -127,7 +122,7 @@ def to_list(*args):
|
||||
|
||||
@register.simple_tag()
|
||||
def part_allocation_count(build, part, *args, **kwargs):
|
||||
"""Return the total number of <part> allocated to <build>"""
|
||||
"""Return the total number of <part> allocated to <build>."""
|
||||
return InvenTree.helpers.decimal2string(build.getAllocatedQuantity(part))
|
||||
|
||||
|
||||
@@ -160,6 +155,12 @@ def plugins_enabled(*args, **kwargs):
|
||||
return djangosettings.PLUGINS_ENABLED
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def plugins_install_disabled(*args, **kwargs):
|
||||
"""Return True if plugin install is disabled for the server instance."""
|
||||
return djangosettings.PLUGINS_INSTALL_DISABLED
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def plugins_info(*args, **kwargs):
|
||||
"""Return information about activated plugins."""
|
||||
@@ -171,11 +172,8 @@ def plugins_info(*args, **kwargs):
|
||||
plug_list = [plg for plg in registry.plugins.values() if plg.plugin_config().active]
|
||||
# Format list
|
||||
return [
|
||||
{
|
||||
'name': plg.name,
|
||||
'slug': plg.slug,
|
||||
'version': plg.version
|
||||
} for plg in plug_list
|
||||
{'name': plg.name, 'slug': plg.slug, 'version': plg.version}
|
||||
for plg in plug_list
|
||||
]
|
||||
|
||||
|
||||
@@ -193,7 +191,7 @@ def inventree_instance_name(*args, **kwargs):
|
||||
|
||||
@register.simple_tag()
|
||||
def inventree_title(*args, **kwargs):
|
||||
"""Return the title for the current instance - respecting the settings"""
|
||||
"""Return the title for the current instance - respecting the settings."""
|
||||
return version.inventreeInstanceTitle()
|
||||
|
||||
|
||||
@@ -214,7 +212,7 @@ def inventree_splash(**kwargs):
|
||||
|
||||
@register.simple_tag()
|
||||
def inventree_base_url(*args, **kwargs):
|
||||
"""Return the base URL of the InvenTree server"""
|
||||
"""Return the base URL of the InvenTree server."""
|
||||
return InvenTree.helpers_model.get_base_url()
|
||||
|
||||
|
||||
@@ -228,25 +226,25 @@ def python_version(*args, **kwargs):
|
||||
def inventree_version(shortstring=False, *args, **kwargs):
|
||||
"""Return InvenTree version string."""
|
||||
if shortstring:
|
||||
return _(f"{version.inventreeInstanceTitle()} v{version.inventreeVersion()}")
|
||||
return f'{version.inventreeInstanceTitle()} v{version.inventreeVersion()}'
|
||||
return version.inventreeVersion()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def inventree_is_development(*args, **kwargs):
|
||||
"""Returns True if this is a development version of InvenTree"""
|
||||
"""Returns True if this is a development version of InvenTree."""
|
||||
return version.isInvenTreeDevelopmentVersion()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def inventree_is_release(*args, **kwargs):
|
||||
"""Returns True if this is a release version of InvenTree"""
|
||||
"""Returns True if this is a release version of InvenTree."""
|
||||
return not version.isInvenTreeDevelopmentVersion()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def inventree_docs_version(*args, **kwargs):
|
||||
"""Returns the InvenTree documentation version"""
|
||||
"""Returns the InvenTree documentation version."""
|
||||
return version.inventreeDocsVersion()
|
||||
|
||||
|
||||
@@ -342,15 +340,24 @@ def setting_object(key, *args, **kwargs):
|
||||
|
||||
plg = kwargs['plugin']
|
||||
if issubclass(plg.__class__, InvenTreePlugin):
|
||||
plg = plg.plugin_config()
|
||||
try:
|
||||
plg = plg.plugin_config()
|
||||
except plugin.models.PluginConfig.DoesNotExist:
|
||||
return None
|
||||
|
||||
return plugin.models.PluginSetting.get_setting_object(key, plugin=plg, cache=cache)
|
||||
return plugin.models.PluginSetting.get_setting_object(
|
||||
key, plugin=plg, cache=cache
|
||||
)
|
||||
|
||||
elif 'method' in kwargs:
|
||||
return plugin.models.NotificationUserSetting.get_setting_object(key, user=kwargs['user'], method=kwargs['method'], cache=cache)
|
||||
return plugin.models.NotificationUserSetting.get_setting_object(
|
||||
key, user=kwargs['user'], method=kwargs['method'], cache=cache
|
||||
)
|
||||
|
||||
elif 'user' in kwargs:
|
||||
return common.models.InvenTreeUserSetting.get_setting_object(key, user=kwargs['user'], cache=cache)
|
||||
return common.models.InvenTreeUserSetting.get_setting_object(
|
||||
key, user=kwargs['user'], cache=cache
|
||||
)
|
||||
|
||||
else:
|
||||
return common.models.InvenTreeSetting.get_setting_object(key, cache=cache)
|
||||
@@ -360,7 +367,9 @@ def setting_object(key, *args, **kwargs):
|
||||
def settings_value(key, *args, **kwargs):
|
||||
"""Return a settings value specified by the given key."""
|
||||
if 'user' in kwargs:
|
||||
if not kwargs['user'] or (kwargs['user'] and kwargs['user'].is_authenticated is False):
|
||||
if not kwargs['user'] or (
|
||||
kwargs['user'] and kwargs['user'].is_authenticated is False
|
||||
):
|
||||
return common.models.InvenTreeUserSetting.get_setting(key)
|
||||
return common.models.InvenTreeUserSetting.get_setting(key, user=kwargs['user'])
|
||||
|
||||
@@ -418,7 +427,7 @@ def progress_bar(val, max_val, *args, **kwargs):
|
||||
style_tags.append(f'max-width: {max_width};')
|
||||
|
||||
html = f"""
|
||||
<div id='{item_id}' class='progress' style='{" ".join(style_tags)}'>
|
||||
<div id='{item_id}' class='progress' style='{' '.join(style_tags)}'>
|
||||
<div class='progress-bar {style}' role='progressbar' aria-valuemin='0' aria-valuemax='100' style='width:{percent}%'></div>
|
||||
<div class='progress-value'>{val} / {max_val}</div>
|
||||
</div>
|
||||
@@ -429,7 +438,7 @@ def progress_bar(val, max_val, *args, **kwargs):
|
||||
|
||||
@register.simple_tag()
|
||||
def get_color_theme_css(username):
|
||||
"""Return the custom theme .css file for the selected user"""
|
||||
"""Return the custom theme .css file for the selected user."""
|
||||
user_theme_name = get_user_color_theme(username)
|
||||
# Build path to CSS sheet
|
||||
inventree_css_sheet = os.path.join('css', 'color-themes', user_theme_name + '.css')
|
||||
@@ -443,7 +452,6 @@ def get_color_theme_css(username):
|
||||
@register.simple_tag()
|
||||
def get_user_color_theme(username):
|
||||
"""Get current user color theme."""
|
||||
|
||||
from common.models import ColorTheme
|
||||
|
||||
try:
|
||||
@@ -465,10 +473,7 @@ def get_available_themes(*args, **kwargs):
|
||||
from common.models import ColorTheme
|
||||
|
||||
for key, name in ColorTheme.get_color_themes_choices():
|
||||
themes.append({
|
||||
'key': key,
|
||||
'name': name
|
||||
})
|
||||
themes.append({'key': key, 'name': name})
|
||||
|
||||
return themes
|
||||
|
||||
@@ -491,7 +496,7 @@ def primitive_to_javascript(primitive):
|
||||
|
||||
@register.simple_tag()
|
||||
def js_bool(val):
|
||||
"""Return a javascript boolean value (true or false)"""
|
||||
"""Return a javascript boolean value (true or false)."""
|
||||
if val:
|
||||
return 'true'
|
||||
return 'false'
|
||||
@@ -507,17 +512,6 @@ def keyvalue(dict, key):
|
||||
return dict.get(key)
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def call_method(obj, method_name, *args):
|
||||
"""Enables calling model methods / functions from templates with arguments.
|
||||
|
||||
Usage:
|
||||
{% call_method model_object 'fnc_name' argument1 %}
|
||||
"""
|
||||
method = getattr(obj, method_name)
|
||||
return method(*args)
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def authorized_owners(group):
|
||||
"""Return authorized owners."""
|
||||
@@ -539,8 +533,11 @@ def authorized_owners(group):
|
||||
@register.simple_tag()
|
||||
def object_link(url_name, pk, ref):
|
||||
"""Return highlighted link to object."""
|
||||
ref_url = reverse(url_name, kwargs={'pk': pk})
|
||||
return mark_safe(f'<b><a href="{ref_url}">{ref}</a></b>')
|
||||
try:
|
||||
ref_url = reverse(url_name, kwargs={'pk': pk})
|
||||
return mark_safe(f'<b><a href="{ref_url}">{ref}</a></b>')
|
||||
except NoReverseMatch:
|
||||
return None
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
@@ -570,7 +567,6 @@ class I18nStaticNode(StaticNode):
|
||||
self.original = self.path.var
|
||||
|
||||
if hasattr(context, 'request'):
|
||||
|
||||
# Convert the "requested" language code to a standard format
|
||||
language_code = context.request.LANGUAGE_CODE.lower().strip()
|
||||
language_code = language_code.replace('_', '-')
|
||||
@@ -579,16 +575,11 @@ class I18nStaticNode(StaticNode):
|
||||
# - First, try the original requested code, e.g. 'pt-br'
|
||||
# - Next, try a simpler version of the code e.g. 'pt'
|
||||
# - Finally, fall back to english
|
||||
options = [
|
||||
language_code,
|
||||
language_code.split('-')[0],
|
||||
'en',
|
||||
]
|
||||
options = [language_code, language_code.split('-')[0], 'en']
|
||||
|
||||
for lng in options:
|
||||
lng_file = os.path.join(
|
||||
djangosettings.STATIC_ROOT,
|
||||
self.original.format(lng=lng)
|
||||
djangosettings.STATIC_ROOT, self.original.format(lng=lng)
|
||||
)
|
||||
|
||||
if os.path.exists(lng_file):
|
||||
@@ -605,14 +596,14 @@ if settings.DEBUG:
|
||||
|
||||
@register.simple_tag()
|
||||
def i18n_static(url_name):
|
||||
"""Simple tag to enable {% url %} functionality instead of {% static %}"""
|
||||
"""Simple tag to enable {% url %} functionality instead of {% static %}."""
|
||||
return reverse(url_name)
|
||||
|
||||
else: # pragma: no cover
|
||||
|
||||
@register.tag('i18n_static')
|
||||
def do_i18n_static(parser, token):
|
||||
"""Overrides normal static, adds language - lookup for prerenderd files #1485
|
||||
"""Overrides normal static, adds language - lookup for prerenderd files #1485.
|
||||
|
||||
Usage (like static):
|
||||
{% i18n_static path [as varname] %}
|
||||
@@ -629,8 +620,7 @@ else: # pragma: no cover
|
||||
|
||||
@register.simple_tag()
|
||||
def admin_index(user):
|
||||
"""Return a URL for the admin interface"""
|
||||
|
||||
"""Return a URL for the admin interface."""
|
||||
if not djangosettings.INVENTREE_ADMIN_ENABLED:
|
||||
return ''
|
||||
|
||||
@@ -648,24 +638,23 @@ def admin_url(user, table, pk):
|
||||
- If the user is not a staff user, an empty URL is returned
|
||||
- If the user does not have the correct permission, an empty URL is returned
|
||||
"""
|
||||
|
||||
app, model = table.strip().split('.')
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
if not djangosettings.INVENTREE_ADMIN_ENABLED:
|
||||
return ""
|
||||
return ''
|
||||
|
||||
if not user.is_staff:
|
||||
return ""
|
||||
return ''
|
||||
|
||||
# Check the user has the correct permission
|
||||
perm_string = f"{app}.change_{model}"
|
||||
perm_string = f'{app}.change_{model}'
|
||||
if not user.has_perm(perm_string):
|
||||
return ''
|
||||
|
||||
# Fallback URL
|
||||
url = reverse(f"admin:{app}_{model}_changelist")
|
||||
url = reverse(f'admin:{app}_{model}_changelist')
|
||||
|
||||
if pk:
|
||||
try:
|
||||
31
InvenTree/InvenTree/templatetags/sso.py
Normal file
31
InvenTree/InvenTree/templatetags/sso.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""This module provides template tags pertaining to SSO functionality."""
|
||||
|
||||
from django import template
|
||||
|
||||
import InvenTree.sso
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def sso_login_enabled():
|
||||
"""Return True if single-sign-on is enabled."""
|
||||
return InvenTree.sso.login_enabled()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def sso_reg_enabled():
|
||||
"""Return True if single-sign-on is enabled for self-registration."""
|
||||
return InvenTree.sso.registration_enabled()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def sso_auto_enabled():
|
||||
"""Return True if single-sign-on is enabled for auto-registration."""
|
||||
return InvenTree.sso.auto_registration_enabled()
|
||||
|
||||
|
||||
@register.simple_tag()
|
||||
def sso_check_provider(provider):
|
||||
"""Return True if the given provider is correctly configured."""
|
||||
return InvenTree.sso.check_provider(provider)
|
||||
@@ -17,6 +17,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
which raised an AssertionError when using the HTML API interface,
|
||||
while the regular JSON interface continued to work as expected.
|
||||
"""
|
||||
|
||||
roles = 'all'
|
||||
|
||||
def test_part_api(self):
|
||||
@@ -24,7 +25,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
url = reverse('api-part-list')
|
||||
|
||||
# Check JSON response
|
||||
response = self.client.get(url, HTTP_ACCEPT='application/json')
|
||||
response = self.client.get(url, headers={'accept': 'application/json'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_build_api(self):
|
||||
@@ -32,7 +33,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
url = reverse('api-build-list')
|
||||
|
||||
# Check JSON response
|
||||
response = self.client.get(url, HTTP_ACCEPT='application/json')
|
||||
response = self.client.get(url, headers={'accept': 'application/json'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_stock_api(self):
|
||||
@@ -40,7 +41,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
url = reverse('api-stock-list')
|
||||
|
||||
# Check JSON response
|
||||
response = self.client.get(url, HTTP_ACCEPT='application/json')
|
||||
response = self.client.get(url, headers={'accept': 'application/json'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_company_list(self):
|
||||
@@ -48,7 +49,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
url = reverse('api-company-list')
|
||||
|
||||
# Check JSON response
|
||||
response = self.client.get(url, HTTP_ACCEPT='application/json')
|
||||
response = self.client.get(url, headers={'accept': 'application/json'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
def test_not_found(self):
|
||||
@@ -60,12 +61,7 @@ class HTMLAPITests(InvenTreeTestCase):
|
||||
class APITests(InvenTreeAPITestCase):
|
||||
"""Tests for the InvenTree API."""
|
||||
|
||||
fixtures = [
|
||||
'location',
|
||||
'category',
|
||||
'part',
|
||||
'stock'
|
||||
]
|
||||
fixtures = ['location', 'category', 'part', 'stock']
|
||||
token = None
|
||||
auto_login = False
|
||||
|
||||
@@ -73,11 +69,11 @@ class APITests(InvenTreeAPITestCase):
|
||||
"""Helper function to use basic auth."""
|
||||
# Use basic authentication
|
||||
|
||||
authstring = bytes("{u}:{p}".format(u=self.username, p=self.password), "ascii")
|
||||
authstring = bytes('{u}:{p}'.format(u=self.username, p=self.password), 'ascii')
|
||||
|
||||
# Use "basic" auth by default
|
||||
auth = b64encode(authstring).decode("ascii")
|
||||
self.client.credentials(HTTP_AUTHORIZATION="Basic {auth}".format(auth=auth))
|
||||
auth = b64encode(authstring).decode('ascii')
|
||||
self.client.credentials(HTTP_AUTHORIZATION='Basic {auth}'.format(auth=auth))
|
||||
|
||||
def tokenAuth(self):
|
||||
"""Helper function to use token auth."""
|
||||
@@ -261,48 +257,35 @@ class APITests(InvenTreeAPITestCase):


class BulkDeleteTests(InvenTreeAPITestCase):
"""Unit tests for the BulkDelete endpoints"""
"""Unit tests for the BulkDelete endpoints."""

superuser = True

def test_errors(self):
"""Test that the correct errors are thrown"""
"""Test that the correct errors are thrown."""
url = reverse('api-stock-test-result-list')

# DELETE without any of the required fields
response = self.delete(
url,
{},
expected_code=400
)
response = self.delete(url, {}, expected_code=400)

self.assertIn('List of items or filters must be provided for bulk deletion', str(response.data))
self.assertIn(
'List of items or filters must be provided for bulk deletion',
str(response.data),
)

# DELETE with invalid 'items'
response = self.delete(
url,
{
'items': {"hello": "world"},
},
expected_code=400,
)
response = self.delete(url, {'items': {'hello': 'world'}}, expected_code=400)

self.assertIn("'items' must be supplied as a list object", str(response.data))

# DELETE with invalid 'filters'
response = self.delete(
url,
{
'filters': [1, 2, 3],
},
expected_code=400,
)
response = self.delete(url, {'filters': [1, 2, 3]}, expected_code=400)

self.assertIn("'filters' must be supplied as a dict object", str(response.data))

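The bulk-delete payloads exercised above accept either an 'items' list of primary keys or a 'filters' dict; any other shape is rejected with HTTP 400. A hedged sketch of calling such an endpoint from outside the test suite; the server URL, token and primary keys are placeholders:

import requests

API_URL = 'http://localhost:8000/api/stock/test/'       # placeholder endpoint
HEADERS = {'Authorization': 'Token <your-api-token>'}    # placeholder token

# Delete specific records by primary key
requests.delete(API_URL, json={'items': [1, 2, 3]}, headers=HEADERS)

# ...or delete everything matching a filter
requests.delete(API_URL, json={'filters': {'result': False}}, headers=HEADERS)

# Invalid shapes fail with HTTP 400, as asserted above:
#   {'items': {'hello': 'world'}}  -> "'items' must be supplied as a list object"
#   {'filters': [1, 2, 3]}         -> "'filters' must be supplied as a dict object"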
class SearchTests(InvenTreeAPITestCase):
|
||||
"""Unit tests for global search endpoint"""
|
||||
"""Unit tests for global search endpoint."""
|
||||
|
||||
fixtures = [
|
||||
'category',
|
||||
@@ -316,28 +299,19 @@ class SearchTests(InvenTreeAPITestCase):
|
||||
]
|
||||
|
||||
def test_empty(self):
|
||||
"""Test empty request"""
|
||||
data = [
|
||||
'',
|
||||
None,
|
||||
{},
|
||||
]
|
||||
"""Test empty request."""
|
||||
data = ['', None, {}]
|
||||
|
||||
for d in data:
|
||||
response = self.post(reverse('api-search'), d, expected_code=400)
|
||||
self.assertIn('Search term must be provided', str(response.data))
|
||||
|
||||
def test_results(self):
|
||||
"""Test individual result types"""
|
||||
"""Test individual result types."""
|
||||
response = self.post(
|
||||
reverse('api-search'),
|
||||
{
|
||||
'search': 'chair',
|
||||
'limit': 3,
|
||||
'part': {},
|
||||
'build': {},
|
||||
},
|
||||
expected_code=200
|
||||
{'search': 'chair', 'limit': 3, 'part': {}, 'build': {}},
|
||||
expected_code=200,
|
||||
)
|
||||
|
||||
# No build results
|
||||
@@ -354,12 +328,7 @@ class SearchTests(InvenTreeAPITestCase):
|
||||
# Search for orders
|
||||
response = self.post(
|
||||
reverse('api-search'),
|
||||
{
|
||||
'search': '01',
|
||||
'limit': 2,
|
||||
'purchaseorder': {},
|
||||
'salesorder': {},
|
||||
},
|
||||
{'search': '01', 'limit': 2, 'purchaseorder': {}, 'salesorder': {}},
|
||||
expected_code=200,
|
||||
)
|
||||
|
||||
@@ -370,7 +339,7 @@ class SearchTests(InvenTreeAPITestCase):
|
||||
self.assertNotIn('build', response.data)
|
||||
|
||||
def test_permissions(self):
|
||||
"""Test that users with insufficient permissions are handled correctly"""
|
||||
"""Test that users with insufficient permissions are handled correctly."""
|
||||
# First, remove all roles
|
||||
for ruleset in self.group.rule_sets.all():
|
||||
ruleset.can_view = False
|
||||
@@ -392,33 +361,25 @@ class SearchTests(InvenTreeAPITestCase):
|
||||
'salesorder',
|
||||
]
|
||||
|
||||
query = {
|
||||
'search': 'c',
|
||||
'limit': 3,
|
||||
}
|
||||
query = {'search': 'c', 'limit': 3}
|
||||
|
||||
for mdl in models:
|
||||
query[mdl] = {}
|
||||
|
||||
response = self.post(
|
||||
reverse('api-search'),
|
||||
query,
|
||||
expected_code=200
|
||||
)
|
||||
response = self.post(reverse('api-search'), query, expected_code=200)
|
||||
|
||||
# Check for 'permission denied' error
|
||||
for mdl in models:
|
||||
self.assertEqual(response.data[mdl]['error'], 'User does not have permission to view this model')
|
||||
self.assertEqual(
|
||||
response.data[mdl]['error'],
|
||||
'User does not have permission to view this model',
|
||||
)
|
||||
|
||||
# Assign view roles for some parts
|
||||
self.assignRole('build.view')
|
||||
self.assignRole('part.view')
|
||||
|
||||
response = self.post(
|
||||
reverse('api-search'),
|
||||
query,
|
||||
expected_code=200
|
||||
)
|
||||
response = self.post(reverse('api-search'), query, expected_code=200)
|
||||
|
||||
# Check for expected results, based on permissions
|
||||
# We expect results to be returned for the following model types
|
||||
@@ -438,4 +399,6 @@ class SearchTests(InvenTreeAPITestCase):
|
||||
self.assertIn('count', result)
|
||||
else:
|
||||
self.assertIn('error', result)
|
||||
self.assertEqual(result['error'], 'User does not have permission to view this model')
|
||||
self.assertEqual(
|
||||
result['error'], 'User does not have permission to view this model'
|
||||
)
|
||||
|
||||
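The search tests above post a single query to the global search endpoint: a 'search' term, a result 'limit', and one empty dict per model type to include. Models the user is not allowed to view come back with an 'error' entry instead of results. A sketch using requests, with placeholder server address and token:

import requests

SERVER = 'http://localhost:8000'                        # placeholder server
HEADERS = {'Authorization': 'Token <your-api-token>'}   # placeholder token

payload = {
    'search': 'chair',  # search term (required)
    'limit': 3,         # maximum results per model type
    'part': {},         # include results for the 'part' model
    'build': {},        # include results for the 'build' model
}

response = requests.post(f'{SERVER}/api/search/', json=payload, headers=HEADERS)

for model in ('part', 'build'):
    result = response.json().get(model, {})
    if 'error' in result:
        # Returned when the user lacks the '<model>.view' role
        print(model, '->', result['error'])
    else:
        print(model, '->', result.get('count'), 'results')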
@@ -1,6 +1,5 @@
|
||||
"""Tests for api_version."""
|
||||
|
||||
|
||||
from django.urls import reverse
|
||||
|
||||
from InvenTree.api_version import INVENTREE_API_VERSION
|
||||
@@ -19,6 +18,11 @@ class ApiVersionTests(InvenTreeAPITestCase):
|
||||
|
||||
self.assertEqual(len(data), 10)
|
||||
|
||||
response = self.client.get(reverse('api-version'), format='json').json()
|
||||
self.assertIn('version', response)
|
||||
self.assertIn('dev', response)
|
||||
self.assertIn('up_to_date', response)
|
||||
|
||||
def test_inventree_api_text(self):
|
||||
"""Test that the inventreeApiText function works expected."""
|
||||
# Normal run
|
||||
|
||||
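The added assertions read the version endpoint and check for a handful of keys. A small sketch of fetching the same information over HTTP; the server address is a placeholder and authentication may be required depending on server settings:

import requests

SERVER = 'http://localhost:8000'  # placeholder server address

info = requests.get(f'{SERVER}/api/version/').json()

# The keys asserted in the test above
print(info.get('version'))      # installed version string
print(info.get('dev'))          # True when running a development build
print(info.get('up_to_date'))   # whether a newer stable release exists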
@@ -2,6 +2,7 @@
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import Http404
|
||||
from django.test import tag
|
||||
from django.urls import reverse
|
||||
|
||||
from error_report.models import Error
|
||||
@@ -10,12 +11,16 @@ from InvenTree.exceptions import log_error
|
||||
from InvenTree.unit_test import InvenTreeTestCase
|
||||
|
||||
|
||||
# TODO change test to not rely on CUI
|
||||
@tag('cui')
|
||||
class MiddlewareTests(InvenTreeTestCase):
|
||||
"""Test for middleware functions."""
|
||||
|
||||
def check_path(self, url, code=200, **kwargs):
|
||||
"""Helper function to run a request."""
|
||||
response = self.client.get(url, HTTP_ACCEPT='application/json', **kwargs)
|
||||
response = self.client.get(
|
||||
url, headers={'accept': 'application/json'}, **kwargs
|
||||
)
|
||||
self.assertEqual(response.status_code, code)
|
||||
return response
|
||||
|
||||
@@ -66,6 +71,7 @@ class MiddlewareTests(InvenTreeTestCase):
|
||||
|
||||
def test_error_exceptions(self):
|
||||
"""Test that ignored errors are not logged."""
|
||||
|
||||
def check(excpected_nbr=0):
|
||||
# Check that errors are empty
|
||||
errors = Error.objects.all()
|
||||
|
||||
@@ -67,16 +67,24 @@ class InvenTreeTaskTests(TestCase):
|
||||
|
||||
# Error runs
|
||||
# Malformed taskname
|
||||
with self.assertWarnsMessage(UserWarning, "WARNING: 'InvenTree' not started - Malformed function path"):
|
||||
with self.assertWarnsMessage(
|
||||
UserWarning, "WARNING: 'InvenTree' not started - Malformed function path"
|
||||
):
|
||||
InvenTree.tasks.offload_task('InvenTree')
|
||||
|
||||
# Non existent app
|
||||
with self.assertWarnsMessage(UserWarning, "WARNING: 'InvenTreeABC.test_tasks.doesnotmatter' not started - No module named 'InvenTreeABC.test_tasks'"):
|
||||
with self.assertWarnsMessage(
|
||||
UserWarning,
|
||||
"WARNING: 'InvenTreeABC.test_tasks.doesnotmatter' not started - No module named 'InvenTreeABC.test_tasks'",
|
||||
):
|
||||
InvenTree.tasks.offload_task('InvenTreeABC.test_tasks.doesnotmatter')
|
||||
|
||||
# Non existent function
|
||||
with self.assertWarnsMessage(UserWarning, "WARNING: 'InvenTree.test_tasks.doesnotexsist' not started - No function named 'doesnotexsist'"):
|
||||
InvenTree.tasks.offload_task('InvenTree.test_tasks.doesnotexsist')
|
||||
with self.assertWarnsMessage(
|
||||
UserWarning,
|
||||
"WARNING: 'InvenTree.test_tasks.doesnotexist' not started - No function named 'doesnotexist'",
|
||||
):
|
||||
InvenTree.tasks.offload_task('InvenTree.test_tasks.doesnotexist')
|
||||
|
||||
def test_task_hearbeat(self):
|
||||
"""Test the task heartbeat."""
|
||||
@@ -86,7 +94,9 @@ class InvenTreeTaskTests(TestCase):
|
||||
"""Test the task delete_successful_tasks."""
|
||||
from django_q.models import Success
|
||||
|
||||
Success.objects.create(name='abc', func='abc', stopped=threshold, started=threshold_low)
|
||||
Success.objects.create(
|
||||
name='abc', func='abc', stopped=threshold, started=threshold_low
|
||||
)
|
||||
InvenTree.tasks.offload_task(InvenTree.tasks.delete_successful_tasks)
|
||||
results = Success.objects.filter(started__lte=threshold)
|
||||
self.assertEqual(len(results), 0)
|
||||
@@ -99,14 +109,14 @@ class InvenTreeTaskTests(TestCase):
|
||||
error_obj.save()
|
||||
|
||||
# Check that it is not empty
|
||||
errors = Error.objects.filter(when__lte=threshold,)
|
||||
errors = Error.objects.filter(when__lte=threshold)
|
||||
self.assertNotEqual(len(errors), 0)
|
||||
|
||||
# Run action
|
||||
InvenTree.tasks.offload_task(InvenTree.tasks.delete_old_error_logs)
|
||||
|
||||
# Check that it is empty again
|
||||
errors = Error.objects.filter(when__lte=threshold,)
|
||||
errors = Error.objects.filter(when__lte=threshold)
|
||||
self.assertEqual(len(errors), 0)
|
||||
|
||||
def test_task_check_for_updates(self):
|
||||
@@ -146,7 +156,9 @@ class InvenTreeTaskTests(TestCase):
|
||||
# Cleanup
|
||||
try:
|
||||
migration_name = InvenTree.tasks.get_migration_plan()[0][0].name + '.py'
|
||||
migration_path = settings.BASE_DIR / 'InvenTree' / 'migrations' / migration_name
|
||||
migration_path = (
|
||||
settings.BASE_DIR / 'InvenTree' / 'migrations' / migration_name
|
||||
)
|
||||
migration_path.unlink()
|
||||
except IndexError: # pragma: no cover
|
||||
pass
|
||||
|
||||
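The reworked assertions expect offload_task to emit a UserWarning, rather than raise, when given a malformed or unresolvable dotted path. A self-contained sketch of testing that behaviour with plain unittest; offload_task_stub is a stand-in, not the real implementation:

import unittest
import warnings


def offload_task_stub(taskname: str):
    """Stand-in for the real offload_task: warn (don't raise) on a bad path."""
    if '.' not in taskname:
        warnings.warn(
            f"WARNING: '{taskname}' not started - Malformed function path",
            UserWarning,
            stacklevel=2,
        )
        return
    # ... a real implementation would import the module and enqueue the task


class OffloadWarningExample(unittest.TestCase):
    def test_malformed_path_warns(self):
        # assertWarnsMessage checks both the warning class and the message text
        with self.assertWarnsMessage(
            UserWarning, 'not started - Malformed function path'
        ):
            offload_task_stub('InvenTree')


if __name__ == '__main__':
    unittest.main()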
@@ -4,10 +4,11 @@ import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from django.test import TestCase
|
||||
from django.test import TestCase, tag
|
||||
from django.urls import reverse
|
||||
|
||||
|
||||
@tag('cui')
|
||||
class URLTest(TestCase):
|
||||
"""Test all files for broken url tags."""
|
||||
|
||||
@@ -44,7 +45,7 @@ class URLTest(TestCase):
|
||||
('part', 'templates'),
|
||||
('report', 'templates'),
|
||||
('stock', 'templates'),
|
||||
('templates', ),
|
||||
('templates',),
|
||||
]
|
||||
|
||||
template_files = []
|
||||
@@ -53,11 +54,9 @@ class URLTest(TestCase):
|
||||
tld = os.path.join(here, '..')
|
||||
|
||||
for directory in template_dirs:
|
||||
|
||||
template_dir = os.path.join(tld, *directory)
|
||||
|
||||
for path in Path(template_dir).rglob(suffix):
|
||||
|
||||
f = os.path.abspath(path)
|
||||
|
||||
if f not in template_files:
|
||||
@@ -69,25 +68,18 @@ class URLTest(TestCase):
|
||||
"""Search for all instances of {% url %} in supplied template file."""
|
||||
urls = []
|
||||
|
||||
pattern = "{% url ['\"]([^'\"]+)['\"]([^%]*)%}"
|
||||
pattern = '{% url [\'"]([^\'"]+)[\'"]([^%]*)%}'
|
||||
|
||||
with open(input_file, 'r') as f:
|
||||
|
||||
data = f.read()
|
||||
|
||||
results = re.findall(pattern, data)
|
||||
|
||||
for result in results:
|
||||
if len(result) == 2:
|
||||
urls.append([
|
||||
result[0].strip(),
|
||||
result[1].strip()
|
||||
])
|
||||
urls.append([result[0].strip(), result[1].strip()])
|
||||
elif len(result) == 1: # pragma: no cover
|
||||
urls.append([
|
||||
result[0].strip(),
|
||||
''
|
||||
])
|
||||
urls.append([result[0].strip(), ''])
|
||||
|
||||
return urls
|
||||
|
||||
@@ -100,16 +92,16 @@ class URLTest(TestCase):
|
||||
pk = None
|
||||
|
||||
# TODO: Handle reverse lookup of admin URLs!
|
||||
if url.startswith("admin:"):
|
||||
if url.startswith('admin:'):
|
||||
return
|
||||
|
||||
# TODO can this be more elegant?
|
||||
if url.startswith("account_"):
|
||||
if url.startswith('account_'):
|
||||
return
|
||||
|
||||
if pk:
|
||||
# We will assume that there is at least one item in the database
|
||||
reverse(url, kwargs={"pk": 1})
|
||||
reverse(url, kwargs={'pk': 1})
|
||||
else:
|
||||
reverse(url)
|
||||
|
||||
@@ -122,14 +114,14 @@ class URLTest(TestCase):
|
||||
|
||||
def test_html_templates(self):
|
||||
"""Test all HTML templates for broken url tags."""
|
||||
template_files = self.find_files("*.html")
|
||||
template_files = self.find_files('*.html')
|
||||
|
||||
for f in template_files:
|
||||
self.check_file(f)
|
||||
|
||||
def test_js_templates(self):
|
||||
"""Test all JS templates for broken url tags."""
|
||||
template_files = self.find_files("*.js")
|
||||
template_files = self.find_files('*.js')
|
||||
|
||||
for f in template_files:
|
||||
self.check_file(f)
|
||||
|
||||
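The find_urls helper above scans template files for {% url %} tags with a regex and later reverses each captured name to catch broken links. A compact sketch of the same idea; the 'templates' directory is a placeholder:

import re
from pathlib import Path

# Matches {% url 'name' arg1 arg2 %} and captures the name plus trailing args
URL_TAG = re.compile(r'{% url [\'"]([^\'"]+)[\'"]([^%]*)%}')


def find_url_tags(template: Path) -> list:
    """Return (url_name, args) pairs found in one template file."""
    text = template.read_text(encoding='utf-8')
    return [(name.strip(), args.strip()) for name, args in URL_TAG.findall(text)]


# Example usage against a hypothetical template directory
for template in Path('templates').rglob('*.html'):
    for name, args in find_url_tags(template):
        print(template, '->', name, args)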
@@ -3,6 +3,7 @@
|
||||
import os
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.test import tag
|
||||
from django.urls import reverse
|
||||
|
||||
from InvenTree.unit_test import InvenTreeTestCase
|
||||
@@ -23,18 +24,19 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
def test_index_redirect(self):
|
||||
"""Top-level URL should redirect to "index" page."""
|
||||
response = self.client.get("/")
|
||||
response = self.client.get('/')
|
||||
|
||||
self.assertEqual(response.status_code, 302)
|
||||
|
||||
def get_index_page(self):
|
||||
"""Retrieve the index page (used for subsequent unit tests)"""
|
||||
response = self.client.get("/index/")
|
||||
"""Retrieve the index page (used for subsequent unit tests)."""
|
||||
response = self.client.get('/index/')
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
return str(response.content.decode())
|
||||
|
||||
@tag('cui')
|
||||
def test_panels(self):
|
||||
"""Test that the required 'panels' are present."""
|
||||
content = self.get_index_page()
|
||||
@@ -43,8 +45,9 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
# TODO: In future, run the javascript and ensure that the panels get created!
|
||||
|
||||
@tag('cui')
|
||||
def test_settings_page(self):
|
||||
"""Test that the 'settings' page loads correctly"""
|
||||
"""Test that the 'settings' page loads correctly."""
|
||||
# Settings page loads
|
||||
url = reverse('settings')
|
||||
|
||||
@@ -60,35 +63,20 @@ class ViewTests(InvenTreeTestCase):
|
||||
self.assertEqual(response.status_code, 200)
|
||||
content = response.content.decode()
|
||||
|
||||
user_panels = [
|
||||
'account',
|
||||
'user-display',
|
||||
'user-home',
|
||||
'user-reports',
|
||||
]
|
||||
user_panels = ['account', 'user-display', 'user-home', 'user-reports']
|
||||
|
||||
staff_panels = [
|
||||
'server',
|
||||
'login',
|
||||
'barcodes',
|
||||
'pricing',
|
||||
'parts',
|
||||
'stock',
|
||||
]
|
||||
staff_panels = ['server', 'login', 'barcodes', 'pricing', 'parts', 'stock']
|
||||
|
||||
plugin_panels = [
|
||||
'plugin',
|
||||
]
|
||||
plugin_panels = ['plugin']
|
||||
|
||||
# Default user has staff access, so all panels will be present
|
||||
for panel in user_panels + staff_panels + plugin_panels:
|
||||
self.assertIn(f"select-{panel}", content)
|
||||
self.assertIn(f"panel-{panel}", content)
|
||||
self.assertIn(f'select-{panel}', content)
|
||||
self.assertIn(f'panel-{panel}', content)
|
||||
|
||||
# Now create a user who does not have staff access
|
||||
pleb_user = get_user_model().objects.create_user(
|
||||
username='pleb',
|
||||
password='notstaff',
|
||||
username='pleb', password='notstaff'
|
||||
)
|
||||
|
||||
pleb_user.groups.add(self.group)
|
||||
@@ -98,10 +86,7 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
self.client.logout()
|
||||
|
||||
result = self.client.login(
|
||||
username='pleb',
|
||||
password='notstaff',
|
||||
)
|
||||
result = self.client.login(username='pleb', password='notstaff')
|
||||
|
||||
self.assertTrue(result)
|
||||
|
||||
@@ -111,22 +96,26 @@ class ViewTests(InvenTreeTestCase):
|
||||
|
||||
# Normal user still has access to user-specific panels
|
||||
for panel in user_panels:
|
||||
self.assertIn(f"select-{panel}", content)
|
||||
self.assertIn(f"panel-{panel}", content)
|
||||
self.assertIn(f'select-{panel}', content)
|
||||
self.assertIn(f'panel-{panel}', content)
|
||||
|
||||
# Normal user does NOT have access to global or plugin settings
|
||||
for panel in staff_panels + plugin_panels:
|
||||
self.assertNotIn(f"select-{panel}", content)
|
||||
self.assertNotIn(f"panel-{panel}", content)
|
||||
self.assertNotIn(f'select-{panel}', content)
|
||||
self.assertNotIn(f'panel-{panel}', content)
|
||||
|
||||
# TODO: Replace this with a PUI test
|
||||
@tag('cui')
|
||||
def test_url_login(self):
|
||||
"""Test logging in via arguments"""
|
||||
"""Test logging in via arguments."""
|
||||
# Log out
|
||||
self.client.logout()
|
||||
response = self.client.get("/index/")
|
||||
response = self.client.get('/index/')
|
||||
self.assertEqual(response.status_code, 302)
|
||||
|
||||
# Try login with url
|
||||
response = self.client.get(f"/accounts/login/?next=/&login={self.username}&password={self.password}")
|
||||
response = self.client.get(
|
||||
f'/accounts/login/?next=/&login={self.username}&password={self.password}'
|
||||
)
|
||||
self.assertEqual(response.status_code, 302)
|
||||
self.assertEqual(response.url, '/')
|
||||
|
||||
File diff suppressed because it is too large
InvenTree/InvenTree/tracing.py (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
"""OpenTelemetry setup functions."""
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from opentelemetry import metrics, trace
|
||||
from opentelemetry.instrumentation.django import DjangoInstrumentor
|
||||
from opentelemetry.instrumentation.redis import RedisInstrumentor
|
||||
from opentelemetry.instrumentation.requests import RequestsInstrumentor
|
||||
from opentelemetry.sdk import _logs as logs
|
||||
from opentelemetry.sdk import resources
|
||||
from opentelemetry.sdk._logs import export as logs_export
|
||||
from opentelemetry.sdk.metrics import MeterProvider
|
||||
from opentelemetry.sdk.metrics.export import (
|
||||
ConsoleMetricExporter,
|
||||
PeriodicExportingMetricReader,
|
||||
)
|
||||
from opentelemetry.sdk.trace import TracerProvider
|
||||
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
|
||||
|
||||
import InvenTree.ready
|
||||
from InvenTree.version import inventreeVersion
|
||||
|
||||
# Logger configuration
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def setup_tracing(
|
||||
endpoint: str,
|
||||
headers: dict,
|
||||
resources_input: Optional[dict] = None,
|
||||
console: bool = False,
|
||||
auth: Optional[dict] = None,
|
||||
is_http: bool = False,
|
||||
append_http: bool = True,
|
||||
):
|
||||
"""Set up tracing for the application in the current context.
|
||||
|
||||
Args:
|
||||
endpoint: The endpoint to send the traces to.
|
||||
headers: The headers to send with the traces.
|
||||
resources_input: The resources to send with the traces.
|
||||
console: Whether to output the traces to the console.
|
||||
"""
|
||||
if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
|
||||
return
|
||||
|
||||
if resources_input is None:
|
||||
resources_input = {}
|
||||
if auth is None:
|
||||
auth = {}
|
||||
|
||||
# Setup the auth headers
|
||||
if 'basic' in auth:
|
||||
basic_auth = auth['basic']
|
||||
if 'username' in basic_auth and 'password' in basic_auth:
|
||||
auth_raw = f'{basic_auth["username"]}:{basic_auth["password"]}'
|
||||
auth_token = base64.b64encode(auth_raw.encode('utf-8')).decode('utf-8')
|
||||
headers['Authorization'] = f'Basic {auth_token}'
|
||||
else:
|
||||
logger.warning('Basic auth is missing username or password')
|
||||
|
||||
# Clean up headers
|
||||
headers = {k: v for k, v in headers.items() if v is not None}
|
||||
|
||||
# Initialize the OTLP Resource
|
||||
resource = resources.Resource(
|
||||
attributes={
|
||||
resources.SERVICE_NAME: 'BACKEND',
|
||||
resources.SERVICE_NAMESPACE: 'INVENTREE',
|
||||
resources.SERVICE_VERSION: inventreeVersion(),
|
||||
**resources_input,
|
||||
}
|
||||
)
|
||||
|
||||
# Import the OTLP exporters
|
||||
if is_http:
|
||||
from opentelemetry.exporter.otlp.proto.http._log_exporter import OTLPLogExporter
|
||||
from opentelemetry.exporter.otlp.proto.http.metric_exporter import (
|
||||
OTLPMetricExporter,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
|
||||
OTLPSpanExporter,
|
||||
)
|
||||
else:
|
||||
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
|
||||
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
|
||||
OTLPMetricExporter,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
|
||||
OTLPSpanExporter,
|
||||
)
|
||||
|
||||
# Spans / Traces
|
||||
span_exporter = OTLPSpanExporter(
|
||||
headers=headers,
|
||||
endpoint=endpoint if not (is_http and append_http) else f'{endpoint}/v1/traces',
|
||||
)
|
||||
trace_processor = BatchSpanProcessor(span_exporter)
|
||||
trace_provider = TracerProvider(resource=resource)
|
||||
trace.set_tracer_provider(trace_provider)
|
||||
trace_provider.add_span_processor(trace_processor)
|
||||
# For debugging purposes, export the traces to the console
|
||||
if console:
|
||||
trace_provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter()))
|
||||
|
||||
# Metrics
|
||||
metric_perodic_reader = PeriodicExportingMetricReader(
|
||||
OTLPMetricExporter(
|
||||
headers=headers,
|
||||
endpoint=endpoint
|
||||
if not (is_http and append_http)
|
||||
else f'{endpoint}/v1/metrics',
|
||||
)
|
||||
)
|
||||
metric_readers = [metric_perodic_reader]
|
||||
|
||||
# For debugging purposes, export the metrics to the console
|
||||
if console:
|
||||
console_metric_exporter = ConsoleMetricExporter()
|
||||
console_metric_reader = PeriodicExportingMetricReader(console_metric_exporter)
|
||||
metric_readers.append(console_metric_reader)
|
||||
|
||||
meter_provider = MeterProvider(resource=resource, metric_readers=metric_readers)
|
||||
metrics.set_meter_provider(meter_provider)
|
||||
|
||||
# Logs
|
||||
log_exporter = OTLPLogExporter(
|
||||
headers=headers,
|
||||
endpoint=endpoint if not (is_http and append_http) else f'{endpoint}/v1/logs',
|
||||
)
|
||||
log_provider = logs.LoggerProvider(resource=resource)
|
||||
log_provider.add_log_record_processor(
|
||||
logs_export.BatchLogRecordProcessor(log_exporter)
|
||||
)
|
||||
handler = logs.LoggingHandler(level=logging.INFO, logger_provider=log_provider)
|
||||
logger = logging.getLogger('inventree')
|
||||
logger.addHandler(handler)
|
||||
|
||||
|
||||
def setup_instruments():
|
||||
"""Run auto-insturmentation for OpenTelemetry tracing."""
|
||||
DjangoInstrumentor().instrument()
|
||||
RedisInstrumentor().instrument()
|
||||
RequestsInstrumentor().instrument()
|
||||
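The new tracing module wires up OpenTelemetry span, metric and log exporters and registers auto-instrumentation for Django, Redis and requests. A hedged sketch of calling it at startup, assuming the module is importable as InvenTree.tracing and a Django settings context is already configured; the endpoint and credentials are placeholders:

from InvenTree.tracing import setup_instruments, setup_tracing

# Placeholder OTLP collector endpoint and basic-auth credentials
setup_tracing(
    endpoint='https://otel-collector.example.com:4318',
    headers={},                                         # extra exporter headers
    resources_input={'deployment.environment': 'dev'},  # merged into the Resource
    console=False,            # True also mirrors spans/metrics to the console
    auth={'basic': {'username': 'otel', 'password': 'secret'}},
    is_http=True,             # use the HTTP/protobuf exporters instead of gRPC
    append_http=True,         # append /v1/traces, /v1/metrics, /v1/logs to the endpoint
)

# Instrument Django, Redis and requests once tracing is configured
setup_instruments()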
@@ -1,4 +1,4 @@
|
||||
"""Translation helper functions"""
|
||||
"""Translation helper functions."""
|
||||
|
||||
import json
|
||||
|
||||
@@ -9,7 +9,7 @@ _translation_stats = None
|
||||
|
||||
|
||||
def reload_translation_stats():
|
||||
"""Reload the translation stats from the compiled file"""
|
||||
"""Reload the translation stats from the compiled file."""
|
||||
global _translation_stats
|
||||
|
||||
STATS_FILE = settings.BASE_DIR.joinpath('InvenTree/locale_stats.json').absolute()
|
||||
@@ -24,7 +24,7 @@ def reload_translation_stats():
|
||||
keys = _translation_stats.keys()
|
||||
|
||||
# Note that the names used in the stats file may not align 100%
|
||||
for (code, _lang) in settings.LANGUAGES:
|
||||
for code, _lang in settings.LANGUAGES:
|
||||
if code in keys:
|
||||
# Direct match, move on
|
||||
continue
|
||||
@@ -39,7 +39,7 @@ def reload_translation_stats():
|
||||
|
||||
|
||||
def get_translation_percent(lang_code):
|
||||
"""Return the translation percentage for the given language code"""
|
||||
"""Return the translation percentage for the given language code."""
|
||||
if _translation_stats is None:
|
||||
reload_translation_stats()
|
||||
|
||||
|
||||
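reload_translation_stats loads the compiled locale_stats.json file and get_translation_percent looks up the fraction for a language code. A minimal plain-Python sketch of the same pattern outside Django; the file path is a placeholder:

import json
from pathlib import Path

STATS_FILE = Path('InvenTree/locale_stats.json')  # placeholder location
_translation_stats = None


def reload_translation_stats() -> None:
    """Load the compiled translation statistics from disk."""
    global _translation_stats
    try:
        _translation_stats = json.loads(STATS_FILE.read_text())
    except (OSError, json.JSONDecodeError):
        _translation_stats = {}


def get_translation_percent(lang_code: str) -> float:
    """Return the translated fraction (0..1) for the given language code."""
    if _translation_stats is None:
        reload_translation_stats()
    return _translation_stats.get(lang_code, 0)


print(get_translation_percent('de'))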
@@ -1,4 +1,4 @@
|
||||
"""Helper functions for unit testing / CI"""
|
||||
"""Helper functions for unit testing / CI."""
|
||||
|
||||
import csv
|
||||
import io
|
||||
@@ -39,7 +39,7 @@ def getMigrationFileNames(app):
|
||||
files = local_dir.joinpath('..', app, 'migrations').iterdir()
|
||||
|
||||
# Regex pattern for migration files
|
||||
regex = re.compile(r"^[\d]+_.*\.py$")
|
||||
regex = re.compile(r'^[\d]+_.*\.py$')
|
||||
|
||||
migration_files = []
|
||||
|
||||
@@ -56,7 +56,6 @@ def getOldestMigrationFile(app, exclude_extension=True, ignore_initial=True):
|
||||
oldest_file = None
|
||||
|
||||
for f in getMigrationFileNames(app):
|
||||
|
||||
if ignore_initial and f.startswith('0001_initial'):
|
||||
continue
|
||||
|
||||
@@ -110,14 +109,12 @@ class UserMixin:
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
"""Run setup for all tests in a given class"""
|
||||
"""Run setup for all tests in a given class."""
|
||||
super().setUpTestData()
|
||||
|
||||
# Create a user to log in with
|
||||
cls.user = get_user_model().objects.create_user(
|
||||
username=cls.username,
|
||||
password=cls.password,
|
||||
email=cls.email
|
||||
username=cls.username, password=cls.password, email=cls.email
|
||||
)
|
||||
|
||||
# Create a group for the user
|
||||
@@ -142,7 +139,7 @@ class UserMixin:
|
||||
cls.assignRole(role=role, group=cls.group)
|
||||
|
||||
def setUp(self):
|
||||
"""Run setup for individual test methods"""
|
||||
"""Run setup for individual test methods."""
|
||||
if self.auto_login:
|
||||
self.client.login(username=self.username, password=self.password)
|
||||
|
||||
@@ -163,15 +160,15 @@ class UserMixin:
|
||||
raise TypeError('assignRole: assign_all must be a boolean value')
|
||||
|
||||
if not role and not assign_all:
|
||||
raise ValueError('assignRole: either role must be provided, or assign_all must be set')
|
||||
raise ValueError(
|
||||
'assignRole: either role must be provided, or assign_all must be set'
|
||||
)
|
||||
|
||||
if not assign_all and role:
|
||||
rule, perm = role.split('.')
|
||||
|
||||
for ruleset in group.rule_sets.all():
|
||||
|
||||
if assign_all or ruleset.name == rule:
|
||||
|
||||
if assign_all or perm == 'view':
|
||||
ruleset.can_view = True
|
||||
elif assign_all or perm == 'change':
|
||||
@@ -201,41 +198,28 @@ class PluginMixin:
|
||||
|
||||
|
||||
class ExchangeRateMixin:
|
||||
"""Mixin class for generating exchange rate data"""
|
||||
"""Mixin class for generating exchange rate data."""
|
||||
|
||||
def generate_exchange_rates(self):
|
||||
"""Helper function which generates some exchange rates to work with"""
|
||||
rates = {
|
||||
'AUD': 1.5,
|
||||
'CAD': 1.7,
|
||||
'GBP': 0.9,
|
||||
'USD': 1.0,
|
||||
}
|
||||
"""Helper function which generates some exchange rates to work with."""
|
||||
rates = {'AUD': 1.5, 'CAD': 1.7, 'GBP': 0.9, 'USD': 1.0}
|
||||
|
||||
# Create a dummy backend
|
||||
ExchangeBackend.objects.create(
|
||||
name='InvenTreeExchange',
|
||||
base_currency='USD',
|
||||
)
|
||||
ExchangeBackend.objects.create(name='InvenTreeExchange', base_currency='USD')
|
||||
|
||||
backend = ExchangeBackend.objects.get(name='InvenTreeExchange')
|
||||
|
||||
items = []
|
||||
|
||||
for currency, rate in rates.items():
|
||||
items.append(
|
||||
Rate(
|
||||
currency=currency,
|
||||
value=rate,
|
||||
backend=backend,
|
||||
)
|
||||
)
|
||||
items.append(Rate(currency=currency, value=rate, backend=backend))
|
||||
|
||||
Rate.objects.bulk_create(items)
|
||||
|
||||
|
||||
class InvenTreeTestCase(ExchangeRateMixin, UserMixin, TestCase):
|
||||
"""Testcase with user setup buildin."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -243,7 +227,9 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
"""Base class for running InvenTree API tests."""
|
||||
|
||||
@contextmanager
|
||||
def assertNumQueriesLessThan(self, value, using='default', verbose=False, debug=False):
|
||||
def assertNumQueriesLessThan(
|
||||
self, value, using='default', verbose=False, debug=False
|
||||
):
|
||||
"""Context manager to check that the number of queries is less than a certain value.
|
||||
|
||||
Example:
|
||||
@@ -252,29 +238,30 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
Ref: https://stackoverflow.com/questions/1254170/django-is-there-a-way-to-count-sql-queries-from-an-unit-test/59089020#59089020
|
||||
"""
|
||||
with CaptureQueriesContext(connections[using]) as context:
|
||||
yield # your test will be run here
|
||||
yield # your test will be run here
|
||||
|
||||
if verbose:
|
||||
msg = "\r\n%s" % json.dumps(context.captured_queries, indent=4)
|
||||
msg = '\r\n%s' % json.dumps(context.captured_queries, indent=4)
|
||||
else:
|
||||
msg = None
|
||||
|
||||
n = len(context.captured_queries)
|
||||
|
||||
if debug:
|
||||
print(f"Expected less than {value} queries, got {n} queries")
|
||||
print(f'Expected less than {value} queries, got {n} queries')
|
||||
|
||||
self.assertLess(n, value, msg=msg)
|
||||
|
||||
def checkResponse(self, url, method, expected_code, response):
|
||||
"""Debug output for an unexpected response"""
|
||||
"""Debug output for an unexpected response."""
|
||||
# No expected code, return
|
||||
if expected_code is None:
|
||||
return
|
||||
|
||||
if expected_code != response.status_code:
|
||||
|
||||
print(f"Unexpected {method} response at '{url}': status_code = {response.status_code}")
|
||||
print(
|
||||
f"Unexpected {method} response at '{url}': status_code = {response.status_code}"
|
||||
)
|
||||
|
||||
if hasattr(response, 'data'):
|
||||
print('data:', response.data)
|
||||
@@ -300,66 +287,68 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
|
||||
return actions
|
||||
|
||||
def get(self, url, data=None, expected_code=200, format='json'):
|
||||
def get(self, url, data=None, expected_code=200, format='json', **kwargs):
|
||||
"""Issue a GET request."""
|
||||
# Set default - see B006
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
response = self.client.get(url, data, format=format)
|
||||
response = self.client.get(url, data, format=format, **kwargs)
|
||||
|
||||
self.checkResponse(url, 'GET', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def post(self, url, data=None, expected_code=None, format='json'):
|
||||
def post(self, url, data=None, expected_code=None, format='json', **kwargs):
|
||||
"""Issue a POST request."""
|
||||
# Set default value - see B006
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
response = self.client.post(url, data=data, format=format)
|
||||
response = self.client.post(url, data=data, format=format, **kwargs)
|
||||
|
||||
self.checkResponse(url, 'POST', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def delete(self, url, data=None, expected_code=None, format='json'):
|
||||
def delete(self, url, data=None, expected_code=None, format='json', **kwargs):
|
||||
"""Issue a DELETE request."""
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
response = self.client.delete(url, data=data, format=format)
|
||||
response = self.client.delete(url, data=data, format=format, **kwargs)
|
||||
|
||||
self.checkResponse(url, 'DELETE', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def patch(self, url, data, expected_code=None, format='json'):
|
||||
def patch(self, url, data, expected_code=None, format='json', **kwargs):
|
||||
"""Issue a PATCH request."""
|
||||
response = self.client.patch(url, data=data, format=format)
|
||||
response = self.client.patch(url, data=data, format=format, **kwargs)
|
||||
|
||||
self.checkResponse(url, 'PATCH', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def put(self, url, data, expected_code=None, format='json'):
|
||||
def put(self, url, data, expected_code=None, format='json', **kwargs):
|
||||
"""Issue a PUT request."""
|
||||
response = self.client.put(url, data=data, format=format)
|
||||
response = self.client.put(url, data=data, format=format, **kwargs)
|
||||
|
||||
self.checkResponse(url, 'PUT', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def options(self, url, expected_code=None):
|
||||
def options(self, url, expected_code=None, **kwargs):
|
||||
"""Issue an OPTIONS request."""
|
||||
response = self.client.options(url, format='json')
|
||||
response = self.client.options(url, format='json', **kwargs)
|
||||
|
||||
self.checkResponse(url, 'OPTIONS', expected_code, response)
|
||||
|
||||
return response
|
||||
|
||||
def download_file(self, url, data, expected_code=None, expected_fn=None, decode=True):
|
||||
def download_file(
|
||||
self, url, data, expected_code=None, expected_fn=None, decode=True
|
||||
):
|
||||
"""Download a file from the server, and return an in-memory file."""
|
||||
response = self.client.get(url, data=data, format='json')
|
||||
|
||||
@@ -367,7 +356,9 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
|
||||
# Check that the response is of the correct type
|
||||
if not isinstance(response, StreamingHttpResponse):
|
||||
raise ValueError("Response is not a StreamingHttpResponse object as expected")
|
||||
raise ValueError(
|
||||
'Response is not a StreamingHttpResponse object as expected'
|
||||
)
|
||||
|
||||
# Extract filename
|
||||
disposition = response.headers['Content-Disposition']
|
||||
@@ -394,7 +385,14 @@ class InvenTreeAPITestCase(ExchangeRateMixin, UserMixin, APITestCase):
|
||||
|
||||
return file
|
||||
|
||||
def process_csv(self, file_object, delimiter=',', required_cols=None, excluded_cols=None, required_rows=None):
|
||||
def process_csv(
|
||||
self,
|
||||
file_object,
|
||||
delimiter=',',
|
||||
required_cols=None,
|
||||
excluded_cols=None,
|
||||
required_rows=None,
|
||||
):
|
||||
"""Helper function to process and validate a downloaded csv file."""
|
||||
# Check that the correct object type has been passed
|
||||
self.assertTrue(isinstance(file_object, io.StringIO))
|
||||
|
||||
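The assertNumQueriesLessThan helper reformatted above wraps Django's CaptureQueriesContext to assert an upper bound on the number of SQL queries executed. A self-contained sketch of the same technique:

import json
from contextlib import contextmanager

from django.contrib.auth import get_user_model
from django.db import connections
from django.test import TestCase
from django.test.utils import CaptureQueriesContext


class QueryBudgetExample(TestCase):
    """Sketch of an upper-bound query assertion, as used above."""

    @contextmanager
    def assertNumQueriesLessThan(self, value, using='default', verbose=False):
        with CaptureQueriesContext(connections[using]) as context:
            yield  # the code under test runs here

        msg = json.dumps(context.captured_queries, indent=4) if verbose else None
        self.assertLess(len(context.captured_queries), value, msg=msg)

    def test_query_budget(self):
        # Any ORM call works here; the user model is just a convenient example
        with self.assertNumQueriesLessThan(5):
            list(get_user_model().objects.all())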
@@ -10,9 +10,11 @@ from django.urls import include, path, re_path
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.views.generic.base import RedirectView
|
||||
|
||||
from dj_rest_auth.registration.views import (ConfirmEmailView,
|
||||
SocialAccountDisconnectView,
|
||||
SocialAccountListView)
|
||||
from dj_rest_auth.registration.views import (
|
||||
ConfirmEmailView,
|
||||
SocialAccountDisconnectView,
|
||||
SocialAccountListView,
|
||||
)
|
||||
from drf_spectacular.views import SpectacularAPIView, SpectacularRedocView
|
||||
from sesame.views import LoginView
|
||||
|
||||
@@ -20,6 +22,7 @@ import build.api
|
||||
import common.api
|
||||
import company.api
|
||||
import label.api
|
||||
import machine.api
|
||||
import order.api
|
||||
import part.api
|
||||
import plugin.api
|
||||
@@ -28,199 +31,402 @@ import stock.api
|
||||
import users.api
|
||||
from build.urls import build_urls
|
||||
from common.urls import common_urls
|
||||
from company.urls import (company_urls, manufacturer_part_urls,
|
||||
supplier_part_urls)
|
||||
from company.urls import company_urls, manufacturer_part_urls, supplier_part_urls
|
||||
from order.urls import order_urls
|
||||
from part.urls import part_urls
|
||||
from plugin.urls import get_plugin_urls
|
||||
from stock.urls import stock_urls
|
||||
from web.urls import api_urls as web_api_urls
|
||||
from web.urls import urlpatterns as platform_urls
|
||||
|
||||
from .api import (APISearchView, InfoView, NotFoundView, VersionTextView,
|
||||
VersionView)
|
||||
from .api import APISearchView, InfoView, NotFoundView, VersionTextView, VersionView
|
||||
from .magic_login import GetSimpleLoginView
|
||||
from .social_auth_urls import (EmailListView, EmailPrimaryView,
|
||||
EmailRemoveView, EmailVerifyView,
|
||||
SocialProviderListView, social_auth_urlpatterns)
|
||||
from .views import (AboutView, AppearanceSelectView, CustomConnectionsView,
|
||||
CustomEmailView, CustomLoginView,
|
||||
CustomPasswordResetFromKeyView,
|
||||
CustomSessionDeleteOtherView, CustomSessionDeleteView,
|
||||
DatabaseStatsView, DynamicJsView, EditUserView, IndexView,
|
||||
NotificationsView, SearchView, SetPasswordView,
|
||||
SettingsView, auth_request)
|
||||
from .social_auth_urls import (
|
||||
EmailListView,
|
||||
EmailPrimaryView,
|
||||
EmailRemoveView,
|
||||
EmailVerifyView,
|
||||
SocialProviderListView,
|
||||
social_auth_urlpatterns,
|
||||
)
|
||||
from .views import (
|
||||
AboutView,
|
||||
AppearanceSelectView,
|
||||
CustomConnectionsView,
|
||||
CustomEmailView,
|
||||
CustomLoginView,
|
||||
CustomPasswordResetFromKeyView,
|
||||
CustomSessionDeleteOtherView,
|
||||
CustomSessionDeleteView,
|
||||
DatabaseStatsView,
|
||||
DynamicJsView,
|
||||
EditUserView,
|
||||
IndexView,
|
||||
NotificationsView,
|
||||
SearchView,
|
||||
SetPasswordView,
|
||||
SettingsView,
|
||||
auth_request,
|
||||
)
|
||||
|
||||
admin.site.site_header = "InvenTree Admin"
|
||||
admin.site.site_header = 'InvenTree Admin'
|
||||
|
||||
|
||||
apipatterns = [
|
||||
|
||||
# Global search
|
||||
path('search/', APISearchView.as_view(), name='api-search'),
|
||||
|
||||
re_path(r'^settings/', include(common.api.settings_api_urls)),
|
||||
re_path(r'^part/', include(part.api.part_api_urls)),
|
||||
re_path(r'^bom/', include(part.api.bom_api_urls)),
|
||||
re_path(r'^company/', include(company.api.company_api_urls)),
|
||||
re_path(r'^stock/', include(stock.api.stock_api_urls)),
|
||||
re_path(r'^build/', include(build.api.build_api_urls)),
|
||||
re_path(r'^order/', include(order.api.order_api_urls)),
|
||||
re_path(r'^label/', include(label.api.label_api_urls)),
|
||||
re_path(r'^report/', include(report.api.report_api_urls)),
|
||||
re_path(r'^user/', include(users.api.user_urls)),
|
||||
re_path(r'^admin/', include(common.api.admin_api_urls)),
|
||||
|
||||
path('settings/', include(common.api.settings_api_urls)),
|
||||
path('part/', include(part.api.part_api_urls)),
|
||||
path('bom/', include(part.api.bom_api_urls)),
|
||||
path('company/', include(company.api.company_api_urls)),
|
||||
path('stock/', include(stock.api.stock_api_urls)),
|
||||
path('build/', include(build.api.build_api_urls)),
|
||||
path('order/', include(order.api.order_api_urls)),
|
||||
path('label/', include(label.api.label_api_urls)),
|
||||
path('report/', include(report.api.report_api_urls)),
|
||||
path('machine/', include(machine.api.machine_api_urls)),
|
||||
path('user/', include(users.api.user_urls)),
|
||||
path('admin/', include(common.api.admin_api_urls)),
|
||||
path('web/', include(web_api_urls)),
|
||||
# Plugin endpoints
|
||||
path('', include(plugin.api.plugin_api_urls)),
|
||||
|
||||
# Common endpoints endpoint
|
||||
path('', include(common.api.common_api_urls)),
|
||||
|
||||
# OpenAPI Schema
|
||||
re_path('schema/', SpectacularAPIView.as_view(custom_settings={'SCHEMA_PATH_PREFIX': '/api/'}), name='schema'),
|
||||
|
||||
path(
|
||||
'schema/',
|
||||
SpectacularAPIView.as_view(custom_settings={'SCHEMA_PATH_PREFIX': '/api/'}),
|
||||
name='schema',
|
||||
),
|
||||
# InvenTree information endpoints
|
||||
path("version-text", VersionTextView.as_view(), name="api-version-text"), # version text
|
||||
path(
|
||||
'version-text', VersionTextView.as_view(), name='api-version-text'
|
||||
), # version text
|
||||
path('version/', VersionView.as_view(), name='api-version'), # version info
|
||||
path('', InfoView.as_view(), name='api-inventree-info'), # server info
|
||||
|
||||
# Auth API endpoints
|
||||
path('auth/', include([
|
||||
re_path(r'^registration/account-confirm-email/(?P<key>[-:\w]+)/$', ConfirmEmailView.as_view(), name='account_confirm_email'),
|
||||
path('registration/', include('dj_rest_auth.registration.urls')),
|
||||
path('providers/', SocialProviderListView.as_view(), name='social_providers'),
|
||||
path('emails/', include([path('<int:pk>/', include([
|
||||
path('primary/', EmailPrimaryView.as_view(), name='email-primary'),
|
||||
path('verify/', EmailVerifyView.as_view(), name='email-verify'),
|
||||
path('remove/', EmailRemoveView().as_view(), name='email-remove'),])),
|
||||
path('', EmailListView.as_view(), name='email-list')
|
||||
])),
|
||||
path('social/', include(social_auth_urlpatterns)),
|
||||
path('social/', SocialAccountListView.as_view(), name='social_account_list'),
|
||||
path('social/<int:pk>/disconnect/', SocialAccountDisconnectView.as_view(), name='social_account_disconnect'),
|
||||
path('', include('dj_rest_auth.urls')),
|
||||
])),
|
||||
|
||||
path(
|
||||
'auth/',
|
||||
include([
|
||||
re_path(
|
||||
r'^registration/account-confirm-email/(?P<key>[-:\w]+)/$',
|
||||
ConfirmEmailView.as_view(),
|
||||
name='account_confirm_email',
|
||||
),
|
||||
path('registration/', include('dj_rest_auth.registration.urls')),
|
||||
path(
|
||||
'providers/', SocialProviderListView.as_view(), name='social_providers'
|
||||
),
|
||||
path(
|
||||
'emails/',
|
||||
include([
|
||||
path(
|
||||
'<int:pk>/',
|
||||
include([
|
||||
path(
|
||||
'primary/',
|
||||
EmailPrimaryView.as_view(),
|
||||
name='email-primary',
|
||||
),
|
||||
path(
|
||||
'verify/',
|
||||
EmailVerifyView.as_view(),
|
||||
name='email-verify',
|
||||
),
|
||||
path(
|
||||
'remove/',
|
||||
EmailRemoveView().as_view(),
|
||||
name='email-remove',
|
||||
),
|
||||
]),
|
||||
),
|
||||
path('', EmailListView.as_view(), name='email-list'),
|
||||
]),
|
||||
),
|
||||
path('social/', include(social_auth_urlpatterns)),
|
||||
path(
|
||||
'social/', SocialAccountListView.as_view(), name='social_account_list'
|
||||
),
|
||||
path(
|
||||
'social/<int:pk>/disconnect/',
|
||||
SocialAccountDisconnectView.as_view(),
|
||||
name='social_account_disconnect',
|
||||
),
|
||||
path('logout/', users.api.Logout.as_view(), name='api-logout'),
|
||||
path(
|
||||
'login-redirect/',
|
||||
users.api.LoginRedirect.as_view(),
|
||||
name='api-login-redirect',
|
||||
),
|
||||
path('', include('dj_rest_auth.urls')),
|
||||
]),
|
||||
),
|
||||
# Magic login URLs
|
||||
path("email/generate/", csrf_exempt(GetSimpleLoginView().as_view()), name="sesame-generate",),
|
||||
path("email/login/", LoginView.as_view(), name="sesame-login"),
|
||||
|
||||
path(
|
||||
'email/generate/',
|
||||
csrf_exempt(GetSimpleLoginView().as_view()),
|
||||
name='sesame-generate',
|
||||
),
|
||||
path('email/login/', LoginView.as_view(), name='sesame-login'),
|
||||
# Unknown endpoint
|
||||
re_path(r'^.*$', NotFoundView.as_view(), name='api-404'),
|
||||
]
|
||||
|
||||
settings_urls = [
|
||||
|
||||
re_path(r'^i18n/?', include('django.conf.urls.i18n')),
|
||||
|
||||
re_path(r'^appearance/?', AppearanceSelectView.as_view(), name='settings-appearance'),
|
||||
|
||||
path('i18n/', include('django.conf.urls.i18n')),
|
||||
path('appearance/', AppearanceSelectView.as_view(), name='settings-appearance'),
|
||||
# Catch any other urls
|
||||
re_path(r'^.*$', SettingsView.as_view(template_name='InvenTree/settings/settings.html'), name='settings'),
|
||||
path(
|
||||
'',
|
||||
SettingsView.as_view(template_name='InvenTree/settings/settings.html'),
|
||||
name='settings',
|
||||
),
|
||||
]
|
||||
|
||||
notifications_urls = [
|
||||
|
||||
# Catch any other urls
|
||||
re_path(r'^.*$', NotificationsView.as_view(), name='notifications'),
|
||||
path('', NotificationsView.as_view(), name='notifications')
|
||||
]
|
||||
|
||||
# These javascript files are served "dynamically" - i.e. rendered on demand
|
||||
dynamic_javascript_urls = [
|
||||
re_path(r'^calendar.js', DynamicJsView.as_view(template_name='js/dynamic/calendar.js'), name='calendar.js'),
|
||||
re_path(r'^nav.js', DynamicJsView.as_view(template_name='js/dynamic/nav.js'), name='nav.js'),
|
||||
re_path(r'^permissions.js', DynamicJsView.as_view(template_name='js/dynamic/permissions.js'), name='permissions.js'),
|
||||
re_path(r'^settings.js', DynamicJsView.as_view(template_name='js/dynamic/settings.js'), name='settings.js'),
|
||||
path(
|
||||
'calendar.js',
|
||||
DynamicJsView.as_view(template_name='js/dynamic/calendar.js'),
|
||||
name='calendar.js',
|
||||
),
|
||||
path(
|
||||
'nav.js',
|
||||
DynamicJsView.as_view(template_name='js/dynamic/nav.js'),
|
||||
name='nav.js',
|
||||
),
|
||||
path(
|
||||
'permissions.js',
|
||||
DynamicJsView.as_view(template_name='js/dynamic/permissions.js'),
|
||||
name='permissions.js',
|
||||
),
|
||||
path(
|
||||
'settings.js',
|
||||
DynamicJsView.as_view(template_name='js/dynamic/settings.js'),
|
||||
name='settings.js',
|
||||
),
|
||||
]
|
||||
|
||||
# These javascript files are passed through the Django translation layer
|
||||
translated_javascript_urls = [
|
||||
re_path(r'^api.js', DynamicJsView.as_view(template_name='js/translated/api.js'), name='api.js'),
|
||||
re_path(r'^attachment.js', DynamicJsView.as_view(template_name='js/translated/attachment.js'), name='attachment.js'),
|
||||
re_path(r'^barcode.js', DynamicJsView.as_view(template_name='js/translated/barcode.js'), name='barcode.js'),
|
||||
re_path(r'^bom.js', DynamicJsView.as_view(template_name='js/translated/bom.js'), name='bom.js'),
|
||||
re_path(r'^build.js', DynamicJsView.as_view(template_name='js/translated/build.js'), name='build.js'),
|
||||
re_path(r'^charts.js', DynamicJsView.as_view(template_name='js/translated/charts.js'), name='charts.js'),
|
||||
re_path(r'^company.js', DynamicJsView.as_view(template_name='js/translated/company.js'), name='company.js'),
|
||||
re_path(r'^filters.js', DynamicJsView.as_view(template_name='js/translated/filters.js'), name='filters.js'),
|
||||
re_path(r'^forms.js', DynamicJsView.as_view(template_name='js/translated/forms.js'), name='forms.js'),
|
||||
re_path(r'^helpers.js', DynamicJsView.as_view(template_name='js/translated/helpers.js'), name='helpers.js'),
|
||||
re_path(r'^index.js', DynamicJsView.as_view(template_name='js/translated/index.js'), name='index.js'),
|
||||
re_path(r'^label.js', DynamicJsView.as_view(template_name='js/translated/label.js'), name='label.js'),
|
||||
re_path(r'^model_renderers.js', DynamicJsView.as_view(template_name='js/translated/model_renderers.js'), name='model_renderers.js'),
|
||||
re_path(r'^modals.js', DynamicJsView.as_view(template_name='js/translated/modals.js'), name='modals.js'),
|
||||
re_path(r'^order.js', DynamicJsView.as_view(template_name='js/translated/order.js'), name='order.js'),
|
||||
re_path(r'^part.js', DynamicJsView.as_view(template_name='js/translated/part.js'), name='part.js'),
|
||||
re_path(r'^purchase_order.js', DynamicJsView.as_view(template_name='js/translated/purchase_order.js'), name='purchase_order.js'),
|
||||
re_path(r'^return_order.js', DynamicJsView.as_view(template_name='js/translated/return_order.js'), name='return_order.js'),
|
||||
re_path(r'^report.js', DynamicJsView.as_view(template_name='js/translated/report.js'), name='report.js'),
|
||||
re_path(r'^sales_order.js', DynamicJsView.as_view(template_name='js/translated/sales_order.js'), name='sales_order.js'),
|
||||
re_path(r'^search.js', DynamicJsView.as_view(template_name='js/translated/search.js'), name='search.js'),
|
||||
re_path(r'^stock.js', DynamicJsView.as_view(template_name='js/translated/stock.js'), name='stock.js'),
|
||||
re_path(r'^status_codes.js', DynamicJsView.as_view(template_name='js/translated/status_codes.js'), name='status_codes.js'),
|
||||
re_path(r'^plugin.js', DynamicJsView.as_view(template_name='js/translated/plugin.js'), name='plugin.js'),
|
||||
re_path(r'^pricing.js', DynamicJsView.as_view(template_name='js/translated/pricing.js'), name='pricing.js'),
|
||||
re_path(r'^news.js', DynamicJsView.as_view(template_name='js/translated/news.js'), name='news.js'),
|
||||
re_path(r'^tables.js', DynamicJsView.as_view(template_name='js/translated/tables.js'), name='tables.js'),
|
||||
re_path(r'^table_filters.js', DynamicJsView.as_view(template_name='js/translated/table_filters.js'), name='table_filters.js'),
|
||||
re_path(r'^notification.js', DynamicJsView.as_view(template_name='js/translated/notification.js'), name='notification.js'),
|
||||
path(
|
||||
'api.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/api.js'),
|
||||
name='api.js',
|
||||
),
|
||||
path(
|
||||
'attachment.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/attachment.js'),
|
||||
name='attachment.js',
|
||||
),
|
||||
path(
|
||||
'barcode.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/barcode.js'),
|
||||
name='barcode.js',
|
||||
),
|
||||
path(
|
||||
'bom.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/bom.js'),
|
||||
name='bom.js',
|
||||
),
|
||||
path(
|
||||
'build.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/build.js'),
|
||||
name='build.js',
|
||||
),
|
||||
path(
|
||||
'charts.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/charts.js'),
|
||||
name='charts.js',
|
||||
),
|
||||
path(
|
||||
'company.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/company.js'),
|
||||
name='company.js',
|
||||
),
|
||||
path(
|
||||
'filters.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/filters.js'),
|
||||
name='filters.js',
|
||||
),
|
||||
path(
|
||||
'forms.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/forms.js'),
|
||||
name='forms.js',
|
||||
),
|
||||
path(
|
||||
'helpers.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/helpers.js'),
|
||||
name='helpers.js',
|
||||
),
|
||||
path(
|
||||
'index.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/index.js'),
|
||||
name='index.js',
|
||||
),
|
||||
path(
|
||||
'label.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/label.js'),
|
||||
name='label.js',
|
||||
),
|
||||
path(
|
||||
'model_renderers.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/model_renderers.js'),
|
||||
name='model_renderers.js',
|
||||
),
|
||||
path(
|
||||
'modals.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/modals.js'),
|
||||
name='modals.js',
|
||||
),
|
||||
path(
|
||||
'order.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/order.js'),
|
||||
name='order.js',
|
||||
),
|
||||
path(
|
||||
'part.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/part.js'),
|
||||
name='part.js',
|
||||
),
|
||||
path(
|
||||
'purchase_order.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/purchase_order.js'),
|
||||
name='purchase_order.js',
|
||||
),
|
||||
path(
|
||||
'return_order.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/return_order.js'),
|
||||
name='return_order.js',
|
||||
),
|
||||
path(
|
||||
'report.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/report.js'),
|
||||
name='report.js',
|
||||
),
|
||||
path(
|
||||
'sales_order.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/sales_order.js'),
|
||||
name='sales_order.js',
|
||||
),
|
||||
path(
|
||||
'search.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/search.js'),
|
||||
name='search.js',
|
||||
),
|
||||
path(
|
||||
'stock.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/stock.js'),
|
||||
name='stock.js',
|
||||
),
|
||||
path(
|
||||
'status_codes.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/status_codes.js'),
|
||||
name='status_codes.js',
|
||||
),
|
||||
path(
|
||||
'plugin.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/plugin.js'),
|
||||
name='plugin.js',
|
||||
),
|
||||
path(
|
||||
'pricing.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/pricing.js'),
|
||||
name='pricing.js',
|
||||
),
|
||||
path(
|
||||
'news.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/news.js'),
|
||||
name='news.js',
|
||||
),
|
||||
path(
|
||||
'tables.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/tables.js'),
|
||||
name='tables.js',
|
||||
),
|
||||
path(
|
||||
'table_filters.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/table_filters.js'),
|
||||
name='table_filters.js',
|
||||
),
|
||||
path(
|
||||
'notification.js',
|
||||
DynamicJsView.as_view(template_name='js/translated/notification.js'),
|
||||
name='notification.js',
|
||||
),
|
||||
]
|
||||
|
||||
backendpatterns = [
|
||||
# "Dynamic" javascript files which are rendered using InvenTree templating.
|
||||
re_path(r'^js/dynamic/', include(dynamic_javascript_urls)),
|
||||
re_path(r'^js/i18n/', include(translated_javascript_urls)),
|
||||
|
||||
re_path(r'^auth/', include('rest_framework.urls', namespace='rest_framework')),
|
||||
re_path(r'^auth/?', auth_request),
|
||||
|
||||
re_path(r'^api/', include(apipatterns)),
|
||||
re_path(r'^api-doc/', SpectacularRedocView.as_view(url_name='schema'), name='api-doc'),
|
||||
path('auth/', include('rest_framework.urls', namespace='rest_framework')),
|
||||
path('auth/', auth_request),
|
||||
path('api/', include(apipatterns)),
|
||||
path('api-doc/', SpectacularRedocView.as_view(url_name='schema'), name='api-doc'),
|
||||
]
|
||||
|
||||
if settings.ENABLE_CLASSIC_FRONTEND:
|
||||
# "Dynamic" javascript files which are rendered using InvenTree templating.
|
||||
backendpatterns += [
|
||||
re_path(r'^js/dynamic/', include(dynamic_javascript_urls)),
|
||||
re_path(r'^js/i18n/', include(translated_javascript_urls)),
|
||||
]
|
||||
|
||||
classic_frontendpatterns = [
|
||||
|
||||
# Apps
|
||||
re_path(r'^build/', include(build_urls)),
|
||||
re_path(r'^common/', include(common_urls)),
|
||||
re_path(r'^company/', include(company_urls)),
|
||||
re_path(r'^order/', include(order_urls)),
|
||||
re_path(r'^manufacturer-part/', include(manufacturer_part_urls)),
|
||||
re_path(r'^part/', include(part_urls)),
|
||||
re_path(r'^stock/', include(stock_urls)),
|
||||
re_path(r'^supplier-part/', include(supplier_part_urls)),
|
||||
|
||||
re_path(r'^edit-user/', EditUserView.as_view(), name='edit-user'),
|
||||
re_path(r'^set-password/', SetPasswordView.as_view(), name='set-password'),
|
||||
|
||||
re_path(r'^index/', IndexView.as_view(), name='index'),
|
||||
re_path(r'^notifications/', include(notifications_urls)),
|
||||
re_path(r'^search/', SearchView.as_view(), name='search'),
|
||||
re_path(r'^settings/', include(settings_urls)),
|
||||
re_path(r'^about/', AboutView.as_view(), name='about'),
|
||||
re_path(r'^stats/', DatabaseStatsView.as_view(), name='stats'),
|
||||
|
||||
path('build/', include(build_urls)),
|
||||
path('common/', include(common_urls)),
|
||||
path('company/', include(company_urls)),
|
||||
path('order/', include(order_urls)),
|
||||
path('manufacturer-part/', include(manufacturer_part_urls)),
|
||||
path('part/', include(part_urls)),
|
||||
path('stock/', include(stock_urls)),
|
||||
path('supplier-part/', include(supplier_part_urls)),
|
||||
path('edit-user/', EditUserView.as_view(), name='edit-user'),
|
||||
path('set-password/', SetPasswordView.as_view(), name='set-password'),
|
||||
path('index/', IndexView.as_view(), name='index'),
|
||||
path('notifications/', include(notifications_urls)),
|
||||
path('search/', SearchView.as_view(), name='search'),
|
||||
path('settings/', include(settings_urls)),
|
||||
path('about/', AboutView.as_view(), name='about'),
|
||||
path('stats/', DatabaseStatsView.as_view(), name='stats'),
|
||||
# DB user sessions
|
||||
path('accounts/sessions/other/delete/', view=CustomSessionDeleteOtherView.as_view(), name='session_delete_other', ),
|
||||
re_path(r'^accounts/sessions/(?P<pk>\w+)/delete/$', view=CustomSessionDeleteView.as_view(), name='session_delete', ),
|
||||
|
||||
path(
|
||||
'accounts/sessions/other/delete/',
|
||||
view=CustomSessionDeleteOtherView.as_view(),
|
||||
name='session_delete_other',
|
||||
),
|
||||
re_path(
|
||||
r'^accounts/sessions/(?P<pk>\w+)/delete/$',
|
||||
view=CustomSessionDeleteView.as_view(),
|
||||
name='session_delete',
|
||||
),
|
||||
# Single Sign On / allauth
|
||||
# overrides of urlpatterns
|
||||
re_path(r'^accounts/email/', CustomEmailView.as_view(), name='account_email'),
|
||||
re_path(r'^accounts/social/connections/', CustomConnectionsView.as_view(), name='socialaccount_connections'),
|
||||
re_path(r"^accounts/password/reset/key/(?P<uidb36>[0-9A-Za-z]+)-(?P<key>.+)/$", CustomPasswordResetFromKeyView.as_view(), name="account_reset_password_from_key"),
|
||||
|
||||
path('accounts/email/', CustomEmailView.as_view(), name='account_email'),
|
||||
path(
|
||||
'accounts/social/connections/',
|
||||
CustomConnectionsView.as_view(),
|
||||
name='socialaccount_connections',
|
||||
),
|
||||
re_path(
|
||||
r'^accounts/password/reset/key/(?P<uidb36>[0-9A-Za-z]+)-(?P<key>.+)/$',
|
||||
CustomPasswordResetFromKeyView.as_view(),
|
||||
name='account_reset_password_from_key',
|
||||
),
|
||||
# Override login page
|
||||
re_path("accounts/login/", CustomLoginView.as_view(), name="account_login"),
|
||||
|
||||
re_path(r'^accounts/', include('allauth_2fa.urls')), # MFA support
|
||||
re_path(r'^accounts/', include('allauth.urls')), # included urlpatterns
|
||||
path('accounts/login/', CustomLoginView.as_view(), name='account_login'),
|
||||
path('accounts/', include('allauth_2fa.urls')), # MFA support
|
||||
path('accounts/', include('allauth.urls')), # included urlpatterns
|
||||
]
|
||||
|
||||
urlpatterns = []
|
||||
|
||||
if settings.INVENTREE_ADMIN_ENABLED:
|
||||
admin_url = settings.INVENTREE_ADMIN_URL,
|
||||
admin_url = (settings.INVENTREE_ADMIN_URL,)
|
||||
urlpatterns += [
|
||||
path(f'{admin_url}/error_log/', include('error_report.urls')),
|
||||
path(f'{admin_url}/', admin.site.urls, name='inventree-admin'),
|
||||
@@ -234,6 +440,15 @@ if settings.ENABLE_CLASSIC_FRONTEND:
|
||||
frontendpatterns += classic_frontendpatterns
|
||||
if settings.ENABLE_PLATFORM_FRONTEND:
|
||||
frontendpatterns += platform_urls
|
||||
if not settings.ENABLE_CLASSIC_FRONTEND:
|
||||
# Add a redirect for login views
|
||||
frontendpatterns += [
|
||||
path(
|
||||
'accounts/login/',
|
||||
RedirectView.as_view(url=settings.FRONTEND_URL_BASE, permanent=False),
|
||||
name='account_login',
|
||||
)
|
||||
]
|
||||
|
||||
urlpatterns += frontendpatterns
|
||||
|
||||
@@ -249,17 +464,24 @@ if settings.DEBUG:
|
||||
# Media file access
|
||||
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
|
||||
# Debug toolbar access (only allowed in DEBUG mode)
|
||||
if settings.DEBUG_TOOLBAR_ENABLED:
|
||||
import debug_toolbar
|
||||
urlpatterns = [
|
||||
path('__debug__/', include(debug_toolbar.urls)),
|
||||
] + urlpatterns
|
||||
|
||||
# Redirect for favicon.ico
|
||||
urlpatterns.append(
|
||||
path('favicon.ico', RedirectView.as_view(url=f'{settings.STATIC_URL}img/favicon/favicon.ico'))
|
||||
path(
|
||||
'favicon.ico',
|
||||
RedirectView.as_view(url=f'{settings.STATIC_URL}img/favicon/favicon.ico'),
|
||||
)
|
||||
)
|
||||
|
||||
# Send any unknown URLs to the index page
|
||||
urlpatterns += [re_path(r'^.*$', RedirectView.as_view(url='/index/', permanent=False), name='index')]
|
||||
urlpatterns += [
|
||||
re_path(
|
||||
r'^.*$',
|
||||
RedirectView.as_view(
|
||||
url='/index/'
|
||||
if settings.ENABLE_CLASSIC_FRONTEND
|
||||
else settings.FRONTEND_URL_BASE,
|
||||
permanent=False,
|
||||
),
|
||||
name='index',
|
||||
)
|
||||
]
|
||||
|
||||
@@ -63,13 +63,14 @@ class AllowedURLValidator(validators.URLValidator):
|
||||
|
||||
def __call__(self, value):
|
||||
"""Validate the URL."""
|
||||
|
||||
import common.models
|
||||
|
||||
self.schemes = allowable_url_schemes()
|
||||
|
||||
# Determine if 'strict' URL validation is required (i.e. if the URL must have a scheme prefix)
|
||||
strict_urls = common.models.InvenTreeSetting.get_setting('INVENTREE_STRICT_URLS', True, cache=False)
|
||||
strict_urls = common.models.InvenTreeSetting.get_setting(
|
||||
'INVENTREE_STRICT_URLS', True, cache=False
|
||||
)
|
||||
|
||||
if not strict_urls:
|
||||
# Allow URLs which do not have a provided scheme
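# Editor's sketch (assumption: the body of this branch is elided from the hunk above).
# A common way to relax scheme validation is to prepend a default scheme before
# delegating to the parent URLValidator, e.g.:
#
#   if '://' not in value:
#       value = 'http://' + value
#
#   super().__call__(value)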
|
||||
@@ -117,7 +118,7 @@ def validate_overage(value):
|
||||
i = Decimal(value)
|
||||
|
||||
if i < 0:
|
||||
raise ValidationError(_("Overage value must not be negative"))
|
||||
raise ValidationError(_('Overage value must not be negative'))
|
||||
|
||||
# Looks like a number
|
||||
return True
|
||||
@@ -133,17 +134,15 @@ def validate_overage(value):
|
||||
f = float(v)
|
||||
|
||||
if f < 0:
|
||||
raise ValidationError(_("Overage value must not be negative"))
|
||||
raise ValidationError(_('Overage value must not be negative'))
|
||||
elif f > 100:
|
||||
raise ValidationError(_("Overage must not exceed 100%"))
|
||||
raise ValidationError(_('Overage must not exceed 100%'))
|
||||
|
||||
return True
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
raise ValidationError(
|
||||
_("Invalid value for overage")
|
||||
)
|
||||
raise ValidationError(_('Invalid value for overage'))
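# Editor's sketch (illustrative only, not the InvenTree implementation): a standalone
# approximation of the overage rules shown in this hunk. A non-negative number is
# accepted, and a percentage string is accepted if it lies between 0% and 100%.
from decimal import Decimal, InvalidOperation

def overage_is_valid(value: str) -> bool:
    """Return True if 'value' looks like a valid overage (plain number or percentage)."""
    try:
        # Plain numeric overage: must not be negative
        return Decimal(value) >= 0
    except InvalidOperation:
        pass

    if value.endswith('%'):
        try:
            # Percentage overage: must lie between 0% and 100%
            return 0 <= float(value[:-1].strip()) <= 100
        except ValueError:
            pass

    return False

# overage_is_valid('4') -> True, overage_is_valid('25%') -> True
# overage_is_valid('-1') -> False, overage_is_valid('150%') -> False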
|
||||
|
||||
|
||||
def validate_part_name_format(value):
|
||||
@@ -179,8 +178,6 @@ def validate_part_name_format(value):
|
||||
try:
|
||||
Template(value).render({'part': p})
|
||||
except Exception as exc:
|
||||
raise ValidationError({
|
||||
'value': str(exc)
|
||||
})
|
||||
raise ValidationError({'value': str(exc)})
|
||||
|
||||
return True
|
||||
|
||||
@@ -19,7 +19,7 @@ from dulwich.repo import NotGitRepository, Repo
|
||||
from .api_version import INVENTREE_API_TEXT, INVENTREE_API_VERSION
|
||||
|
||||
# InvenTree software version
|
||||
INVENTREE_SW_VERSION = "0.13.0"
|
||||
INVENTREE_SW_VERSION = '0.14.5'
|
||||
|
||||
# Discover git
|
||||
try:
|
||||
@@ -30,10 +30,9 @@ except (NotGitRepository, FileNotFoundError):
|
||||
|
||||
|
||||
def checkMinPythonVersion():
|
||||
"""Check that the Python version is at least 3.9"""
|
||||
|
||||
version = sys.version.split(" ")[0]
|
||||
docs = "https://docs.inventree.org/en/stable/start/intro/#python-requirements"
|
||||
"""Check that the Python version is at least 3.9."""
|
||||
version = sys.version.split(' ')[0]
|
||||
docs = 'https://docs.inventree.org/en/stable/start/intro/#python-requirements'
|
||||
|
||||
msg = f"""
|
||||
InvenTree requires Python 3.9 or above - you are running version {version}.
|
||||
@@ -47,22 +46,22 @@ def checkMinPythonVersion():
|
||||
if sys.version_info.major == 3 and sys.version_info.minor < 9:
|
||||
raise RuntimeError(msg)
|
||||
|
||||
print(f"Python version {version} - {sys.executable}")
|
||||
print(f'Python version {version} - {sys.executable}')
|
||||
|
||||
|
||||
def inventreeInstanceName():
|
||||
"""Returns the InstanceName settings for the current database."""
|
||||
import common.models
|
||||
|
||||
return common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE", "")
|
||||
return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
|
||||
|
||||
|
||||
def inventreeInstanceTitle():
|
||||
"""Returns the InstanceTitle for the current database."""
|
||||
import common.models
|
||||
|
||||
if common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE_TITLE", False):
|
||||
return common.models.InvenTreeSetting.get_setting("INVENTREE_INSTANCE", "")
|
||||
if common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE_TITLE', False):
|
||||
return common.models.InvenTreeSetting.get_setting('INVENTREE_INSTANCE', '')
|
||||
return 'InvenTree'
|
||||
|
||||
|
||||
@@ -76,7 +75,7 @@ def inventreeVersionTuple(version=None):
|
||||
if version is None:
|
||||
version = INVENTREE_SW_VERSION
|
||||
|
||||
match = re.match(r"^.*(\d+)\.(\d+)\.(\d+).*$", str(version))
|
||||
match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', str(version))
|
||||
|
||||
return [int(g) for g in match.groups()]
|
||||
|
||||
@@ -93,29 +92,29 @@ def inventreeDocsVersion():
|
||||
Release -> "major.minor.sub" e.g. "0.5.2"
|
||||
"""
|
||||
if isInvenTreeDevelopmentVersion():
|
||||
return "latest"
|
||||
return 'latest'
|
||||
return INVENTREE_SW_VERSION # pragma: no cover
|
||||
|
||||
|
||||
def inventreeDocUrl():
|
||||
"""Return URL for InvenTree documentation site."""
|
||||
tag = inventreeDocsVersion()
|
||||
return f"https://docs.inventree.org/en/{tag}"
|
||||
return f'https://docs.inventree.org/en/{tag}'
|
||||
|
||||
|
||||
def inventreeAppUrl():
|
||||
"""Return URL for InvenTree app site."""
|
||||
return f'{inventreeDocUrl()}/app/app',
|
||||
return f'{inventreeDocUrl()}/app/app/'
|
||||
|
||||
|
||||
def inventreeCreditsUrl():
|
||||
"""Return URL for InvenTree credits site."""
|
||||
return "https://docs.inventree.org/en/latest/credits/"
|
||||
return 'https://docs.inventree.org/en/latest/credits/'
|
||||
|
||||
|
||||
def inventreeGithubUrl():
|
||||
"""Return URL for InvenTree github site."""
|
||||
return "https://github.com/InvenTree/InvenTree/"
|
||||
return 'https://github.com/InvenTree/InvenTree/'
|
||||
|
||||
|
||||
def isInvenTreeUpToDate():
|
||||
@@ -124,7 +123,10 @@ def isInvenTreeUpToDate():
|
||||
A background task periodically queries GitHub for the latest version, and stores it in the database as "_INVENTREE_LATEST_VERSION"
|
||||
"""
|
||||
import common.models
|
||||
latest = common.models.InvenTreeSetting.get_setting('_INVENTREE_LATEST_VERSION', backup_value=None, create=False)
|
||||
|
||||
latest = common.models.InvenTreeSetting.get_setting(
|
||||
'_INVENTREE_LATEST_VERSION', backup_value=None, create=False
|
||||
)
|
||||
|
||||
# No record for "latest" version - we must assume we are up to date!
|
||||
if not latest:
|
||||
@@ -144,24 +146,26 @@ def inventreeApiVersion():
|
||||
|
||||
def parse_version_text():
|
||||
"""Parse the version text to structured data."""
|
||||
patched_data = INVENTREE_API_TEXT.split("\n\n")
|
||||
patched_data = INVENTREE_API_TEXT.split('\n\n')
|
||||
# Remove first newline on latest version
|
||||
patched_data[0] = patched_data[0].replace("\n", "", 1)
|
||||
patched_data[0] = patched_data[0].replace('\n', '', 1)
|
||||
|
||||
version_data = {}
|
||||
for version in patched_data:
|
||||
data = version.split("\n")
|
||||
data = version.split('\n')
|
||||
|
||||
version_split = data[0].split(' -> ')
|
||||
version_detail = version_split[1].split(':', 1) if len(version_split) > 1 else ['', ]
|
||||
version_detail = (
|
||||
version_split[1].split(':', 1) if len(version_split) > 1 else ['']
|
||||
)
|
||||
new_data = {
|
||||
"version": version_split[0].strip(),
|
||||
"date": version_detail[0].strip(),
|
||||
"gh": version_detail[1].strip() if len(version_detail) > 1 else None,
|
||||
"text": data[1:],
|
||||
"latest": False,
|
||||
'version': version_split[0].strip(),
|
||||
'date': version_detail[0].strip(),
|
||||
'gh': version_detail[1].strip() if len(version_detail) > 1 else None,
|
||||
'text': data[1:],
|
||||
'latest': False,
|
||||
}
|
||||
version_data[new_data["version"]] = new_data
|
||||
version_data[new_data['version']] = new_data
|
||||
return version_data
|
||||
|
||||
|
||||
@@ -183,7 +187,7 @@ def inventreeApiText(versions: int = 10, start_version: int = 0):
|
||||
start_version = INVENTREE_API_VERSION - versions
|
||||
|
||||
return {
|
||||
f"v{a}": version_data.get(f"v{a}", None)
|
||||
f'v{a}': version_data.get(f'v{a}', None)
|
||||
for a in range(start_version, start_version + versions)
|
||||
}
|
||||
|
||||
@@ -194,7 +198,7 @@ def inventreeDjangoVersion():
|
||||
|
||||
|
||||
def inventreePythonVersion():
|
||||
"""Returns the version of python"""
|
||||
"""Returns the version of python."""
|
||||
return sys.version.split(' ')[0]
|
||||
|
||||
|
||||
@@ -222,7 +226,9 @@ def inventreeCommitDate():
|
||||
if main_commit is None:
|
||||
return None
|
||||
|
||||
commit_dt = dt.fromtimestamp(main_commit.commit_time) + td(seconds=main_commit.commit_timezone)
|
||||
commit_dt = dt.fromtimestamp(main_commit.commit_time) + td(
|
||||
seconds=main_commit.commit_timezone
|
||||
)
|
||||
return str(commit_dt.date())
|
||||
|
||||
|
||||
|
||||
@@ -5,8 +5,7 @@ as JSON objects and passing them to modal forms (using jQuery / bootstrap).
|
||||
"""
|
||||
|
||||
from django.contrib.auth import password_validation
|
||||
from django.contrib.auth.mixins import (LoginRequiredMixin,
|
||||
PermissionRequiredMixin)
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
|
||||
from django.shortcuts import redirect
|
||||
@@ -20,8 +19,7 @@ from django.views.generic.base import RedirectView, TemplateView
|
||||
|
||||
from allauth.account.forms import AddEmailForm
|
||||
from allauth.account.models import EmailAddress
|
||||
from allauth.account.views import (EmailView, LoginView,
|
||||
PasswordResetFromKeyView)
|
||||
from allauth.account.views import EmailView, LoginView, PasswordResetFromKeyView
|
||||
from allauth.socialaccount.forms import DisconnectForm
|
||||
from allauth.socialaccount.views import ConnectionsView
|
||||
from djmoney.contrib.exchange.models import ExchangeBackend, Rate
|
||||
@@ -33,7 +31,7 @@ from part.models import PartCategory
|
||||
from users.models import RuleSet, check_user_role
|
||||
|
||||
from .forms import EditUserForm, SetPasswordForm
|
||||
from .helpers import remove_non_printable_characters, strip_html_tags
|
||||
from .helpers import is_ajax, remove_non_printable_characters, strip_html_tags
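# Editor's note: HttpRequest.is_ajax() was removed in Django 4.0, which is why this diff
# replaces request.is_ajax() with a helper imported from InvenTree's own helpers module.
# A typical stand-in (assumption, shown for context only; the real helper lives in
# InvenTree/helpers.py) checks the X-Requested-With header:
#
#   def is_ajax(request) -> bool:
#       """Return True if the request was made via XMLHttpRequest."""
#       return request.headers.get('x-requested-with') == 'XMLHttpRequest'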
|
||||
|
||||
|
||||
def auth_request(request):
|
||||
@@ -104,7 +102,6 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
|
||||
return True
|
||||
|
||||
for required in roles_required:
|
||||
|
||||
(role, permission) = required.split('.')
|
||||
|
||||
if role not in RuleSet.RULESET_NAMES:
|
||||
@@ -138,12 +135,14 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
|
||||
app_label = model._meta.app_label
|
||||
model_name = model._meta.model_name
|
||||
|
||||
table = f"{app_label}_{model_name}"
|
||||
table = f'{app_label}_{model_name}'
|
||||
|
||||
permission = self.get_permission_class()
|
||||
|
||||
if not permission:
|
||||
raise AttributeError(f"permission_class not defined for {type(self).__name__}")
|
||||
raise AttributeError(
|
||||
f'permission_class not defined for {type(self).__name__}'
|
||||
)
|
||||
|
||||
# Check if the user has the required permission
|
||||
return RuleSet.check_table_permission(user, table, permission)
|
||||
@@ -182,7 +181,6 @@ class InvenTreeRoleMixin(PermissionRequiredMixin):
|
||||
}
|
||||
|
||||
for view_class in permission_map.keys():
|
||||
|
||||
if issubclass(type(self), view_class):
|
||||
return permission_map[view_class]
|
||||
|
||||
@@ -258,7 +256,7 @@ class AjaxMixin(InvenTreeRoleMixin):
|
||||
if not data:
|
||||
data = {}
|
||||
|
||||
if not request.is_ajax():
|
||||
if not is_ajax(request):
|
||||
return HttpResponseRedirect('/')
|
||||
|
||||
if context is None:
|
||||
@@ -282,9 +280,7 @@ class AjaxMixin(InvenTreeRoleMixin):
|
||||
data['title'] = self.get_form_title()
|
||||
|
||||
data['html_form'] = render_to_string(
|
||||
self.ajax_template_name,
|
||||
context,
|
||||
request=request
|
||||
self.ajax_template_name, context, request=request
|
||||
)
|
||||
|
||||
# Custom feedback data
|
||||
@@ -329,14 +325,16 @@ class AjaxUpdateView(AjaxMixin, UpdateView):
|
||||
"""
|
||||
super(UpdateView, self).get(request, *args, **kwargs)
|
||||
|
||||
return self.renderJsonResponse(request, self.get_form(), context=self.get_context_data())
|
||||
return self.renderJsonResponse(
|
||||
request, self.get_form(), context=self.get_context_data()
|
||||
)
|
||||
|
||||
def save(self, object, form, **kwargs):
|
||||
"""Method for updating the object in the database. Default implementation is very simple, but can be overridden if required.
|
||||
|
||||
Args:
|
||||
object - The current object, to be updated
|
||||
form - The validated form
|
||||
object: The current object, to be updated
|
||||
form: The validated form
|
||||
|
||||
Returns:
|
||||
object instance for supplied form
|
||||
@@ -379,7 +377,6 @@ class AjaxUpdateView(AjaxMixin, UpdateView):
|
||||
data[key] = value
|
||||
|
||||
if valid:
|
||||
|
||||
# Save the updated object to the database
|
||||
self.save(self.object, form)
|
||||
|
||||
@@ -399,8 +396,8 @@ class AjaxUpdateView(AjaxMixin, UpdateView):
|
||||
class EditUserView(AjaxUpdateView):
|
||||
"""View for editing user information."""
|
||||
|
||||
ajax_template_name = "modal_form.html"
|
||||
ajax_form_title = _("Edit User Information")
|
||||
ajax_template_name = 'modal_form.html'
|
||||
ajax_form_title = _('Edit User Information')
|
||||
form_class = EditUserForm
|
||||
|
||||
def get_object(self):
|
||||
@@ -411,8 +408,8 @@ class EditUserView(AjaxUpdateView):
|
||||
class SetPasswordView(AjaxUpdateView):
|
||||
"""View for setting user password."""
|
||||
|
||||
ajax_template_name = "InvenTree/password.html"
|
||||
ajax_form_title = _("Set Password")
|
||||
ajax_template_name = 'InvenTree/password.html'
|
||||
ajax_form_title = _('Set Password')
|
||||
form_class = SetPasswordForm
|
||||
|
||||
def get_object(self):
|
||||
@@ -494,14 +491,14 @@ class SearchView(TemplateView):
|
||||
class DynamicJsView(TemplateView):
|
||||
"""View for returning javacsript files, which instead of being served dynamically, are passed through the django translation engine!"""
|
||||
|
||||
template_name = ""
|
||||
template_name = ''
|
||||
content_type = 'text/javascript'
|
||||
|
||||
|
||||
class SettingsView(TemplateView):
|
||||
"""View for configuring User settings."""
|
||||
|
||||
template_name = "InvenTree/settings/settings.html"
|
||||
template_name = 'InvenTree/settings/settings.html'
|
||||
|
||||
def get_context_data(self, **kwargs):
|
||||
"""Add data for template."""
|
||||
@@ -509,33 +506,37 @@ class SettingsView(TemplateView):
|
||||
|
||||
ctx['settings'] = common_models.InvenTreeSetting.objects.all().order_by('key')
|
||||
|
||||
ctx["base_currency"] = common_settings.currency_code_default()
|
||||
ctx["currencies"] = common_settings.currency_codes
|
||||
ctx['base_currency'] = common_settings.currency_code_default()
|
||||
ctx['currencies'] = common_settings.currency_codes
|
||||
|
||||
ctx["rates"] = Rate.objects.filter(backend="InvenTreeExchange")
|
||||
ctx['rates'] = Rate.objects.filter(backend='InvenTreeExchange')
|
||||
|
||||
ctx["categories"] = PartCategory.objects.all().order_by('tree_id', 'lft', 'name')
|
||||
ctx['categories'] = PartCategory.objects.all().order_by(
|
||||
'tree_id', 'lft', 'name'
|
||||
)
|
||||
|
||||
# When were the rates last updated?
|
||||
try:
|
||||
backend = ExchangeBackend.objects.filter(name='InvenTreeExchange')
|
||||
if backend.exists():
|
||||
backend = backend.first()
|
||||
ctx["rates_updated"] = backend.last_update
|
||||
ctx['rates_updated'] = backend.last_update
|
||||
except Exception:
|
||||
ctx["rates_updated"] = None
|
||||
ctx['rates_updated'] = None
|
||||
|
||||
# Forms and context for allauth
|
||||
ctx['add_email_form'] = AddEmailForm
|
||||
ctx["can_add_email"] = EmailAddress.objects.can_add_email(self.request.user)
|
||||
ctx['can_add_email'] = EmailAddress.objects.can_add_email(self.request.user)
|
||||
|
||||
# Form and context for allauth social-accounts
|
||||
ctx["request"] = self.request
|
||||
ctx['request'] = self.request
|
||||
ctx['social_form'] = DisconnectForm(request=self.request)
|
||||
|
||||
# user db sessions
|
||||
ctx['session_key'] = self.request.session.session_key
|
||||
ctx['session_list'] = self.request.user.session_set.filter(expire_date__gt=now()).order_by('-last_activity')
|
||||
ctx['session_list'] = self.request.user.session_set.filter(
|
||||
expire_date__gt=now()
|
||||
).order_by('-last_activity')
|
||||
|
||||
return ctx
|
||||
|
||||
@@ -550,20 +551,23 @@ class AllauthOverrides(LoginRequiredMixin):
|
||||
|
||||
class CustomEmailView(AllauthOverrides, EmailView):
|
||||
"""Override of allauths EmailView to always show the settings but leave the functions allow."""
|
||||
success_url = reverse_lazy("settings")
|
||||
|
||||
success_url = reverse_lazy('settings')
|
||||
|
||||
|
||||
class CustomConnectionsView(AllauthOverrides, ConnectionsView):
|
||||
"""Override of allauths ConnectionsView to always show the settings but leave the functions allow."""
|
||||
success_url = reverse_lazy("settings")
|
||||
|
||||
success_url = reverse_lazy('settings')
|
||||
|
||||
|
||||
class CustomPasswordResetFromKeyView(PasswordResetFromKeyView):
|
||||
"""Override of allauths PasswordResetFromKeyView to always show the settings but leave the functions allow."""
|
||||
success_url = reverse_lazy("account_login")
|
||||
|
||||
success_url = reverse_lazy('account_login')
|
||||
|
||||
|
||||
class UserSessionOverride():
|
||||
class UserSessionOverride:
|
||||
"""Overrides sucessurl to lead to settings."""
|
||||
|
||||
def get_success_url(self):
|
||||
@@ -573,11 +577,13 @@ class UserSessionOverride():
|
||||
|
||||
class CustomSessionDeleteView(UserSessionOverride, SessionDeleteView):
|
||||
"""Revert to settings after session delete."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class CustomSessionDeleteOtherView(UserSessionOverride, SessionDeleteOtherView):
|
||||
"""Revert to settings after session delete."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
@@ -607,7 +613,9 @@ class AppearanceSelectView(RedirectView):
|
||||
def get_user_theme(self):
|
||||
"""Get current user color theme."""
|
||||
try:
|
||||
user_theme = common_models.ColorTheme.objects.filter(user=self.request.user).get()
|
||||
user_theme = common_models.ColorTheme.objects.filter(
|
||||
user=self.request.user
|
||||
).get()
|
||||
except common_models.ColorTheme.DoesNotExist:
|
||||
user_theme = None
|
||||
|
||||
@@ -638,18 +646,18 @@ class AppearanceSelectView(RedirectView):
|
||||
class DatabaseStatsView(AjaxView):
|
||||
"""View for displaying database statistics."""
|
||||
|
||||
ajax_template_name = "stats.html"
|
||||
ajax_form_title = _("System Information")
|
||||
ajax_template_name = 'stats.html'
|
||||
ajax_form_title = _('System Information')
|
||||
|
||||
|
||||
class AboutView(AjaxView):
|
||||
"""A view for displaying InvenTree version information"""
|
||||
"""A view for displaying InvenTree version information."""
|
||||
|
||||
ajax_template_name = "about.html"
|
||||
ajax_form_title = _("About InvenTree")
|
||||
ajax_template_name = 'about.html'
|
||||
ajax_form_title = _('About InvenTree')
|
||||
|
||||
|
||||
class NotificationsView(TemplateView):
|
||||
"""View for showing notifications."""
|
||||
|
||||
template_name = "InvenTree/notifications/notifications.html"
|
||||
template_name = 'InvenTree/notifications/notifications.html'
|
||||
|
||||
@@ -10,6 +10,8 @@ import os # pragma: no cover
|
||||
|
||||
from django.core.wsgi import get_wsgi_application # pragma: no cover
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "InvenTree.settings") # pragma: no cover
|
||||
os.environ.setdefault(
|
||||
'DJANGO_SETTINGS_MODULE', 'InvenTree.settings'
|
||||
) # pragma: no cover
|
||||
|
||||
application = get_wsgi_application() # pragma: no cover
|
||||
|
||||
InvenTree/_testfolder/.gitignore (new vendored file, 8 lines added)
@@ -0,0 +1,8 @@
|
||||
# Files used for testing
|
||||
dummy_image.*
|
||||
_tmp.csv
|
||||
part_image_123abc.png
|
||||
label.pdf
|
||||
label.png
|
||||
my_special*
|
||||
_tests*.txt
|
||||
@@ -51,6 +51,7 @@ class BuildResource(InvenTreeResource):
|
||||
notes = Field(attribute='notes')
|
||||
|
||||
|
||||
@admin.register(Build)
|
||||
class BuildAdmin(ImportExportModelAdmin):
|
||||
"""Class for managing the Build model via the admin interface"""
|
||||
|
||||
@@ -83,8 +84,9 @@ class BuildAdmin(ImportExportModelAdmin):
|
||||
]
|
||||
|
||||
|
||||
@admin.register(BuildItem)
|
||||
class BuildItemAdmin(admin.ModelAdmin):
|
||||
"""Class for managing the BuildItem model via the admin interface"""
|
||||
"""Class for managing the BuildItem model via the admin interface."""
|
||||
|
||||
list_display = (
|
||||
'stock_item',
|
||||
@@ -98,6 +100,7 @@ class BuildItemAdmin(admin.ModelAdmin):
|
||||
]
|
||||
|
||||
|
||||
@admin.register(BuildLine)
|
||||
class BuildLineAdmin(admin.ModelAdmin):
|
||||
"""Class for managing the BuildLine model via the admin interface"""
|
||||
|
||||
@@ -112,8 +115,3 @@ class BuildLineAdmin(admin.ModelAdmin):
|
||||
'build__reference',
|
||||
'bom_item__sub_part__name',
|
||||
]
|
||||
|
||||
|
||||
admin.site.register(Build, BuildAdmin)
|
||||
admin.site.register(BuildItem, BuildItemAdmin)
|
||||
admin.site.register(BuildLine, BuildLineAdmin)
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
"""JSON API for the Build app."""
|
||||
|
||||
from django.db.models import F, Q
|
||||
from django.urls import include, path, re_path
|
||||
from django.urls import include, path
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
@@ -314,11 +314,21 @@ class BuildLineEndpoint:
|
||||
queryset = BuildLine.objects.all()
|
||||
serializer_class = build.serializers.BuildLineSerializer
|
||||
|
||||
def get_source_build(self) -> Build:
|
||||
"""Return the source Build object for the BuildLine queryset.
|
||||
|
||||
This source build is used to filter the available stock for each BuildLine.
|
||||
|
||||
- If this is a "detail" view, use the build associated with the line
|
||||
- If this is a "list" view, use the build associated with the request
|
||||
"""
|
||||
raise NotImplementedError("get_source_build must be implemented in the child class")
|
||||
|
||||
def get_queryset(self):
|
||||
"""Override queryset to select-related and annotate"""
|
||||
queryset = super().get_queryset()
|
||||
|
||||
queryset = build.serializers.BuildLineSerializer.annotate_queryset(queryset)
|
||||
source_build = self.get_source_build()
|
||||
queryset = build.serializers.BuildLineSerializer.annotate_queryset(queryset, build=source_build)
|
||||
|
||||
return queryset
|
||||
|
||||
@@ -353,10 +363,26 @@ class BuildLineList(BuildLineEndpoint, ListCreateAPI):
|
||||
'bom_item__reference',
|
||||
]
|
||||
|
||||
def get_source_build(self) -> Build:
|
||||
"""Return the target build for the BuildLine queryset."""
|
||||
|
||||
try:
|
||||
build_id = self.request.query_params.get('build', None)
|
||||
if build_id:
|
||||
build = Build.objects.get(pk=build_id)
|
||||
return build
|
||||
except (Build.DoesNotExist, AttributeError, ValueError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
class BuildLineDetail(BuildLineEndpoint, RetrieveUpdateDestroyAPI):
|
||||
"""API endpoint for detail view of a BuildLine object."""
|
||||
pass
|
||||
|
||||
def get_source_build(self) -> Build:
|
||||
"""Return the target source location for the BuildLine queryset."""
|
||||
|
||||
return None
|
||||
|
||||
|
||||
class BuildOrderContextMixin:
|
||||
@@ -587,44 +613,44 @@ class BuildAttachmentDetail(AttachmentMixin, RetrieveUpdateDestroyAPI):
|
||||
build_api_urls = [
|
||||
|
||||
# Attachments
|
||||
re_path(r'^attachment/', include([
|
||||
path(r'<int:pk>/', BuildAttachmentDetail.as_view(), name='api-build-attachment-detail'),
|
||||
re_path(r'^.*$', BuildAttachmentList.as_view(), name='api-build-attachment-list'),
|
||||
path('attachment/', include([
|
||||
path('<int:pk>/', BuildAttachmentDetail.as_view(), name='api-build-attachment-detail'),
|
||||
path('', BuildAttachmentList.as_view(), name='api-build-attachment-list'),
|
||||
])),
|
||||
|
||||
# Build lines
|
||||
re_path(r'^line/', include([
|
||||
path(r'<int:pk>/', BuildLineDetail.as_view(), name='api-build-line-detail'),
|
||||
re_path(r'^.*$', BuildLineList.as_view(), name='api-build-line-list'),
|
||||
path('line/', include([
|
||||
path('<int:pk>/', BuildLineDetail.as_view(), name='api-build-line-detail'),
|
||||
path('', BuildLineList.as_view(), name='api-build-line-list'),
|
||||
])),
|
||||
|
||||
# Build Items
|
||||
re_path(r'^item/', include([
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'^metadata/', MetadataView.as_view(), {'model': BuildItem}, name='api-build-item-metadata'),
|
||||
re_path(r'^.*$', BuildItemDetail.as_view(), name='api-build-item-detail'),
|
||||
path('item/', include([
|
||||
path('<int:pk>/', include([
|
||||
path('metadata/', MetadataView.as_view(), {'model': BuildItem}, name='api-build-item-metadata'),
|
||||
path('', BuildItemDetail.as_view(), name='api-build-item-detail'),
|
||||
])),
|
||||
re_path(r'^.*$', BuildItemList.as_view(), name='api-build-item-list'),
|
||||
path('', BuildItemList.as_view(), name='api-build-item-list'),
|
||||
])),
|
||||
|
||||
# Build Detail
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'^allocate/', BuildAllocate.as_view(), name='api-build-allocate'),
|
||||
re_path(r'^auto-allocate/', BuildAutoAllocate.as_view(), name='api-build-auto-allocate'),
|
||||
re_path(r'^complete/', BuildOutputComplete.as_view(), name='api-build-output-complete'),
|
||||
re_path(r'^create-output/', BuildOutputCreate.as_view(), name='api-build-output-create'),
|
||||
re_path(r'^delete-outputs/', BuildOutputDelete.as_view(), name='api-build-output-delete'),
|
||||
re_path(r'^scrap-outputs/', BuildOutputScrap.as_view(), name='api-build-output-scrap'),
|
||||
re_path(r'^finish/', BuildFinish.as_view(), name='api-build-finish'),
|
||||
re_path(r'^cancel/', BuildCancel.as_view(), name='api-build-cancel'),
|
||||
re_path(r'^unallocate/', BuildUnallocate.as_view(), name='api-build-unallocate'),
|
||||
re_path(r'^metadata/', MetadataView.as_view(), {'model': Build}, name='api-build-metadata'),
|
||||
re_path(r'^.*$', BuildDetail.as_view(), name='api-build-detail'),
|
||||
path('<int:pk>/', include([
|
||||
path('allocate/', BuildAllocate.as_view(), name='api-build-allocate'),
|
||||
path('auto-allocate/', BuildAutoAllocate.as_view(), name='api-build-auto-allocate'),
|
||||
path('complete/', BuildOutputComplete.as_view(), name='api-build-output-complete'),
|
||||
path('create-output/', BuildOutputCreate.as_view(), name='api-build-output-create'),
|
||||
path('delete-outputs/', BuildOutputDelete.as_view(), name='api-build-output-delete'),
|
||||
path('scrap-outputs/', BuildOutputScrap.as_view(), name='api-build-output-scrap'),
|
||||
path('finish/', BuildFinish.as_view(), name='api-build-finish'),
|
||||
path('cancel/', BuildCancel.as_view(), name='api-build-cancel'),
|
||||
path('unallocate/', BuildUnallocate.as_view(), name='api-build-unallocate'),
|
||||
path('metadata/', MetadataView.as_view(), {'model': Build}, name='api-build-metadata'),
|
||||
path('', BuildDetail.as_view(), name='api-build-detail'),
|
||||
])),
|
||||
|
||||
# Build order status code information
|
||||
re_path(r'status/', StatusView.as_view(), {StatusView.MODEL_REF: BuildStatus}, name='api-build-status-codes'),
|
||||
path('status/', StatusView.as_view(), {StatusView.MODEL_REF: BuildStatus}, name='api-build-status-codes'),
|
||||
|
||||
# Build List
|
||||
re_path(r'^.*$', BuildList.as_view(), name='api-build-list'),
|
||||
path('', BuildList.as_view(), name='api-build-list'),
|
||||
]
|
||||
|
||||
@@ -3,12 +3,6 @@
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import mptt.fields
|
||||
from build.models import Build
|
||||
|
||||
|
||||
def update_tree(apps, schema_editor):
|
||||
# Update the Build MPTT model
|
||||
Build.objects.rebuild()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@@ -49,5 +43,4 @@ class Migration(migrations.Migration):
|
||||
field=models.PositiveIntegerField(db_index=True, default=0, editable=False),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.RunPython(update_tree, reverse_code=migrations.RunPython.noop),
|
||||
]
|
||||
|
||||
@@ -57,6 +57,4 @@ class Migration(migrations.Migration):
|
||||
('build', '0028_builditem_bom_item'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(assign_bom_items, reverse_code=migrations.RunPython.noop),
|
||||
]
|
||||
operations = []
|
||||
|
||||
@@ -4,6 +4,7 @@ import decimal
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from django.conf import settings
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.exceptions import ValidationError
|
||||
@@ -28,7 +29,6 @@ from build.validators import generate_next_build_reference, validate_build_order
|
||||
import InvenTree.fields
|
||||
import InvenTree.helpers
|
||||
import InvenTree.helpers_model
|
||||
import InvenTree.mixins
|
||||
import InvenTree.models
|
||||
import InvenTree.ready
|
||||
import InvenTree.tasks
|
||||
@@ -45,7 +45,7 @@ import users.models
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Build(MPTTModel, InvenTree.mixins.DiffMixin, InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNotesMixin, InvenTree.models.MetadataMixin, InvenTree.models.ReferenceIndexingMixin):
|
||||
class Build(InvenTree.models.InvenTreeBarcodeMixin, InvenTree.models.InvenTreeNotesMixin, InvenTree.models.MetadataMixin, InvenTree.models.PluginValidationMixin, InvenTree.models.ReferenceIndexingMixin, MPTTModel):
|
||||
"""A Build object organises the creation of new StockItem objects from other existing StockItem objects.
|
||||
|
||||
Attributes:
|
||||
@@ -162,7 +162,9 @@ class Build(MPTTModel, InvenTree.mixins.DiffMixin, InvenTree.models.InvenTreeBar
|
||||
|
||||
def get_absolute_url(self):
|
||||
"""Return the web URL associated with this BuildOrder"""
|
||||
return reverse('build-detail', kwargs={'pk': self.id})
|
||||
if settings.ENABLE_CLASSIC_FRONTEND:
|
||||
return reverse('build-detail', kwargs={'pk': self.id})
|
||||
return InvenTree.helpers.pui_url(f'/build/{self.id}')
|
||||
|
||||
reference = models.CharField(
|
||||
unique=True,
|
||||
@@ -916,6 +918,11 @@ class Build(MPTTModel, InvenTree.mixins.DiffMixin, InvenTree.models.InvenTreeBar
|
||||
# List the allocated BuildItem objects for the given output
|
||||
allocated_items = output.items_to_install.all()
|
||||
|
||||
if (common.settings.prevent_build_output_complete_on_incompleted_tests() and output.hasRequiredTests() and not output.passedAllRequiredTests()):
|
||||
serial = output.serial
|
||||
raise ValidationError(
|
||||
_(f"Build output {serial} has not passed all required tests"))
|
||||
|
||||
for build_item in allocated_items:
|
||||
# Complete the allocation of stock for that item
|
||||
build_item.complete_allocation(user)
|
||||
@@ -1247,7 +1254,7 @@ class BuildOrderAttachment(InvenTree.models.InvenTreeAttachment):
|
||||
build = models.ForeignKey(Build, on_delete=models.CASCADE, related_name='attachments')
|
||||
|
||||
|
||||
class BuildLine(models.Model):
|
||||
class BuildLine(InvenTree.models.InvenTreeModel):
|
||||
"""A BuildLine object links a BOMItem to a Build.
|
||||
|
||||
When a new Build is created, the BuildLine objects are created automatically.
|
||||
@@ -1326,7 +1333,7 @@ class BuildLine(models.Model):
|
||||
return self.allocated_quantity() > self.quantity
|
||||
|
||||
|
||||
class BuildItem(InvenTree.models.MetadataMixin, models.Model):
|
||||
class BuildItem(InvenTree.models.InvenTreeMetadataModel):
|
||||
"""A BuildItem links multiple StockItem objects to a Build.
|
||||
|
||||
These are used to allocate part stock to a build. Once the Build is completed, the parts are removed from stock and the BuildItemAllocation objects are removed.
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""JSON serializers for Build API."""
|
||||
|
||||
from decimal import Decimal
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
@@ -7,22 +9,25 @@ from django.utils.translation import gettext_lazy as _
|
||||
from django.db import models
|
||||
from django.db.models import ExpressionWrapper, F, FloatField
|
||||
from django.db.models import Case, Sum, When, Value
|
||||
from django.db.models import BooleanField
|
||||
from django.db.models import BooleanField, Q
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from sql_util.utils import SubquerySum
|
||||
|
||||
from InvenTree.serializers import InvenTreeModelSerializer, InvenTreeAttachmentSerializer
|
||||
from InvenTree.serializers import UserSerializer
|
||||
|
||||
import InvenTree.helpers
|
||||
from InvenTree.serializers import InvenTreeDecimalField
|
||||
from InvenTree.status_codes import StockStatus
|
||||
from InvenTree.status_codes import BuildStatusGroups, StockStatus
|
||||
|
||||
from stock.models import generate_batch_code, StockItem, StockLocation
|
||||
from stock.serializers import StockItemSerializerBrief, LocationSerializer
|
||||
|
||||
import common.models
|
||||
from common.serializers import ProjectCodeSerializer
|
||||
import part.filters
|
||||
from part.serializers import BomItemSerializer, PartSerializer, PartBriefSerializer
|
||||
@@ -519,6 +524,17 @@ class BuildOutputCompleteSerializer(serializers.Serializer):
|
||||
|
||||
outputs = data.get('outputs', [])
|
||||
|
||||
if common.settings.prevent_build_output_complete_on_incompleted_tests():
|
||||
errors = []
|
||||
for output in outputs:
|
||||
stock_item = output['output']
|
||||
if stock_item.hasRequiredTests() and not stock_item.passedAllRequiredTests():
|
||||
serial = stock_item.serial
|
||||
errors.append(_(f"Build output {serial} has not passed all required tests"))
|
||||
|
||||
if errors:
|
||||
raise ValidationError(errors)
|
||||
|
||||
if len(outputs) == 0:
|
||||
raise ValidationError(_("A list of build outputs must be provided"))
|
||||
|
||||
@@ -904,18 +920,24 @@ class BuildAllocationSerializer(serializers.Serializer):
|
||||
if build_line.bom_item.consumable:
|
||||
continue
|
||||
|
||||
params = {
|
||||
"build_line": build_line,
|
||||
"stock_item": stock_item,
|
||||
"install_into": output,
|
||||
}
|
||||
|
||||
try:
|
||||
# Create a new BuildItem to allocate stock
|
||||
build_item, created = BuildItem.objects.get_or_create(
|
||||
build_line=build_line,
|
||||
stock_item=stock_item,
|
||||
install_into=output,
|
||||
)
|
||||
if created:
|
||||
build_item.quantity = quantity
|
||||
else:
|
||||
if build_item := BuildItem.objects.filter(**params).first():
|
||||
# Find an existing BuildItem for this stock item
|
||||
# If it exists, increase the quantity
|
||||
build_item.quantity += quantity
|
||||
build_item.save()
|
||||
build_item.save()
|
||||
else:
|
||||
# Create a new BuildItem to allocate stock
|
||||
build_item = BuildItem.objects.create(
|
||||
quantity=quantity,
|
||||
**params
|
||||
)
|
||||
except (ValidationError, DjangoValidationError) as exc:
|
||||
# Catch model errors and re-throw as DRF errors
|
||||
raise ValidationError(detail=serializers.as_serializer_error(exc))
|
||||
@@ -1019,7 +1041,7 @@ class BuildItemSerializer(InvenTreeModelSerializer):
|
||||
"""Determine which extra details fields should be included"""
|
||||
part_detail = kwargs.pop('part_detail', True)
|
||||
location_detail = kwargs.pop('location_detail', True)
|
||||
stock_detail = kwargs.pop('stock_detail', False)
|
||||
stock_detail = kwargs.pop('stock_detail', True)
|
||||
build_detail = kwargs.pop('build_detail', False)
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
@@ -1055,11 +1077,13 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
|
||||
# Annotated fields
|
||||
'allocated',
|
||||
'in_production',
|
||||
'on_order',
|
||||
'available_stock',
|
||||
'available_substitute_stock',
|
||||
'available_variant_stock',
|
||||
'total_available_stock',
|
||||
'external_stock',
|
||||
]
|
||||
|
||||
read_only_fields = [
|
||||
@@ -1070,26 +1094,54 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
|
||||
quantity = serializers.FloatField()
|
||||
|
||||
bom_item = serializers.PrimaryKeyRelatedField(label=_('BOM Item'), read_only=True)
|
||||
|
||||
# Foreign key fields
|
||||
bom_item_detail = BomItemSerializer(source='bom_item', many=False, read_only=True, pricing=False)
|
||||
part_detail = PartSerializer(source='bom_item.sub_part', many=False, read_only=True, pricing=False)
|
||||
allocations = BuildItemSerializer(many=True, read_only=True)
|
||||
|
||||
# Annotated (calculated) fields
|
||||
allocated = serializers.FloatField(read_only=True)
|
||||
on_order = serializers.FloatField(read_only=True)
|
||||
available_stock = serializers.FloatField(read_only=True)
|
||||
allocated = serializers.FloatField(
|
||||
label=_('Allocated Stock'),
|
||||
read_only=True
|
||||
)
|
||||
|
||||
on_order = serializers.FloatField(
|
||||
label=_('On Order'),
|
||||
read_only=True
|
||||
)
|
||||
|
||||
in_production = serializers.FloatField(
|
||||
label=_('In Production'),
|
||||
read_only=True
|
||||
)
|
||||
|
||||
available_stock = serializers.FloatField(
|
||||
label=_('Available Stock'),
|
||||
read_only=True
|
||||
)
|
||||
|
||||
available_substitute_stock = serializers.FloatField(read_only=True)
|
||||
available_variant_stock = serializers.FloatField(read_only=True)
|
||||
total_available_stock = serializers.FloatField(read_only=True)
|
||||
external_stock = serializers.FloatField(read_only=True)
|
||||
|
||||
@staticmethod
|
||||
def annotate_queryset(queryset):
|
||||
def annotate_queryset(queryset, build=None):
|
||||
"""Add extra annotations to the queryset:
|
||||
|
||||
- allocated: Total stock quantity allocated against this build line
|
||||
- available: Total stock available for allocation against this build line
|
||||
- on_order: Total stock on order for this build line
|
||||
- in_production: Total stock currently in production for this build line
|
||||
|
||||
Arguments:
|
||||
queryset: The queryset to annotate
|
||||
build: The build order to filter against (optional)
|
||||
|
||||
Note: If the 'build' is provided, we can use it to filter available stock, depending on the specified location for the build
|
||||
|
||||
"""
|
||||
queryset = queryset.select_related(
|
||||
'build', 'bom_item',
|
||||
@@ -1126,6 +1178,23 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
|
||||
ref = 'bom_item__sub_part__'
|
||||
|
||||
stock_filter = None
|
||||
|
||||
if build is not None and build.take_from is not None:
|
||||
location = build.take_from
|
||||
# Filter by locations below the specified location
|
||||
stock_filter = Q(
|
||||
location__tree_id=location.tree_id,
|
||||
location__lft__gte=location.lft,
|
||||
location__rght__lte=location.rght,
|
||||
location__level__gte=location.level,
|
||||
)
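# Editor's note: the Q() above is the standard MPTT "self and descendants" window.
# Within a single tree (tree_id), every descendant's lft/rght values sit inside the
# parent's [lft, rght] interval. An equivalent (illustrative) spelling using
# django-mptt's own helper, assuming 'location' is an MPTT model instance:
#
#   stock_filter = Q(location__in=location.get_descendants(include_self=True))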
|
||||
|
||||
# Annotate the "in_production" quantity
|
||||
queryset = queryset.annotate(
|
||||
in_production=part.filters.annotate_in_production_quantity(reference=ref)
|
||||
)
|
||||
|
||||
# Annotate the "on_order" quantity
|
||||
# Difficulty: Medium
|
||||
queryset = queryset.annotate(
|
||||
@@ -1133,10 +1202,8 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
)
|
||||
|
||||
# Annotate the "available" quantity
|
||||
# TODO: In the future, this should be refactored.
|
||||
# TODO: Note that part.serializers.BomItemSerializer also has a similar annotation
|
||||
queryset = queryset.alias(
|
||||
total_stock=part.filters.annotate_total_stock(reference=ref),
|
||||
total_stock=part.filters.annotate_total_stock(reference=ref, filter=stock_filter),
|
||||
allocated_to_sales_orders=part.filters.annotate_sales_order_allocations(reference=ref),
|
||||
allocated_to_build_orders=part.filters.annotate_build_order_allocations(reference=ref),
|
||||
)
|
||||
@@ -1149,11 +1216,21 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
)
|
||||
)
|
||||
|
||||
external_stock_filter = Q(location__external=True)
|
||||
|
||||
if stock_filter:
|
||||
external_stock_filter &= stock_filter
|
||||
|
||||
# Add 'external stock' annotations
|
||||
queryset = queryset.annotate(
|
||||
external_stock=part.filters.annotate_total_stock(reference=ref, filter=external_stock_filter)
|
||||
)
|
||||
|
||||
ref = 'bom_item__substitutes__part__'
|
||||
|
||||
# Extract similar information for any 'substitute' parts
|
||||
queryset = queryset.alias(
|
||||
substitute_stock=part.filters.annotate_total_stock(reference=ref),
|
||||
substitute_stock=part.filters.annotate_total_stock(reference=ref, filter=stock_filter),
|
||||
substitute_build_allocations=part.filters.annotate_build_order_allocations(reference=ref),
|
||||
substitute_sales_allocations=part.filters.annotate_sales_order_allocations(reference=ref)
|
||||
)
|
||||
@@ -1167,7 +1244,7 @@ class BuildLineSerializer(InvenTreeModelSerializer):
|
||||
)
|
||||
|
||||
# Annotate the queryset with 'available variant stock' information
|
||||
variant_stock_query = part.filters.variant_stock_query(reference='bom_item__sub_part__')
|
||||
variant_stock_query = part.filters.variant_stock_query(reference='bom_item__sub_part__', filter=stock_filter)
|
||||
|
||||
queryset = queryset.alias(
|
||||
variant_stock_total=part.filters.annotate_variant_quantity(variant_stock_query, reference='quantity'),
|
||||
|
||||
@@ -270,7 +270,7 @@ src="{% static 'img/blank_image.png' %}"
|
||||
'{% url "api-build-detail" build.pk %}',
|
||||
{
|
||||
method: 'DELETE',
|
||||
title: '{% trans "Delete Build Order" %}',
|
||||
title: '{% trans "Delete Build Order" escape %}',
|
||||
redirect: "{% url 'build-index' %}",
|
||||
}
|
||||
);
|
||||
@@ -280,7 +280,7 @@ src="{% static 'img/blank_image.png' %}"
|
||||
<!-- Barcode functionality callbacks -->
|
||||
$('#show-qr-code').click(function() {
|
||||
showQRDialog(
|
||||
'{% trans "Build Order QR Code" %}',
|
||||
'{% trans "Build Order QR Code" escape %}',
|
||||
'{"build": {{ build.pk }} }'
|
||||
);
|
||||
});
|
||||
@@ -292,7 +292,7 @@ src="{% static 'img/blank_image.png' %}"
|
||||
build: {{ build.pk }},
|
||||
},
|
||||
{
|
||||
title: '{% trans "Link Barcode to Build Order" %}',
|
||||
title: '{% trans "Link Barcode to Build Order" escape %}',
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
@@ -200,6 +200,11 @@
|
||||
<div id='build-lines-toolbar'>
|
||||
{% include "filter_list.html" with id='buildlines' %}
|
||||
</div>
|
||||
{% if build.take_from %}
|
||||
<div class='alert alert-block alert-info'>
|
||||
{% trans "Available stock has been filtered based on specified source location for this build order" %}
|
||||
</div>
|
||||
{% endif %}
|
||||
<table class='table table-striped table-condensed' id='build-lines-table' data-toolbar='#build-lines-toolbar'></table>
|
||||
</div>
|
||||
</div>
|
||||
@@ -373,7 +378,14 @@ onPanelLoad('allocate', function() {
|
||||
loadBuildLineTable(
|
||||
"#build-lines-table",
|
||||
{{ build.pk }},
|
||||
{}
|
||||
{
|
||||
{% if build.take_from %}
|
||||
location: {{ build.take_from.pk }},
|
||||
{% endif %}
|
||||
{% if build.project_code %}
|
||||
project_code: {{ build.project_code.pk }},
|
||||
{% endif %}
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
@@ -419,8 +431,8 @@ function allocateSelectedLines() {
|
||||
|
||||
if (unallocated_lines.length == 0) {
|
||||
showAlertDialog(
|
||||
'{% trans "Allocation Complete" %}',
|
||||
'{% trans "All lines have been fully allocated" %}',
|
||||
'{% trans "Allocation Complete" escape %}',
|
||||
'{% trans "All lines have been fully allocated" escape %}',
|
||||
);
|
||||
} else {
|
||||
|
||||
|
||||
@@ -822,6 +822,58 @@ class BuildAllocationTest(BuildAPITest):
|
||||
allocation.refresh_from_db()
|
||||
self.assertEqual(allocation.quantity, 5000)
|
||||
|
||||
def test_fractional_allocation(self):
|
||||
"""Test allocation of a fractional quantity of stock items.
|
||||
|
||||
Ref: https://github.com/inventree/InvenTree/issues/6508
|
||||
"""
|
||||
|
||||
si = StockItem.objects.get(pk=2)
|
||||
|
||||
# Find line item
|
||||
line = self.build.build_lines.all().filter(bom_item__sub_part=si.part).first()
|
||||
|
||||
# Test a fractional quantity when the *available* quantity is greater than 1
|
||||
si.quantity = 100
|
||||
si.save()
|
||||
|
||||
response = self.post(
|
||||
self.url,
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"build_line": line.pk,
|
||||
"stock_item": si.pk,
|
||||
"quantity": 0.1616,
|
||||
}
|
||||
]
|
||||
},
|
||||
expected_code=201
|
||||
)
|
||||
|
||||
# Test a fractional quantity when the *available* quantity is less than 1
|
||||
si = StockItem.objects.create(
|
||||
part=si.part,
|
||||
quantity=0.3159,
|
||||
tree_id=0,
|
||||
level=0,
|
||||
lft=0, rght=0
|
||||
)
|
||||
|
||||
response = self.post(
|
||||
self.url,
|
||||
{
|
||||
"items": [
|
||||
{
|
||||
"build_line": line.pk,
|
||||
"stock_item": si.pk,
|
||||
"quantity": 0.1616,
|
||||
}
|
||||
]
|
||||
},
|
||||
expected_code=201,
|
||||
)
|
||||
|
||||
|
||||
class BuildOverallocationTest(BuildAPITest):
|
||||
"""Unit tests for over allocation of stock items against a build order.
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
"""Unit tests for the 'build' models"""
|
||||
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from django.test import TestCase
|
||||
@@ -14,8 +14,8 @@ from InvenTree import status_codes as status
|
||||
import common.models
|
||||
import build.tasks
|
||||
from build.models import Build, BuildItem, BuildLine, generate_next_build_reference
|
||||
from part.models import Part, BomItem, BomItemSubstitute
|
||||
from stock.models import StockItem
|
||||
from part.models import Part, BomItem, BomItemSubstitute, PartTestTemplate
|
||||
from stock.models import StockItem, StockItemTestResult
|
||||
from users.models import Owner
|
||||
|
||||
import logging
|
||||
@@ -55,6 +55,76 @@ class BuildTestBase(TestCase):
|
||||
trackable=True,
|
||||
)
|
||||
|
||||
# create one build with one required test template
|
||||
cls.tested_part_with_required_test = Part.objects.create(
|
||||
name="Part having required tests",
|
||||
description="Why does it matter what my description is?",
|
||||
assembly=True,
|
||||
trackable=True,
|
||||
)
|
||||
|
||||
cls.test_template_required = PartTestTemplate.objects.create(
|
||||
part=cls.tested_part_with_required_test,
|
||||
test_name="Required test",
|
||||
description="Required test template description",
|
||||
required=True,
|
||||
requires_value=False,
|
||||
requires_attachment=False
|
||||
)
|
||||
|
||||
ref = generate_next_build_reference()
|
||||
|
||||
cls.build_w_tests_trackable = Build.objects.create(
|
||||
reference=ref,
|
||||
title="This is a build",
|
||||
part=cls.tested_part_with_required_test,
|
||||
quantity=1,
|
||||
issued_by=get_user_model().objects.get(pk=1),
|
||||
)
|
||||
|
||||
cls.stockitem_with_required_test = StockItem.objects.create(
|
||||
part=cls.tested_part_with_required_test,
|
||||
quantity=1,
|
||||
is_building=True,
|
||||
serial=uuid.uuid4(),
|
||||
build=cls.build_w_tests_trackable
|
||||
)
|
||||
|
||||
# now create a part with a non-required test template
|
||||
cls.tested_part_wo_required_test = Part.objects.create(
|
||||
name="Part with one non.required test",
|
||||
description="Why does it matter what my description is?",
|
||||
assembly=True,
|
||||
trackable=True,
|
||||
)
|
||||
|
||||
cls.test_template_non_required = PartTestTemplate.objects.create(
|
||||
part=cls.tested_part_wo_required_test,
|
||||
test_name="Required test template",
|
||||
description="Required test template description",
|
||||
required=False,
|
||||
requires_value=False,
|
||||
requires_attachment=False
|
||||
)
|
||||
|
||||
ref = generate_next_build_reference()
|
||||
|
||||
cls.build_wo_tests_trackable = Build.objects.create(
|
||||
reference=ref,
|
||||
title="This is a build",
|
||||
part=cls.tested_part_wo_required_test,
|
||||
quantity=1,
|
||||
issued_by=get_user_model().objects.get(pk=1),
|
||||
)
|
||||
|
||||
cls.stockitem_wo_required_test = StockItem.objects.create(
|
||||
part=cls.tested_part_wo_required_test,
|
||||
quantity=1,
|
||||
is_building=True,
|
||||
serial=uuid.uuid4(),
|
||||
build=cls.build_wo_tests_trackable
|
||||
)
|
||||
|
||||
cls.sub_part_1 = Part.objects.create(
|
||||
name="Widget A",
|
||||
description="A widget",
|
||||
@@ -245,7 +315,7 @@ class BuildTest(BuildTestBase):
|
||||
|
||||
def test_init(self):
|
||||
"""Perform some basic tests before we start the ball rolling"""
|
||||
self.assertEqual(StockItem.objects.count(), 10)
|
||||
self.assertEqual(StockItem.objects.count(), 12)
|
||||
|
||||
# Build is PENDING
|
||||
self.assertEqual(self.build.status, status.BuildStatus.PENDING)
|
||||
@@ -558,7 +628,7 @@ class BuildTest(BuildTestBase):
|
||||
self.assertEqual(BuildItem.objects.count(), 0)
|
||||
|
||||
# New stock items should have been created!
|
||||
self.assertEqual(StockItem.objects.count(), 13)
|
||||
self.assertEqual(StockItem.objects.count(), 15)
|
||||
|
||||
# This stock item has been marked as "consumed"
|
||||
item = StockItem.objects.get(pk=self.stock_1_1.pk)
|
||||
@@ -573,6 +643,27 @@ class BuildTest(BuildTestBase):
|
||||
for output in outputs:
|
||||
self.assertFalse(output.is_building)
|
||||
|
||||
def test_complete_with_required_tests(self):
|
||||
"""Test the prevention completion when a required test is missing feature"""
|
||||
|
||||
# with required tests incomplete, completing the build output should fail
|
||||
common.models.InvenTreeSetting.set_setting('PREVENT_BUILD_COMPLETION_HAVING_INCOMPLETED_TESTS', True, change_user=None)
|
||||
|
||||
with self.assertRaises(ValidationError):
|
||||
self.build_w_tests_trackable.complete_build_output(self.stockitem_with_required_test, None)
|
||||
|
||||
# complete the required test and check that the output can now be completed
|
||||
StockItemTestResult.objects.create(
|
||||
stock_item=self.stockitem_with_required_test,
|
||||
template=self.test_template_required,
|
||||
result=True
|
||||
)
|
||||
|
||||
self.build_w_tests_trackable.complete_build_output(self.stockitem_with_required_test, None)
|
||||
|
||||
# an output with only a non-required test template can be completed without a test result
|
||||
self.build_wo_tests_trackable.complete_build_output(self.stockitem_wo_required_test, None)
|
||||
|
||||
def test_overdue_notification(self):
|
||||
"""Test sending of notifications when a build order is overdue."""
|
||||
self.build.target_date = datetime.now().date() - timedelta(days=1)
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
"""Basic unit tests for the BuildOrder app"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.test import tag
|
||||
from django.urls import reverse
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
@@ -40,7 +42,8 @@ class BuildTestSimple(InvenTreeTestCase):
|
||||
def test_url(self):
|
||||
"""Test URL lookup"""
|
||||
b1 = Build.objects.get(pk=1)
|
||||
self.assertEqual(b1.get_absolute_url(), '/build/1/')
|
||||
if settings.ENABLE_CLASSIC_FRONTEND:
|
||||
self.assertEqual(b1.get_absolute_url(), '/build/1/')
|
||||
|
||||
def test_is_complete(self):
|
||||
"""Test build completion status"""
|
||||
@@ -116,11 +119,13 @@ class TestBuildViews(InvenTreeTestCase):
|
||||
is_building=True,
|
||||
)
|
||||
|
||||
@tag('cui')
|
||||
def test_build_index(self):
|
||||
"""Test build index view."""
|
||||
response = self.client.get(reverse('build-index'))
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@tag('cui')
|
||||
def test_build_detail(self):
|
||||
"""Test the detail view for a Build object."""
|
||||
pk = 1
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
"""URL lookup for Build app."""
|
||||
|
||||
from django.urls import include, path, re_path
|
||||
from django.urls import include, path
|
||||
|
||||
from . import views
|
||||
|
||||
|
||||
build_urls = [
|
||||
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'^.*$', views.BuildDetail.as_view(), name='build-detail'),
|
||||
path('<int:pk>/', include([
|
||||
path('', views.BuildDetail.as_view(), name='build-detail'),
|
||||
])),
|
||||
|
||||
re_path(r'.*$', views.BuildIndex.as_view(), name='build-index'),
|
||||
path('', views.BuildIndex.as_view(), name='build-index'),
|
||||
]
|
||||
|
||||
@@ -22,7 +22,7 @@ class SettingsAdmin(ImportExportModelAdmin):
|
||||
class UserSettingsAdmin(ImportExportModelAdmin):
|
||||
"""Admin settings for InvenTreeUserSetting."""
|
||||
|
||||
list_display = ('key', 'value', 'user', )
|
||||
list_display = ('key', 'value', 'user')
|
||||
|
||||
def get_readonly_fields(self, request, obj=None): # pragma: no cover
|
||||
"""Prevent the 'key' field being edited once the setting is created."""
|
||||
@@ -40,23 +40,31 @@ class WebhookAdmin(ImportExportModelAdmin):
|
||||
class NotificationEntryAdmin(admin.ModelAdmin):
|
||||
"""Admin settings for NotificationEntry."""
|
||||
|
||||
list_display = ('key', 'uid', 'updated', )
|
||||
list_display = ('key', 'uid', 'updated')
|
||||
|
||||
|
||||
class NotificationMessageAdmin(admin.ModelAdmin):
|
||||
"""Admin settings for NotificationMessage."""
|
||||
|
||||
list_display = ('age_human', 'user', 'category', 'name', 'read', 'target_object', 'source_object', )
|
||||
list_display = (
|
||||
'age_human',
|
||||
'user',
|
||||
'category',
|
||||
'name',
|
||||
'read',
|
||||
'target_object',
|
||||
'source_object',
|
||||
)
|
||||
|
||||
list_filter = ('category', 'read', 'user', )
|
||||
list_filter = ('category', 'read', 'user')
|
||||
|
||||
search_fields = ('name', 'category', 'message', )
|
||||
search_fields = ('name', 'category', 'message')
|
||||
|
||||
|
||||
class NewsFeedEntryAdmin(admin.ModelAdmin):
|
||||
"""Admin settings for NewsFeedEntry."""
|
||||
|
||||
list_display = ('title', 'author', 'published', 'summary', )
|
||||
list_display = ('title', 'author', 'published', 'summary')
|
||||
|
||||
|
||||
admin.site.register(common.models.InvenTreeSetting, SettingsAdmin)
|
||||
|
||||
@@ -8,8 +8,11 @@ from django.urls import include, path, re_path
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt

import django_q.models
from django_q.tasks import async_task
from djmoney.contrib.exchange.models import ExchangeBackend, Rate
from drf_spectacular.utils import OpenApiResponse, extend_schema
from error_report.models import Error
from rest_framework import permissions, serializers
from rest_framework.exceptions import NotAcceptable, NotFound
from rest_framework.permissions import IsAdminUser
@@ -23,8 +26,13 @@ from InvenTree.api import BulkDeleteMixin, MetadataView
from InvenTree.config import CONFIG_LOOKUPS
from InvenTree.filters import ORDER_FILTER, SEARCH_ORDER_FILTER
from InvenTree.helpers import inheritors
from InvenTree.mixins import (ListAPI, ListCreateAPI, RetrieveAPI,
                              RetrieveUpdateAPI, RetrieveUpdateDestroyAPI)
from InvenTree.mixins import (
    ListAPI,
    ListCreateAPI,
    RetrieveAPI,
    RetrieveUpdateAPI,
    RetrieveUpdateDestroyAPI,
)
from InvenTree.permissions import IsStaffOrReadOnly, IsSuperuser
from plugin.models import NotificationUserSetting
from plugin.serializers import NotificationUserSettingSerializer
@@ -41,11 +49,20 @@ class CsrfExemptMixin(object):

class WebhookView(CsrfExemptMixin, APIView):
    """Endpoint for receiving webhooks."""

    authentication_classes = []
    permission_classes = []
    model_class = common.models.WebhookEndpoint
    run_async = False
    serializer_class = None

    @extend_schema(
        responses={
            200: OpenApiResponse(
                description='Any data can be posted to the endpoint - everything will be passed to the WebhookEndpoint model.'
            )
        }
    )
    def post(self, request, endpoint, *args, **kwargs):
        """Process incoming webhook."""
        # get webhook definition
@@ -66,8 +83,7 @@ class WebhookView(CsrfExemptMixin, APIView):
            async_task(self._process_payload, message.id)
        else:
            self._process_result(
                self.webhook.process_payload(message, payload, headers),
                message,
                self.webhook.process_payload(message, payload, headers), message
            )

        data = self.webhook.get_return(payload, headers, request)
@@ -76,8 +92,7 @@ class WebhookView(CsrfExemptMixin, APIView):
    def _process_payload(self, message_id):
        message = common.models.WebhookMessage.objects.get(message_id=message_id)
        self._process_result(
            self.webhook.process_payload(message, message.body, message.header),
            message,
            self.webhook.process_payload(message, message.body, message.header), message
        )

    def _process_result(self, result, message):
@@ -106,14 +121,13 @@ class WebhookView(CsrfExemptMixin, APIView):


class CurrencyExchangeView(APIView):
    """API endpoint for displaying currency information"""
    """API endpoint for displaying currency information."""

    permission_classes = [
        permissions.IsAuthenticated,
    ]
    permission_classes = [permissions.IsAuthenticated]
    serializer_class = None

    def get(self, request, format=None):
        """Return information on available currency conversions"""
        """Return information on available currency conversions."""
        # Extract a list of all available rates
        try:
            rates = Rate.objects.all()
@@ -133,7 +147,9 @@ class CurrencyExchangeView(APIView):
            updated = None

        response = {
            'base_currency': common.models.InvenTreeSetting.get_setting('INVENTREE_DEFAULT_CURRENCY', 'USD'),
            'base_currency': common.models.InvenTreeSetting.get_setting(
                'INVENTREE_DEFAULT_CURRENCY', 'USD'
            ),
            'exchange_rates': {},
            'updated': updated,
        }
@@ -150,20 +166,16 @@ class CurrencyRefreshView(APIView):
    User must be a 'staff' user to access this endpoint
    """

    permission_classes = [
        permissions.IsAuthenticated,
        permissions.IsAdminUser,
    ]
    permission_classes = [permissions.IsAuthenticated, permissions.IsAdminUser]
    serializer_class = None

    def post(self, request, *args, **kwargs):
        """Performing a POST request will update currency exchange rates"""
        """Performing a POST request will update currency exchange rates."""
        from InvenTree.tasks import update_exchange_rates

        update_exchange_rates(force=True)

        return Response({
            'success': 'Exchange rates updated',
        })
        return Response({'success': 'Exchange rates updated'})


class SettingsList(ListAPI):
@@ -174,25 +186,19 @@ class SettingsList(ListAPI):

    filter_backends = SEARCH_ORDER_FILTER

    ordering_fields = [
        'pk',
        'key',
        'name',
    ]
    ordering_fields = ['pk', 'key', 'name']

    search_fields = [
        'key',
    ]
    search_fields = ['key']


class GlobalSettingsList(SettingsList):
    """API endpoint for accessing a list of global settings objects."""

    queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith="_")
    queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith='_')
    serializer_class = common.serializers.GlobalSettingsSerializer

    def list(self, request, *args, **kwargs):
        """Ensure all global settings are created"""
        """Ensure all global settings are created."""
        common.models.InvenTreeSetting.build_default_values()
        return super().list(request, *args, **kwargs)

@@ -221,25 +227,24 @@ class GlobalSettingsDetail(RetrieveUpdateAPI):
    """

    lookup_field = 'key'
    queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith="_")
    queryset = common.models.InvenTreeSetting.objects.exclude(key__startswith='_')
    serializer_class = common.serializers.GlobalSettingsSerializer

    def get_object(self):
        """Attempt to find a global setting object with the provided key."""
        key = str(self.kwargs['key']).upper()

        if key.startswith('_') or key not in common.models.InvenTreeSetting.SETTINGS.keys():
        if (
            key.startswith('_')
            or key not in common.models.InvenTreeSetting.SETTINGS.keys()
        ):
            raise NotFound()

        return common.models.InvenTreeSetting.get_setting_object(
            key,
            cache=False, create=True
            key, cache=False, create=True
        )

    permission_classes = [
        permissions.IsAuthenticated,
        GlobalSettingsPermissions,
    ]
    permission_classes = [permissions.IsAuthenticated, GlobalSettingsPermissions]


class UserSettingsList(SettingsList):
@@ -249,7 +254,7 @@ class UserSettingsList(SettingsList):
    serializer_class = common.serializers.UserSettingsSerializer

    def list(self, request, *args, **kwargs):
        """Ensure all user settings are created"""
        """Ensure all user settings are created."""
        common.models.InvenTreeUserSetting.build_default_values(user=request.user)
        return super().list(request, *args, **kwargs)

@@ -294,18 +299,17 @@ class UserSettingsDetail(RetrieveUpdateAPI):
|
||||
"""Attempt to find a user setting object with the provided key."""
|
||||
key = str(self.kwargs['key']).upper()
|
||||
|
||||
if key.startswith('_') or key not in common.models.InvenTreeUserSetting.SETTINGS.keys():
|
||||
if (
|
||||
key.startswith('_')
|
||||
or key not in common.models.InvenTreeUserSetting.SETTINGS.keys()
|
||||
):
|
||||
raise NotFound()
|
||||
|
||||
return common.models.InvenTreeUserSetting.get_setting_object(
|
||||
key,
|
||||
user=self.request.user,
|
||||
cache=False, create=True
|
||||
key, user=self.request.user, cache=False, create=True
|
||||
)
|
||||
|
||||
permission_classes = [
|
||||
UserSettingsPermissions,
|
||||
]
|
||||
permission_classes = [UserSettingsPermissions]
|
||||
|
||||
|
||||
class NotificationUserSettingsList(SettingsList):
|
||||
@@ -334,39 +338,29 @@ class NotificationUserSettingsDetail(RetrieveUpdateAPI):
|
||||
|
||||
queryset = NotificationUserSetting.objects.all()
|
||||
serializer_class = NotificationUserSettingSerializer
|
||||
permission_classes = [UserSettingsPermissions, ]
|
||||
permission_classes = [UserSettingsPermissions]
|
||||
|
||||
|
||||
class NotificationMessageMixin:
|
||||
"""Generic mixin for NotificationMessage."""
|
||||
|
||||
queryset = common.models.NotificationMessage.objects.all()
|
||||
serializer_class = common.serializers.NotificationMessageSerializer
|
||||
permission_classes = [UserSettingsPermissions, ]
|
||||
permission_classes = [UserSettingsPermissions]
|
||||
|
||||
|
||||
class NotificationList(NotificationMessageMixin, BulkDeleteMixin, ListAPI):
|
||||
"""List view for all notifications of the current user."""
|
||||
|
||||
permission_classes = [permissions.IsAuthenticated, ]
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
ordering_fields = [
|
||||
'category',
|
||||
'name',
|
||||
'read',
|
||||
'creation',
|
||||
]
|
||||
ordering_fields = ['category', 'name', 'read', 'creation']
|
||||
|
||||
search_fields = [
|
||||
'name',
|
||||
'message',
|
||||
]
|
||||
search_fields = ['name', 'message']
|
||||
|
||||
filterset_fields = [
|
||||
'category',
|
||||
'read',
|
||||
]
|
||||
filterset_fields = ['category', 'read']
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
"""Only list notifications which apply to the current user."""
|
||||
@@ -380,7 +374,7 @@ class NotificationList(NotificationMessageMixin, BulkDeleteMixin, ListAPI):
|
||||
return queryset
|
||||
|
||||
def filter_delete_queryset(self, queryset, request):
|
||||
"""Ensure that the user can only delete their *own* notifications"""
|
||||
"""Ensure that the user can only delete their *own* notifications."""
|
||||
queryset = queryset.filter(user=request.user)
|
||||
return queryset
|
||||
|
||||
@@ -401,29 +395,29 @@ class NotificationReadAll(NotificationMessageMixin, RetrieveAPI):
|
||||
self.queryset.filter(user=request.user, read=False).update(read=True)
|
||||
return Response({'status': 'ok'})
|
||||
except Exception as exc:
|
||||
raise serializers.ValidationError(detail=serializers.as_serializer_error(exc))
|
||||
raise serializers.ValidationError(
|
||||
detail=serializers.as_serializer_error(exc)
|
||||
)
|
||||
|
||||
|
||||
class NewsFeedMixin:
|
||||
"""Generic mixin for NewsFeedEntry."""
|
||||
|
||||
queryset = common.models.NewsFeedEntry.objects.all()
|
||||
serializer_class = common.serializers.NewsFeedEntrySerializer
|
||||
permission_classes = [IsAdminUser, ]
|
||||
permission_classes = [IsAdminUser]
|
||||
|
||||
|
||||
class NewsFeedEntryList(NewsFeedMixin, BulkDeleteMixin, ListAPI):
|
||||
"""List view for all news items."""
|
||||
|
||||
filter_backends = ORDER_FILTER
|
||||
|
||||
ordering_fields = [
|
||||
'published',
|
||||
'author',
|
||||
'read',
|
||||
]
|
||||
ordering = '-published'
|
||||
|
||||
filterset_fields = [
|
||||
'read',
|
||||
]
|
||||
ordering_fields = ['published', 'author', 'read']
|
||||
|
||||
filterset_fields = ['read']
|
||||
|
||||
|
||||
class NewsFeedEntryDetail(NewsFeedMixin, RetrieveUpdateDestroyAPI):
|
||||
@@ -435,14 +429,14 @@ class ConfigList(ListAPI):
|
||||
|
||||
queryset = CONFIG_LOOKUPS
|
||||
serializer_class = common.serializers.ConfigSerializer
|
||||
permission_classes = [IsSuperuser, ]
|
||||
permission_classes = [IsSuperuser]
|
||||
|
||||
|
||||
class ConfigDetail(RetrieveAPI):
|
||||
"""Detail view for an individual configuration."""
|
||||
|
||||
serializer_class = common.serializers.ConfigSerializer
|
||||
permission_classes = [IsSuperuser, ]
|
||||
permission_classes = [IsSuperuser]
|
||||
|
||||
def get_object(self):
|
||||
"""Attempt to find a config object with the provided key."""
|
||||
@@ -458,10 +452,10 @@ class NotesImageList(ListCreateAPI):
|
||||
|
||||
queryset = common.models.NotesImage.objects.all()
|
||||
serializer_class = common.serializers.NotesImageSerializer
|
||||
permission_classes = [permissions.IsAuthenticated, ]
|
||||
permission_classes = [permissions.IsAuthenticated]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
"""Create (upload) a new notes image"""
|
||||
"""Create (upload) a new notes image."""
|
||||
image = serializer.save()
|
||||
image.user = self.request.user
|
||||
image.save()
|
||||
@@ -475,18 +469,13 @@ class ProjectCodeList(ListCreateAPI):
|
||||
permission_classes = [permissions.IsAuthenticated, IsStaffOrReadOnly]
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
ordering_fields = [
|
||||
'code',
|
||||
]
|
||||
ordering_fields = ['code']
|
||||
|
||||
search_fields = [
|
||||
'code',
|
||||
'description',
|
||||
]
|
||||
search_fields = ['code', 'description']
|
||||
|
||||
|
||||
class ProjectCodeDetail(RetrieveUpdateDestroyAPI):
|
||||
"""Detail view for a particular project code"""
|
||||
"""Detail view for a particular project code."""
|
||||
|
||||
queryset = common.models.ProjectCode.objects.all()
|
||||
serializer_class = common.serializers.ProjectCodeSerializer
|
||||
@@ -494,7 +483,7 @@ class ProjectCodeDetail(RetrieveUpdateDestroyAPI):
|
||||
|
||||
|
||||
class CustomUnitList(ListCreateAPI):
|
||||
"""List view for custom units"""
|
||||
"""List view for custom units."""
|
||||
|
||||
queryset = common.models.CustomUnit.objects.all()
|
||||
serializer_class = common.serializers.CustomUnitSerializer
|
||||
@@ -503,26 +492,116 @@ class CustomUnitList(ListCreateAPI):
|
||||
|
||||
|
||||
class CustomUnitDetail(RetrieveUpdateDestroyAPI):
|
||||
"""Detail view for a particular custom unit"""
|
||||
"""Detail view for a particular custom unit."""
|
||||
|
||||
queryset = common.models.CustomUnit.objects.all()
|
||||
serializer_class = common.serializers.CustomUnitSerializer
|
||||
permission_classes = [permissions.IsAuthenticated, IsStaffOrReadOnly]
|
||||
|
||||
|
||||
class ErrorMessageList(BulkDeleteMixin, ListAPI):
|
||||
"""List view for server error messages."""
|
||||
|
||||
queryset = Error.objects.all()
|
||||
serializer_class = common.serializers.ErrorMessageSerializer
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
ordering = '-when'
|
||||
|
||||
ordering_fields = ['when', 'info']
|
||||
|
||||
search_fields = ['info', 'data']
|
||||
|
||||
|
||||
class ErrorMessageDetail(RetrieveUpdateDestroyAPI):
|
||||
"""Detail view for a single error message."""
|
||||
|
||||
queryset = Error.objects.all()
|
||||
serializer_class = common.serializers.ErrorMessageSerializer
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
|
||||
|
||||
class BackgroundTaskOverview(APIView):
|
||||
"""Provides an overview of the background task queue status."""
|
||||
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
serializer_class = None
|
||||
|
||||
def get(self, request, format=None):
|
||||
"""Return information about the current status of the background task queue."""
|
||||
import django_q.models as q_models
|
||||
|
||||
import InvenTree.status
|
||||
|
||||
serializer = common.serializers.TaskOverviewSerializer({
|
||||
'is_running': InvenTree.status.is_worker_running(),
|
||||
'pending_tasks': q_models.OrmQ.objects.count(),
|
||||
'scheduled_tasks': q_models.Schedule.objects.count(),
|
||||
'failed_tasks': q_models.Failure.objects.count(),
|
||||
})
|
||||
|
||||
return Response(serializer.data)
|
||||
|
||||
|
||||
class PendingTaskList(BulkDeleteMixin, ListAPI):
|
||||
"""Provides a read-only list of currently pending tasks."""
|
||||
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
|
||||
queryset = django_q.models.OrmQ.objects.all()
|
||||
serializer_class = common.serializers.PendingTaskSerializer
|
||||
|
||||
|
||||
class ScheduledTaskList(ListAPI):
|
||||
"""Provides a read-only list of currently scheduled tasks."""
|
||||
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
|
||||
queryset = django_q.models.Schedule.objects.all()
|
||||
serializer_class = common.serializers.ScheduledTaskSerializer
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
ordering_fields = ['pk', 'func', 'last_run', 'next_run']
|
||||
|
||||
search_fields = ['func']
|
||||
|
||||
def get_queryset(self):
|
||||
"""Return annotated queryset."""
|
||||
queryset = super().get_queryset()
|
||||
return common.serializers.ScheduledTaskSerializer.annotate_queryset(queryset)
|
||||
|
||||
|
||||
class FailedTaskList(BulkDeleteMixin, ListAPI):
|
||||
"""Provides a read-only list of currently failed tasks."""
|
||||
|
||||
permission_classes = [permissions.IsAuthenticated, IsAdminUser]
|
||||
|
||||
queryset = django_q.models.Failure.objects.all()
|
||||
serializer_class = common.serializers.FailedTaskSerializer
|
||||
|
||||
filter_backends = SEARCH_ORDER_FILTER
|
||||
|
||||
ordering_fields = ['pk', 'func', 'started', 'stopped']
|
||||
|
||||
search_fields = ['func']
|
||||
|
||||
|
||||
class FlagList(ListAPI):
|
||||
"""List view for feature flags."""
|
||||
|
||||
queryset = settings.FLAGS
|
||||
serializer_class = common.serializers.FlagSerializer
|
||||
permission_classes = [permissions.AllowAny, ]
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
|
||||
class FlagDetail(RetrieveAPI):
|
||||
"""Detail view for an individual feature flag."""
|
||||
|
||||
serializer_class = common.serializers.FlagSerializer
|
||||
permission_classes = [permissions.AllowAny, ]
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
def get_object(self):
|
||||
"""Attempt to find a config object with the provided key."""
|
||||
@@ -535,97 +614,184 @@ class FlagDetail(RetrieveAPI):
|
||||
|
||||
settings_api_urls = [
|
||||
# User settings
|
||||
re_path(r'^user/', include([
|
||||
# User Settings Detail
|
||||
re_path(r'^(?P<key>\w+)/', UserSettingsDetail.as_view(), name='api-user-setting-detail'),
|
||||
|
||||
# User Settings List
|
||||
re_path(r'^.*$', UserSettingsList.as_view(), name='api-user-setting-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'user/',
|
||||
include([
|
||||
# User Settings Detail
|
||||
re_path(
|
||||
r'^(?P<key>\w+)/',
|
||||
UserSettingsDetail.as_view(),
|
||||
name='api-user-setting-detail',
|
||||
),
|
||||
# User Settings List
|
||||
path('', UserSettingsList.as_view(), name='api-user-setting-list'),
|
||||
]),
|
||||
),
|
||||
# Notification settings
|
||||
re_path(r'^notification/', include([
|
||||
# Notification Settings Detail
|
||||
path(r'<int:pk>/', NotificationUserSettingsDetail.as_view(), name='api-notification-setting-detail'),
|
||||
|
||||
# Notification Settings List
|
||||
re_path(r'^.*$', NotificationUserSettingsList.as_view(), name='api-notification-setting-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'notification/',
|
||||
include([
|
||||
# Notification Settings Detail
|
||||
path(
|
||||
'<int:pk>/',
|
||||
NotificationUserSettingsDetail.as_view(),
|
||||
name='api-notification-setting-detail',
|
||||
),
|
||||
# Notification Settings List
|
||||
path(
|
||||
'',
|
||||
NotificationUserSettingsList.as_view(),
|
||||
name='api-notification-setting-list',
|
||||
),
|
||||
]),
|
||||
),
|
||||
# Global settings
|
||||
re_path(r'^global/', include([
|
||||
# Global Settings Detail
|
||||
re_path(r'^(?P<key>\w+)/', GlobalSettingsDetail.as_view(), name='api-global-setting-detail'),
|
||||
|
||||
# Global Settings List
|
||||
re_path(r'^.*$', GlobalSettingsList.as_view(), name='api-global-setting-list'),
|
||||
])),
|
||||
path(
|
||||
'global/',
|
||||
include([
|
||||
# Global Settings Detail
|
||||
re_path(
|
||||
r'^(?P<key>\w+)/',
|
||||
GlobalSettingsDetail.as_view(),
|
||||
name='api-global-setting-detail',
|
||||
),
|
||||
# Global Settings List
|
||||
path('', GlobalSettingsList.as_view(), name='api-global-setting-list'),
|
||||
]),
|
||||
),
|
||||
]
|
||||
|
||||
common_api_urls = [
|
||||
# Webhooks
|
||||
path('webhook/<slug:endpoint>/', WebhookView.as_view(), name='api-webhook'),
|
||||
|
||||
# Uploaded images for notes
|
||||
re_path(r'^notes-image-upload/', NotesImageList.as_view(), name='api-notes-image-list'),
|
||||
|
||||
path('notes-image-upload/', NotesImageList.as_view(), name='api-notes-image-list'),
|
||||
# Background task information
|
||||
path(
|
||||
'background-task/',
|
||||
include([
|
||||
path('pending/', PendingTaskList.as_view(), name='api-pending-task-list'),
|
||||
path(
|
||||
'scheduled/',
|
||||
ScheduledTaskList.as_view(),
|
||||
name='api-scheduled-task-list',
|
||||
),
|
||||
path('failed/', FailedTaskList.as_view(), name='api-failed-task-list'),
|
||||
path('', BackgroundTaskOverview.as_view(), name='api-task-overview'),
|
||||
]),
|
||||
),
|
||||
path(
|
||||
'error-report/',
|
||||
include([
|
||||
path('<int:pk>/', ErrorMessageDetail.as_view(), name='api-error-detail'),
|
||||
path('', ErrorMessageList.as_view(), name='api-error-list'),
|
||||
]),
|
||||
),
|
||||
# Project codes
|
||||
re_path(r'^project-code/', include([
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'^metadata/', MetadataView.as_view(), {'model': common.models.ProjectCode}, name='api-project-code-metadata'),
|
||||
re_path(r'^.*$', ProjectCodeDetail.as_view(), name='api-project-code-detail'),
|
||||
])),
|
||||
re_path(r'^.*$', ProjectCodeList.as_view(), name='api-project-code-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'project-code/',
|
||||
include([
|
||||
path(
|
||||
'<int:pk>/',
|
||||
include([
|
||||
path(
|
||||
'metadata/',
|
||||
MetadataView.as_view(),
|
||||
{'model': common.models.ProjectCode},
|
||||
name='api-project-code-metadata',
|
||||
),
|
||||
path(
|
||||
'', ProjectCodeDetail.as_view(), name='api-project-code-detail'
|
||||
),
|
||||
]),
|
||||
),
|
||||
path('', ProjectCodeList.as_view(), name='api-project-code-list'),
|
||||
]),
|
||||
),
|
||||
# Custom physical units
|
||||
re_path(r'^units/', include([
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'^.*$', CustomUnitDetail.as_view(), name='api-custom-unit-detail'),
|
||||
])),
|
||||
re_path(r'^.*$', CustomUnitList.as_view(), name='api-custom-unit-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'units/',
|
||||
include([
|
||||
path(
|
||||
'<int:pk>/',
|
||||
include([
|
||||
path('', CustomUnitDetail.as_view(), name='api-custom-unit-detail')
|
||||
]),
|
||||
),
|
||||
path('', CustomUnitList.as_view(), name='api-custom-unit-list'),
|
||||
]),
|
||||
),
|
||||
# Currencies
|
||||
re_path(r'^currency/', include([
|
||||
re_path(r'^exchange/', CurrencyExchangeView.as_view(), name='api-currency-exchange'),
|
||||
re_path(r'^refresh/', CurrencyRefreshView.as_view(), name='api-currency-refresh'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'currency/',
|
||||
include([
|
||||
path(
|
||||
'exchange/',
|
||||
CurrencyExchangeView.as_view(),
|
||||
name='api-currency-exchange',
|
||||
),
|
||||
path(
|
||||
'refresh/', CurrencyRefreshView.as_view(), name='api-currency-refresh'
|
||||
),
|
||||
]),
|
||||
),
|
||||
# Notifications
|
||||
re_path(r'^notifications/', include([
|
||||
# Individual purchase order detail URLs
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'.*$', NotificationDetail.as_view(), name='api-notifications-detail'),
|
||||
])),
|
||||
# Read all
|
||||
re_path(r'^readall/', NotificationReadAll.as_view(), name='api-notifications-readall'),
|
||||
|
||||
# Notification messages list
|
||||
re_path(r'^.*$', NotificationList.as_view(), name='api-notifications-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'notifications/',
|
||||
include([
|
||||
# Individual purchase order detail URLs
|
||||
path(
|
||||
'<int:pk>/',
|
||||
include([
|
||||
path(
|
||||
'',
|
||||
NotificationDetail.as_view(),
|
||||
name='api-notifications-detail',
|
||||
)
|
||||
]),
|
||||
),
|
||||
# Read all
|
||||
path(
|
||||
'readall/',
|
||||
NotificationReadAll.as_view(),
|
||||
name='api-notifications-readall',
|
||||
),
|
||||
# Notification messages list
|
||||
path('', NotificationList.as_view(), name='api-notifications-list'),
|
||||
]),
|
||||
),
|
||||
# News
|
||||
re_path(r'^news/', include([
|
||||
path(r'<int:pk>/', include([
|
||||
re_path(r'.*$', NewsFeedEntryDetail.as_view(), name='api-news-detail'),
|
||||
])),
|
||||
re_path(r'^.*$', NewsFeedEntryList.as_view(), name='api-news-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'news/',
|
||||
include([
|
||||
path(
|
||||
'<int:pk>/',
|
||||
include([
|
||||
path('', NewsFeedEntryDetail.as_view(), name='api-news-detail')
|
||||
]),
|
||||
),
|
||||
path('', NewsFeedEntryList.as_view(), name='api-news-list'),
|
||||
]),
|
||||
),
|
||||
# Flags
|
||||
path('flags/', include([
|
||||
path('<str:key>/', FlagDetail.as_view(), name='api-flag-detail'),
|
||||
re_path(r'^.*$', FlagList.as_view(), name='api-flag-list'),
|
||||
])),
|
||||
|
||||
path(
|
||||
'flags/',
|
||||
include([
|
||||
path('<str:key>/', FlagDetail.as_view(), name='api-flag-detail'),
|
||||
path('', FlagList.as_view(), name='api-flag-list'),
|
||||
]),
|
||||
),
|
||||
# Status
|
||||
path('generic/status/', include([
|
||||
path(f'<str:{StatusView.MODEL_REF}>/', include([
|
||||
path('', StatusView.as_view(), name='api-status'),
|
||||
])),
|
||||
path('', AllStatusViews.as_view(), name='api-status-all'),
|
||||
])),
|
||||
path(
|
||||
'generic/status/',
|
||||
include([
|
||||
path(
|
||||
f'<str:{StatusView.MODEL_REF}>/',
|
||||
include([path('', StatusView.as_view(), name='api-status')]),
|
||||
),
|
||||
path('', AllStatusViews.as_view(), name='api-status-all'),
|
||||
]),
|
||||
),
|
||||
]
|
||||
|
||||
admin_api_urls = [
|
||||
|
||||
@@ -4,7 +4,7 @@ import logging

from django.apps import AppConfig

from InvenTree.ready import isImportingData
import InvenTree.ready

logger = logging.getLogger('inventree')

@@ -19,6 +19,9 @@ class CommonConfig(AppConfig):

    def ready(self):
        """Initialize restart flag clearance on startup."""
        if InvenTree.ready.isRunningMigrations():
            return

        self.clear_restart_flag()

    def clear_restart_flag(self):
@@ -26,10 +29,14 @@ class CommonConfig(AppConfig):
        try:
            import common.models

            if common.models.InvenTreeSetting.get_setting('SERVER_RESTART_REQUIRED', backup_value=False, create=False, cache=False):
                logger.info("Clearing SERVER_RESTART_REQUIRED flag")
            if common.models.InvenTreeSetting.get_setting(
                'SERVER_RESTART_REQUIRED', backup_value=False, create=False, cache=False
            ):
                logger.info('Clearing SERVER_RESTART_REQUIRED flag')

                if not isImportingData():
                    common.models.InvenTreeSetting.set_setting('SERVER_RESTART_REQUIRED', False, None)
                if not InvenTree.ready.isImportingData():
                    common.models.InvenTreeSetting.set_setting(
                        'SERVER_RESTART_REQUIRED', False, None
                    )
        except Exception:
            pass

@@ -49,12 +49,12 @@ class FileManager:
|
||||
ext = os.path.splitext(file.name)[-1].lower().replace('.', '')
|
||||
|
||||
try:
|
||||
if ext in ['csv', 'tsv', ]:
|
||||
if ext in ['csv', 'tsv']:
|
||||
# These file formats need string decoding
|
||||
raw_data = file.read().decode('utf-8')
|
||||
# Reset stream position to beginning of file
|
||||
file.seek(0)
|
||||
elif ext in ['xls', 'xlsx', 'json', 'yaml', ]:
|
||||
elif ext in ['xls', 'xlsx', 'json', 'yaml']:
|
||||
raw_data = file.read()
|
||||
# Reset stream position to beginning of file
|
||||
file.seek(0)
|
||||
@@ -81,7 +81,12 @@ class FileManager:
|
||||
|
||||
def update_headers(self):
|
||||
"""Update headers."""
|
||||
self.HEADERS = self.REQUIRED_HEADERS + self.ITEM_MATCH_HEADERS + self.OPTIONAL_MATCH_HEADERS + self.OPTIONAL_HEADERS
|
||||
self.HEADERS = (
|
||||
self.REQUIRED_HEADERS
|
||||
+ self.ITEM_MATCH_HEADERS
|
||||
+ self.OPTIONAL_MATCH_HEADERS
|
||||
+ self.OPTIONAL_HEADERS
|
||||
)
|
||||
|
||||
def setup(self):
|
||||
"""Setup headers should be overridden in usage to set the Different Headers."""
|
||||
@@ -149,15 +154,9 @@ class FileManager:
|
||||
break
|
||||
|
||||
if not guess_exists:
|
||||
headers.append({
|
||||
'name': header,
|
||||
'guess': guess
|
||||
})
|
||||
headers.append({'name': header, 'guess': guess})
|
||||
else:
|
||||
headers.append({
|
||||
'name': header,
|
||||
'guess': None
|
||||
})
|
||||
headers.append({'name': header, 'guess': None})
|
||||
|
||||
return headers
|
||||
|
||||
@@ -180,7 +179,6 @@ class FileManager:
|
||||
rows = []
|
||||
|
||||
for i in range(self.row_count()):
|
||||
|
||||
data = list(self.get_row_data(i))
|
||||
|
||||
# Is the row completely empty? Skip!
|
||||
@@ -203,10 +201,7 @@ class FileManager:
|
||||
if empty:
|
||||
continue
|
||||
|
||||
row = {
|
||||
'data': data,
|
||||
'index': i
|
||||
}
|
||||
row = {'data': data, 'index': i}
|
||||
|
||||
rows.append(row)
|
||||
|
||||
|
||||
@@ -9,10 +9,7 @@ from .files import FileManager
|
||||
class UploadFileForm(forms.Form):
|
||||
"""Step 1 of FileManagementFormView."""
|
||||
|
||||
file = forms.FileField(
|
||||
label=_('File'),
|
||||
help_text=_('Select file to upload'),
|
||||
)
|
||||
file = forms.FileField(label=_('File'), help_text=_('Select file to upload'))
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Update label and help_text."""
|
||||
@@ -67,9 +64,7 @@ class MatchFieldForm(forms.Form):
|
||||
self.fields[field_name] = forms.ChoiceField(
|
||||
choices=[('', '-' * 10)] + headers_choices,
|
||||
required=False,
|
||||
widget=forms.Select(attrs={
|
||||
'class': 'select fieldselect',
|
||||
})
|
||||
widget=forms.Select(attrs={'class': 'select fieldselect'}),
|
||||
)
|
||||
if col['guess']:
|
||||
self.fields[field_name].initial = col['guess']
|
||||
@@ -107,7 +102,9 @@ class MatchItemForm(forms.Form):
|
||||
field_name = col_guess.lower() + '-' + str(row['index'])
|
||||
|
||||
# check if field def was overridden
|
||||
overriden_field = self.get_special_field(col_guess, row, file_manager)
|
||||
overriden_field = self.get_special_field(
|
||||
col_guess, row, file_manager
|
||||
)
|
||||
if overriden_field:
|
||||
self.fields[field_name] = overriden_field
|
||||
|
||||
@@ -117,23 +114,23 @@ class MatchItemForm(forms.Form):
|
||||
value = row.get(col_guess.lower(), '')
|
||||
# Set field input box
|
||||
self.fields[field_name] = forms.CharField(
|
||||
required=True,
|
||||
initial=value,
|
||||
required=True, initial=value
|
||||
)
|
||||
|
||||
# Create item selection box
|
||||
elif col_guess in file_manager.OPTIONAL_MATCH_HEADERS:
|
||||
# Get item options
|
||||
item_options = [(option.id, option) for option in row['match_options_' + col_guess]]
|
||||
item_options = [
|
||||
(option.id, option)
|
||||
for option in row['match_options_' + col_guess]
|
||||
]
|
||||
# Get item match
|
||||
item_match = row['match_' + col_guess]
|
||||
# Set field select box
|
||||
self.fields[field_name] = forms.ChoiceField(
|
||||
choices=[('', '-' * 10)] + item_options,
|
||||
required=False,
|
||||
widget=forms.Select(attrs={
|
||||
'class': 'select bomselect',
|
||||
})
|
||||
widget=forms.Select(attrs={'class': 'select bomselect'}),
|
||||
)
|
||||
# Update select box when match was found
|
||||
if item_match:
|
||||
@@ -142,7 +139,9 @@ class MatchItemForm(forms.Form):
|
||||
# Create item selection box
|
||||
elif col_guess in file_manager.ITEM_MATCH_HEADERS:
|
||||
# Get item options
|
||||
item_options = [(option.id, option) for option in row['item_options']]
|
||||
item_options = [
|
||||
(option.id, option) for option in row['item_options']
|
||||
]
|
||||
# Get item match
|
||||
item_match = row['item_match']
|
||||
# Set field name
|
||||
@@ -151,9 +150,7 @@ class MatchItemForm(forms.Form):
|
||||
self.fields[field_name] = forms.ChoiceField(
|
||||
choices=[('', '-' * 10)] + item_options,
|
||||
required=False,
|
||||
widget=forms.Select(attrs={
|
||||
'class': 'select bomselect',
|
||||
})
|
||||
widget=forms.Select(attrs={'class': 'select bomselect'}),
|
||||
)
|
||||
# Update select box when match was found
|
||||
if item_match:
|
||||
@@ -169,8 +166,7 @@ class MatchItemForm(forms.Form):
|
||||
value = row.get(col_guess.lower(), '')
|
||||
# Set field input box
|
||||
self.fields[field_name] = forms.CharField(
|
||||
required=False,
|
||||
initial=value,
|
||||
required=False, initial=value
|
||||
)
|
||||
|
||||
def get_special_field(self, col_guess, row, file_manager):
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.