mirror of
https://github.com/inventree/InvenTree.git
synced 2025-12-19 13:20:37 -06:00
Compare commits
15 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
62f44d06bf | ||
|
|
ee5238b13e | ||
|
|
2db9b28063 | ||
|
|
30beb12c81 | ||
|
|
644bcb263f | ||
|
|
2ae1d1c663 | ||
|
|
951225b420 | ||
|
|
836ec3289d | ||
|
|
3482b81e8a | ||
|
|
fb97385b23 | ||
|
|
276ad572c5 | ||
|
|
1e7f4dcb4b | ||
|
|
72bec42c0f | ||
|
|
8e5202b39a | ||
|
|
63664714ca |
@@ -1,43 +0,0 @@
|
||||
# Dockerfile for the InvenTree devcontainer
|
||||
|
||||
# In contrast with the "production" image (which is based on an Alpine image)
|
||||
# we use a Debian-based image for the devcontainer
|
||||
|
||||
FROM mcr.microsoft.com/devcontainers/python:3.11-bookworm@sha256:5140e54af7a0399a4932dd4c4653d085fcf451b093d7424867df1828ffbb9b81
|
||||
|
||||
# InvenTree paths
|
||||
ENV INVENTREE_HOME="/home/inventree"
|
||||
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/dev"
|
||||
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
|
||||
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
|
||||
ENV INVENTREE_BACKUP_DIR="${INVENTREE_DATA_DIR}/backup"
|
||||
ENV INVENTREE_PLUGIN_DIR="${INVENTREE_DATA_DIR}/plugins"
|
||||
ENV INVENTREE_CONFIG_FILE="${INVENTREE_DATA_DIR}/config.yaml"
|
||||
ENV INVENTREE_SECRET_KEY_FILE="${INVENTREE_DATA_DIR}/secret_key.txt"
|
||||
ENV INVENTREE_OIDC_PRIVATE_KEY_FILE="${INVENTREE_DATA_DIR}/oidc.pem"
|
||||
|
||||
# Required for running playwright within devcontainer
|
||||
ENV DISPLAY=:0
|
||||
ENV LIBGL_ALWAYS_INDIRECT=1
|
||||
|
||||
COPY contrib/container/init.sh ./
|
||||
RUN chmod +x init.sh
|
||||
|
||||
# Install required base packages
|
||||
RUN apt update && apt install -y \
|
||||
python3.11-dev python3.11-venv \
|
||||
postgresql-client \
|
||||
libldap2-dev libsasl2-dev \
|
||||
libpango1.0-0 libcairo2 \
|
||||
poppler-utils weasyprint
|
||||
|
||||
# Install packages required for frontend development
|
||||
RUN apt install -y \
|
||||
yarn nodejs npm
|
||||
|
||||
# Update to the latest stable node version
|
||||
RUN npm install -g n --ignore-scripts && n lts
|
||||
|
||||
RUN yarn config set network-timeout 600000 -g
|
||||
|
||||
ENTRYPOINT ["/bin/bash", "./init.sh"]
|
||||
@@ -1,77 +0,0 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/python-3
|
||||
{
|
||||
"name": "InvenTree devcontainer",
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
"service": "inventree",
|
||||
"overrideCommand": true,
|
||||
"workspaceFolder": "/home/inventree/",
|
||||
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"python.defaultInterpreterPath": "${containerWorkspaceFolder}/dev/venv/bin/python",
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.pylintEnabled": false,
|
||||
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
|
||||
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
|
||||
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
|
||||
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
|
||||
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
|
||||
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
|
||||
},
|
||||
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"batisteo.vscode-django",
|
||||
"eamodio.gitlens",
|
||||
"biomejs.biome"
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
"forwardPorts": [5173, 5432, 6379, 8000, 8080],
|
||||
"portsAttributes": {
|
||||
"5173": {
|
||||
"label": "Vite Server"
|
||||
},
|
||||
"5432": {
|
||||
"label": "PostgreSQL Database"
|
||||
},
|
||||
"6379": {
|
||||
"label": "Redis Server"
|
||||
},
|
||||
"8000": {
|
||||
"label": "InvenTree Server"
|
||||
},
|
||||
"8080": {
|
||||
"label": "mkdocs server"
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": ".devcontainer/postCreateCommand.sh",
|
||||
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "vscode",
|
||||
"containerUser": "vscode",
|
||||
|
||||
"remoteEnv": {
|
||||
|
||||
// Python config
|
||||
"PIP_USER": "no",
|
||||
|
||||
// used to load the venv into the PATH and activate it
|
||||
// Ref: https://stackoverflow.com/a/56286534
|
||||
"VIRTUAL_ENV": "${containerWorkspaceFolder}/dev/venv",
|
||||
"PATH": "${containerWorkspaceFolder}/dev/venv/bin:${containerEnv:PATH}"
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
services:
|
||||
db:
|
||||
image: postgres:15
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- 5432/tcp
|
||||
volumes:
|
||||
- ../dev-db/:/var/lib/postgresql/data:z
|
||||
environment:
|
||||
POSTGRES_DB: inventree
|
||||
POSTGRES_USER: inventree_user
|
||||
POSTGRES_PASSWORD: inventree_password
|
||||
|
||||
redis:
|
||||
image: redis:7.0
|
||||
restart: always
|
||||
ports:
|
||||
- 6379
|
||||
|
||||
inventree:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .devcontainer/Dockerfile
|
||||
volumes:
|
||||
- ../:/home/inventree:z
|
||||
- /tmp/.X11-unix:/tmp/.X11-unix
|
||||
|
||||
environment:
|
||||
INVENTREE_DB_ENGINE: postgresql
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_DB_HOST: db
|
||||
INVENTREE_DB_USER: inventree_user
|
||||
INVENTREE_DB_PASSWORD: inventree_password
|
||||
INVENTREE_DEBUG: True
|
||||
INVENTREE_CACHE_HOST: redis
|
||||
INVENTREE_CACHE_PORT: 6379
|
||||
INVENTREE_PLUGINS_ENABLED: True
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
INVENTREE_CORS_ORIGIN_ALLOW_ALL: True
|
||||
INVENTREE_PY_ENV: /home/inventree/dev/venv
|
||||
INVENTREE_DEVCONTAINER: True
|
||||
|
||||
depends_on:
|
||||
- db
|
||||
@@ -1,39 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
echo "Running postCreateCommand.sh ..."
|
||||
|
||||
# Avoiding Dubious Ownership in Dev Containers for setup commands that use git
|
||||
git config --global --add safe.directory /home/inventree
|
||||
|
||||
# create venv
|
||||
python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
|
||||
. /home/inventree/dev/venv/bin/activate
|
||||
|
||||
# remove existing gitconfig created by "Avoiding Dubious Ownership" step
|
||||
# so that it gets copied from host to the container to have your global
|
||||
# git config in container
|
||||
rm -f /home/vscode/.gitconfig
|
||||
|
||||
# Fix issue related to CFFI version mismatch
|
||||
pip uninstall cffi -y
|
||||
sudo apt remove --purge -y python3-cffi
|
||||
pip install --no-cache-dir --force-reinstall --ignore-installed cffi
|
||||
|
||||
# Upgrade pip
|
||||
python3 -m pip install --upgrade pip
|
||||
|
||||
# Ensure the correct invoke is available
|
||||
pip3 install --ignore-installed --upgrade invoke Pillow
|
||||
|
||||
# install base level packages
|
||||
pip3 install -Ur contrib/container/requirements.txt --require-hashes
|
||||
|
||||
# Run initial InvenTree server setup
|
||||
invoke update -s
|
||||
|
||||
# Configure dev environment
|
||||
invoke dev.setup-dev
|
||||
|
||||
# Install required frontend packages
|
||||
invoke int.frontend-install
|
||||
@@ -1,79 +0,0 @@
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- stable
|
||||
- refs/tags/*
|
||||
paths:
|
||||
include:
|
||||
- src/backend
|
||||
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
Python39:
|
||||
PYTHON_VERSION: '3.9'
|
||||
maxParallel: 3
|
||||
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '$(PYTHON_VERSION)'
|
||||
architecture: 'x64'
|
||||
|
||||
- task: PythonScript@0
|
||||
displayName: 'Export project path'
|
||||
inputs:
|
||||
scriptSource: 'inline'
|
||||
script: |
|
||||
"""Search all subdirectories for `manage.py`."""
|
||||
from glob import iglob
|
||||
from os import path
|
||||
# Python >= 3.5
|
||||
manage_py = next(iglob(path.join('**', 'manage.py'), recursive=True), None)
|
||||
if not manage_py:
|
||||
raise SystemExit('Could not find a Django project')
|
||||
project_location = path.dirname(path.abspath(manage_py))
|
||||
print('Found Django project in', project_location)
|
||||
print('##vso[task.setvariable variable=projectRoot]{}'.format(project_location))
|
||||
|
||||
- script: |
|
||||
python -m pip install --upgrade pip setuptools wheel uv
|
||||
uv pip install --require-hashes -r src/backend/requirements.txt
|
||||
uv pip install --require-hashes -r src/backend/requirements-dev.txt
|
||||
sudo apt-get install poppler-utils
|
||||
sudo apt-get install libpoppler-dev
|
||||
uv pip install unittest-xml-reporting coverage invoke
|
||||
displayName: 'Install prerequisites'
|
||||
env:
|
||||
UV_SYSTEM_PYTHON: 1
|
||||
|
||||
- script: |
|
||||
pushd '$(projectRoot)'
|
||||
invoke update --uv
|
||||
coverage run manage.py test --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner --no-input
|
||||
coverage xml -i
|
||||
displayName: 'Run tests'
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: sqlite3
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_MEDIA_ROOT: ./media
|
||||
INVENTREE_STATIC_ROOT: ./static
|
||||
INVENTREE_BACKUP_DIR: ./backup
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
INVENTREE_PLUGINS_ENABLED: true
|
||||
UV_SYSTEM_PYTHON: 1
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: INFO
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: "**/TEST-*.xml"
|
||||
testRunTitle: 'Python $(PYTHON_VERSION)'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@2
|
||||
inputs:
|
||||
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
|
||||
25
.eslintrc.yml
Normal file
25
.eslintrc.yml
Normal file
@@ -0,0 +1,25 @@
|
||||
env:
|
||||
commonjs: false
|
||||
browser: true
|
||||
es2021: true
|
||||
jquery: true
|
||||
extends:
|
||||
- google
|
||||
parserOptions:
|
||||
ecmaVersion: 12
|
||||
rules:
|
||||
no-var: off
|
||||
guard-for-in: off
|
||||
no-trailing-spaces: off
|
||||
camelcase: off
|
||||
padded-blocks: off
|
||||
prefer-const: off
|
||||
max-len: off
|
||||
require-jsdoc: off
|
||||
valid-jsdoc: off
|
||||
no-multiple-empty-lines: off
|
||||
comma-dangle: off
|
||||
prefer-spread: off
|
||||
indent:
|
||||
- error
|
||||
- 4
|
||||
@@ -1,3 +0,0 @@
|
||||
# .git-blame-ignore-revs
|
||||
# Code Structure refactor https://github.com/inventree/InvenTree/pull/5582
|
||||
0bace3f3afaa213c63b5dcc70103f0d232637a9a
|
||||
11
.github/CODEOWNERS
vendored
11
.github/CODEOWNERS
vendored
@@ -2,12 +2,5 @@
|
||||
* @SchrodingersGat
|
||||
|
||||
# plugins are co-owned
|
||||
/src/backend/InvenTree/plugin/ @SchrodingersGat @matmair
|
||||
/src/backend/InvenTree/plugins/ @SchrodingersGat @matmair
|
||||
|
||||
# Installer functions
|
||||
.pkgr.yml @matmair
|
||||
Procfile @matmair
|
||||
runtime.txt @matmair
|
||||
/contrib/installer @matmair
|
||||
/contrib/packager.io @matmair
|
||||
/InvenTree/plugin/ @SchrodingersGat @matmair
|
||||
/InvenTree/plugins/ @SchrodingersGat @matmair
|
||||
|
||||
5
.github/FUNDING.yml
vendored
5
.github/FUNDING.yml
vendored
@@ -1,3 +1,2 @@
|
||||
polar: inventree
|
||||
github: inventree
|
||||
custom: [paypal.me/inventree]
|
||||
patreon: inventree
|
||||
ko_fi: inventree
|
||||
|
||||
47
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
47
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,47 @@
|
||||
---
|
||||
name: Bug
|
||||
about: Create a bug report to help us improve InvenTree!
|
||||
title: "[BUG] Enter bug description"
|
||||
labels: bug, question
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
<!---
|
||||
Everything inside these brackets is hidden - please remove them where you fill out information.
|
||||
--->
|
||||
|
||||
|
||||
**Describe the bug**
|
||||
<!---
|
||||
A clear and concise description of what the bug is.
|
||||
--->
|
||||
|
||||
**Steps to Reproduce**
|
||||
|
||||
Steps to reproduce the behavior:
|
||||
<!---
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
--->
|
||||
|
||||
**Expected behavior**
|
||||
<!---
|
||||
A clear and concise description of what you expected to happen.
|
||||
--->
|
||||
|
||||
<!---
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
--->
|
||||
|
||||
**Deployment Method**
|
||||
- [ ] Docker
|
||||
- [ ] Bare Metal
|
||||
|
||||
**Version Information**
|
||||
<!---
|
||||
You can get this by going to the "About InvenTree" section in the upper right corner and clicking on to the "copy version information"
|
||||
--->
|
||||
83
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
83
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
@@ -1,83 +0,0 @@
|
||||
name: "Bug"
|
||||
description: "Create a bug report to help us improve InvenTree!"
|
||||
labels: ["bug", "question", "triage:not-checked"]
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: no-duplicate-issues
|
||||
attributes:
|
||||
label: "Please verify that this bug has NOT been raised before."
|
||||
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=) and read the [Frequently Asked Questions](https://docs.inventree.org/en/latest/sref/faq)!"
|
||||
options:
|
||||
- label: "I checked and didn't find a similar issue"
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Describe the bug*"
|
||||
description: "A clear and concise description of what the bug is."
|
||||
- type: textarea
|
||||
id: steps-to-reproduce
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Steps to Reproduce"
|
||||
description: "Steps to reproduce the behaviour, please make it detailed"
|
||||
placeholder: |
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See the error
|
||||
- type: textarea
|
||||
id: expected-behavior
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Expected behaviour"
|
||||
description: "A clear and concise description of what you expected to happen."
|
||||
placeholder: "..."
|
||||
- type: dropdown
|
||||
id: deployment
|
||||
attributes:
|
||||
label: "Deployment Method"
|
||||
options:
|
||||
- Docker
|
||||
- Package
|
||||
- Bare metal
|
||||
- Other - added info in Steps to Reproduce
|
||||
- type: textarea
|
||||
id: version-info
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Version Information"
|
||||
description: "The version info block."
|
||||
placeholder: "You can get this by going to the `About InvenTree` section in the upper right corner and clicking on the `copy version information` button"
|
||||
- type: dropdown
|
||||
id: tried-reproduce
|
||||
attributes:
|
||||
label: Try to reproduce on the demo site
|
||||
description: You can sign in at [InvenTree Demo](https://demo.inventree.org) with admin:inventree. Note that this instance runs on the latest dev version, so your bug may be fixed there.
|
||||
options:
|
||||
- I did not try to reproduce
|
||||
- I tried to reproduce
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: result-reproduce
|
||||
attributes:
|
||||
label: Is the bug reproducible on the demo site?
|
||||
options:
|
||||
- Not reproducible
|
||||
- Reproducible
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: "Relevant log output"
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: false
|
||||
1
.github/ISSUE_TEMPLATE/config.yml
vendored
1
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1 +0,0 @@
|
||||
blank_issues_enabled: false
|
||||
15
.github/ISSUE_TEMPLATE/documentation.yaml
vendored
15
.github/ISSUE_TEMPLATE/documentation.yaml
vendored
@@ -1,15 +0,0 @@
|
||||
name: "Documentation"
|
||||
description: "Create an issue to improve the documentation"
|
||||
labels: ["documentation", "triage:not-checked"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Create a new issue regarding the InvenTree documentation
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: Body of the issue
|
||||
description: Please provide one distinct thing to fix or a clearly defined enhancement
|
||||
validations:
|
||||
required: true
|
||||
26
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
26
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: "[FR]"
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request the result of a bug?**
|
||||
Please link it here.
|
||||
|
||||
**Problem**
|
||||
A clear and concise description of what the problem is. e.g. I'm always frustrated when [...]
|
||||
|
||||
**Suggested solution**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Examples of other systems**
|
||||
Show how other software handles your FR if you have examples.
|
||||
|
||||
**Do you want to develop this?**
|
||||
If so please describe briefly how you would like to implement it (so we can give advice) and if you have experience in the needed technology (you do not need to be a pro - this is just as a information for us).
|
||||
53
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
53
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
@@ -1,53 +0,0 @@
|
||||
name: Feature Request
|
||||
description: Suggest an idea for this project
|
||||
title: "[FR] title"
|
||||
labels: ["enhancement", "triage:not-checked"]
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: no-duplicate-issues
|
||||
attributes:
|
||||
label: "Please verify that this feature request has NOT been suggested before."
|
||||
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=)"
|
||||
options:
|
||||
- label: "I checked and didn't find a similar feature request"
|
||||
required: true
|
||||
- type: textarea
|
||||
id: problem
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Problem statement"
|
||||
description: "A clear and concise description of the problem or missing feature."
|
||||
placeholder: "I am always struggling with ..."
|
||||
- type: textarea
|
||||
id: solution
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Suggested solution"
|
||||
description: "A clear and concise description of what you want to happen to solve the problem statement."
|
||||
placeholder: "In my use-case, ..."
|
||||
- type: textarea
|
||||
id: alternatives
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Describe alternatives you've considered"
|
||||
description: "A clear and concise description of any alternative solutions or features you've considered."
|
||||
placeholder: "This could also be done by doing ..."
|
||||
- type: textarea
|
||||
id: examples
|
||||
validations:
|
||||
required: false
|
||||
attributes:
|
||||
label: "Examples of other systems"
|
||||
description: "Show how other software handles your FR if you have examples."
|
||||
placeholder: "I software xxx this is done in the following way..."
|
||||
- type: checkboxes
|
||||
id: self-develop
|
||||
attributes:
|
||||
label: "Do you want to develop this?"
|
||||
description: "This is not required, and you do not need to be a pro - this is just as information for us."
|
||||
options:
|
||||
- label: "I want to develop this."
|
||||
required: false
|
||||
46
.github/ISSUE_TEMPLATE/install.yaml
vendored
46
.github/ISSUE_TEMPLATE/install.yaml
vendored
@@ -1,46 +0,0 @@
|
||||
name: "Install problems"
|
||||
description: "If you have problems deploying InvenTree"
|
||||
labels: ["question", "triage:not-checked", "setup"]
|
||||
body:
|
||||
- type: checkboxes
|
||||
id: deployment
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Deployment Method"
|
||||
options:
|
||||
- label: "Installer"
|
||||
- label: "Docker Development"
|
||||
- label: "Docker Production"
|
||||
- label: "Bare metal Development"
|
||||
- label: "Bare metal Production"
|
||||
- label: "Digital Ocean image"
|
||||
- label: "Other (please provide a link `Steps to Reproduce`"
|
||||
- type: textarea
|
||||
id: description
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Describe the problem*"
|
||||
description: "A clear and concise description of what is failing."
|
||||
- type: textarea
|
||||
id: steps-to-reproduce
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: "Steps to Reproduce"
|
||||
description: "Steps to reproduce the behaviour, please make it detailed"
|
||||
placeholder: |
|
||||
0. Link to all docs you used
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See the error
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: "Relevant log output"
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: bash
|
||||
validations:
|
||||
required: false
|
||||
12
.github/actions/migration/action.yaml
vendored
12
.github/actions/migration/action.yaml
vendored
@@ -1,6 +1,6 @@
|
||||
name: 'Migration test'
|
||||
description: 'Run migration test sequence'
|
||||
author: 'InvenTree'
|
||||
description: 'Run migration test sequenze'
|
||||
author: 'inventree'
|
||||
|
||||
runs:
|
||||
using: 'composite'
|
||||
@@ -9,9 +9,9 @@ runs:
|
||||
shell: bash
|
||||
run: |
|
||||
invoke migrate
|
||||
invoke dev.import-fixtures
|
||||
invoke import-fixtures
|
||||
invoke export-records -f data.json
|
||||
python3 ./src/backend/InvenTree/manage.py flush --noinput
|
||||
python3 ./InvenTree/manage.py flush --noinput
|
||||
invoke migrate
|
||||
invoke import-records -c -f data.json
|
||||
invoke import-records -c -f data.json
|
||||
invoke import-records -f data.json
|
||||
invoke import-records -f data.json
|
||||
|
||||
46
.github/actions/setup/action.yaml
vendored
46
.github/actions/setup/action.yaml
vendored
@@ -1,6 +1,6 @@
|
||||
name: 'Setup Enviroment'
|
||||
description: 'Setup the environment for general InvenTree tests'
|
||||
author: 'InvenTree'
|
||||
description: 'Setup the enviroment for general InvenTree tests'
|
||||
author: 'inventree'
|
||||
inputs:
|
||||
python:
|
||||
required: false
|
||||
@@ -35,66 +35,56 @@ runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v4.2.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Python installs
|
||||
- name: Set up Python ${{ env.python_version }}
|
||||
if: ${{ inputs.python == 'true' }}
|
||||
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # pin@v5.0.0
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ env.python_version }}
|
||||
cache: pip
|
||||
cache-dependency-path: |
|
||||
src/backend/requirements.txt
|
||||
src/backend/requirements-dev.txt
|
||||
contrib/container/requirements.txt
|
||||
contrib/dev_reqs/requirements.txt
|
||||
- name: Install Base Python Dependencies
|
||||
if: ${{ inputs.python == 'true' }}
|
||||
shell: bash
|
||||
run: |
|
||||
python3 -m pip install -U pip
|
||||
pip3 install -U invoke wheel
|
||||
pip3 install 'uv<0.3.0'
|
||||
- name: Allow uv to use the system Python by default
|
||||
run: echo "UV_SYSTEM_PYTHON=1" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
pip3 install invoke wheel
|
||||
- name: Install Specific Python Dependencies
|
||||
if: ${{ inputs.pip-dependency }}
|
||||
shell: bash
|
||||
run: uv pip install ${PIP_DEPS}
|
||||
env:
|
||||
PIP_DEPS: ${{ inputs.pip-dependency }}
|
||||
run: pip3 install ${{ inputs.pip-dependency }}
|
||||
|
||||
# NPM installs
|
||||
- name: Install node.js ${{ env.node_version }}
|
||||
if: ${{ inputs.npm == 'true' }}
|
||||
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # pin to v3.8.2
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: ${{ env.node_version }}
|
||||
cache: 'npm'
|
||||
- name: Intall npm packages
|
||||
if: ${{ inputs.npm == 'true' }}
|
||||
shell: bash
|
||||
run: npm install
|
||||
|
||||
# OS installs
|
||||
- name: Install OS Dependencies
|
||||
if: ${{ inputs.apt-dependency }}
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install ${APT_DEPS}
|
||||
sudo apt-get install ${APT_DEPS}
|
||||
env:
|
||||
APT_DEPS: ${{ inputs.apt-dependency }}
|
||||
sudo apt-get install ${{ inputs.apt-dependency }}
|
||||
|
||||
# Invoke commands
|
||||
- name: Install dev requirements
|
||||
if: ${{ inputs.dev-install == 'true' || inputs.install == 'true' }}
|
||||
if: ${{ inputs.dev-install == 'true' ||inputs.install == 'true' }}
|
||||
shell: bash
|
||||
run: uv pip install --require-hashes -r src/backend/requirements-dev.txt
|
||||
run: pip install -r requirements-dev.txt
|
||||
- name: Run invoke install
|
||||
if: ${{ inputs.install == 'true' }}
|
||||
shell: bash
|
||||
run: invoke install --uv
|
||||
run: invoke install
|
||||
- name: Run invoke update
|
||||
if: ${{ inputs.update == 'true' }}
|
||||
shell: bash
|
||||
run: invoke update --uv --skip-backup --skip-static
|
||||
run: invoke update
|
||||
|
||||
47
.github/dependabot.yml
vendored
47
.github/dependabot.yml
vendored
@@ -1,47 +0,0 @@
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: github-actions
|
||||
directory: /
|
||||
schedule:
|
||||
interval: weekly
|
||||
groups:
|
||||
dependencies:
|
||||
patterns:
|
||||
- "*" # Include all dependencies
|
||||
|
||||
- package-ecosystem: docker
|
||||
directory: /contrib/container
|
||||
schedule:
|
||||
interval: weekly
|
||||
|
||||
- package-ecosystem: docker
|
||||
directory: /.devcontainer
|
||||
schedule:
|
||||
interval: weekly
|
||||
|
||||
- package-ecosystem: pip
|
||||
directories:
|
||||
- /docs
|
||||
- /contrib/dev_reqs
|
||||
- /contrib/container
|
||||
- /src/backend
|
||||
schedule:
|
||||
interval: weekly
|
||||
day: friday
|
||||
groups:
|
||||
dependencies:
|
||||
patterns:
|
||||
- "*" # Include all dependencies
|
||||
assignees:
|
||||
- "matmair"
|
||||
versioning-strategy: increase
|
||||
|
||||
- package-ecosystem: npm
|
||||
directories:
|
||||
- /src/frontend
|
||||
schedule:
|
||||
interval: weekly
|
||||
groups:
|
||||
dependencies:
|
||||
patterns:
|
||||
- "*" # Include all dependencies
|
||||
12
.github/release.yml
vendored
12
.github/release.yml
vendored
@@ -4,8 +4,6 @@ changelog:
|
||||
exclude:
|
||||
labels:
|
||||
- translation
|
||||
- translations
|
||||
- documentation
|
||||
categories:
|
||||
- title: Breaking Changes
|
||||
labels:
|
||||
@@ -14,17 +12,10 @@ changelog:
|
||||
- title: Security Patches
|
||||
labels:
|
||||
- security
|
||||
- title: Database Changes
|
||||
labels:
|
||||
- migration
|
||||
- title: New Features
|
||||
labels:
|
||||
- Semver-Minor
|
||||
- feature
|
||||
- enhancement
|
||||
- title: Experimental Features
|
||||
labels:
|
||||
- experimental
|
||||
- title: Bug Fixes
|
||||
labels:
|
||||
- Semver-Patch
|
||||
@@ -35,9 +26,6 @@ changelog:
|
||||
- setup
|
||||
- demo
|
||||
- CI
|
||||
- title: Dependencies
|
||||
labels:
|
||||
- dependency
|
||||
- title: Other Changes
|
||||
labels:
|
||||
- "*"
|
||||
|
||||
100
.github/scripts/check_source_strings.py
vendored
100
.github/scripts/check_source_strings.py
vendored
@@ -1,100 +0,0 @@
|
||||
"""Script to check source strings for translations."""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
|
||||
import rapidfuzz
|
||||
|
||||
BACKEND_SOURCE_FILE = [
|
||||
'..',
|
||||
'..',
|
||||
'src',
|
||||
'backend',
|
||||
'InvenTree',
|
||||
'locale',
|
||||
'en',
|
||||
'LC_MESSAGES',
|
||||
'django.po',
|
||||
]
|
||||
|
||||
FRONTEND_SOURCE_FILE = [
|
||||
'..',
|
||||
'..',
|
||||
'src',
|
||||
'frontend',
|
||||
'src',
|
||||
'locales',
|
||||
'en',
|
||||
'messages.po',
|
||||
]
|
||||
|
||||
|
||||
def extract_source_strings(file_path):
|
||||
"""Extract source strings from the provided file."""
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
abs_file_path = os.path.abspath(os.path.join(here, *file_path))
|
||||
|
||||
sources = []
|
||||
|
||||
with open(abs_file_path, encoding='utf-8') as f:
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if line.startswith('msgid '):
|
||||
msgid = line[6:].strip()
|
||||
|
||||
if msgid in sources:
|
||||
print(f'Duplicate source string: {msgid}')
|
||||
else:
|
||||
sources.append(msgid)
|
||||
|
||||
return sources
|
||||
|
||||
|
||||
def compare_source_strings(sources, threshold):
|
||||
"""Compare source strings to find duplicates (or close matches)."""
|
||||
issues = 0
|
||||
|
||||
for i, source in enumerate(sources):
|
||||
for other in sources[i + 1 :]:
|
||||
if other.lower() == source.lower():
|
||||
print(f'- Duplicate: {source} ~ {other}')
|
||||
issues += 1
|
||||
continue
|
||||
|
||||
ratio = rapidfuzz.fuzz.ratio(source, other)
|
||||
if ratio > threshold:
|
||||
print(f'- Close match: {source} ~ {other} ({ratio:.1f}%)')
|
||||
issues += 1
|
||||
|
||||
if issues:
|
||||
print(f' - Found {issues} issues.')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Check source strings for translations.'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--backend', action='store_true', help='Check backend source strings'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--frontend', action='store_true', help='Check frontend source strings'
|
||||
)
|
||||
parser.add_argument(
|
||||
'--threshold',
|
||||
type=int,
|
||||
help='Set the threshold for string comparison',
|
||||
default=99,
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.backend:
|
||||
backend_sources = extract_source_strings(BACKEND_SOURCE_FILE)
|
||||
print('Backend source strings:', len(backend_sources))
|
||||
compare_source_strings(backend_sources, args.threshold)
|
||||
|
||||
if args.frontend:
|
||||
frontend_sources = extract_source_strings(FRONTEND_SOURCE_FILE)
|
||||
print('Frontend source strings:', len(frontend_sources))
|
||||
compare_source_strings(frontend_sources, args.threshold)
|
||||
310
.github/scripts/version_check.py
vendored
310
.github/scripts/version_check.py
vendored
@@ -1,310 +0,0 @@
|
||||
"""Ensure that the release tag matches the InvenTree version number.
|
||||
|
||||
Behaviour:
|
||||
master / main branch:
|
||||
- version number must end with 'dev'
|
||||
|
||||
tagged branch:
|
||||
- version number must match tag being built
|
||||
- version number cannot already exist as a release tag
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
REPO = os.getenv('GITHUB_REPOSITORY', 'inventree/inventree')
|
||||
GITHUB_API_URL = os.getenv('GITHUB_API_URL', 'https://api.github.com')
|
||||
|
||||
|
||||
def get_src_dir() -> Path:
|
||||
"""Return the path to the InvenTree source directory."""
|
||||
here = Path(__file__).parent.absolute()
|
||||
src_dir = here.joinpath('..', '..', 'src', 'backend', 'InvenTree', 'InvenTree')
|
||||
|
||||
if not src_dir.exists():
|
||||
raise FileNotFoundError(
|
||||
f"Could not find InvenTree source directory: '{src_dir}'"
|
||||
)
|
||||
|
||||
return src_dir
|
||||
|
||||
|
||||
def get_inventree_version() -> str:
|
||||
"""Return the InvenTree version string."""
|
||||
src_dir = get_src_dir()
|
||||
version_file = src_dir.joinpath('version.py')
|
||||
|
||||
if not version_file.exists():
|
||||
raise FileNotFoundError(
|
||||
f"Could not find InvenTree version file: '{version_file}'"
|
||||
)
|
||||
|
||||
with open(version_file, encoding='utf-8') as f:
|
||||
text = f.read()
|
||||
|
||||
# Extract the InvenTree software version
|
||||
results = re.findall(r"""INVENTREE_SW_VERSION = '(.*)'""", text)
|
||||
|
||||
if len(results) != 1:
|
||||
raise ValueError(f'Could not find INVENTREE_SW_VERSION in {version_file}')
|
||||
|
||||
return results[0]
|
||||
|
||||
|
||||
def get_api_version() -> str:
|
||||
"""Return the InvenTree API version string."""
|
||||
src_dir = get_src_dir()
|
||||
api_version_file = src_dir.joinpath('api_version.py')
|
||||
|
||||
if not api_version_file.exists():
|
||||
raise FileNotFoundError(
|
||||
f"Could not find InvenTree API version file: '{api_version_file}'"
|
||||
)
|
||||
|
||||
with open(api_version_file, encoding='utf-8') as f:
|
||||
text = f.read()
|
||||
|
||||
# Extract the InvenTree software version
|
||||
results = re.findall(r"""INVENTREE_API_VERSION = (.*)""", text)
|
||||
|
||||
if len(results) != 1:
|
||||
raise ValueError(
|
||||
f'Could not find INVENTREE_API_VERSION in {api_version_file}'
|
||||
)
|
||||
|
||||
return results[0].strip().strip('"').strip("'")
|
||||
|
||||
|
||||
def version_number_to_tuple(version_string: str) -> tuple[int, int, int, str]:
|
||||
"""Validate a version number string, and convert to a tuple of integers.
|
||||
|
||||
e.g. 1.1.0
|
||||
e.g. 1.1.0 dev
|
||||
e.g. 1.2.3-rc2
|
||||
"""
|
||||
pattern = r'^(\d+)\.(\d+)\.(\d+)[\s-]?(.*)?$'
|
||||
|
||||
match = re.match(pattern, version_string)
|
||||
|
||||
if not match or len(match.groups()) < 3:
|
||||
raise ValueError(
|
||||
f"Version string '{version_string}' did not match required pattern"
|
||||
)
|
||||
|
||||
result = tuple(int(x) for x in match.groups()[:3])
|
||||
|
||||
# Add optional prerelease tag
|
||||
if len(match.groups()) > 3:
|
||||
result += (match.groups()[3] or '',)
|
||||
else:
|
||||
result += ('',)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_existing_release_tags(include_prerelease: bool = True):
|
||||
"""Request information on existing releases via the GitHub API."""
|
||||
# Check for github token
|
||||
token = os.getenv('GITHUB_TOKEN', None)
|
||||
headers = None
|
||||
|
||||
if token:
|
||||
headers = {'Authorization': f'Bearer {token}'}
|
||||
|
||||
response = requests.get(f'{GITHUB_API_URL}/repos/{REPO}/releases', headers=headers)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise ValueError(
|
||||
f'Unexpected status code from GitHub API: {response.status_code}'
|
||||
)
|
||||
|
||||
data = json.loads(response.text)
|
||||
|
||||
# Return a list of all tags
|
||||
tags = []
|
||||
|
||||
for release in data:
|
||||
tag = release['tag_name'].strip()
|
||||
|
||||
version_tuple = version_number_to_tuple(tag)
|
||||
|
||||
if len(version_tuple) >= 4 and version_tuple[3]:
|
||||
# Skip prerelease tags
|
||||
if not include_prerelease:
|
||||
print('-- skipping prerelease tag:', tag)
|
||||
continue
|
||||
|
||||
tags.append(tag)
|
||||
|
||||
return tags
|
||||
|
||||
|
||||
def check_version_number(version_string, allow_duplicate=False):
|
||||
"""Check the provided version number.
|
||||
|
||||
Returns True if the provided version is the 'newest' InvenTree release
|
||||
"""
|
||||
print(f"Checking version '{version_string}'")
|
||||
|
||||
version_tuple = version_number_to_tuple(version_string)
|
||||
|
||||
# Look through the existing releases
|
||||
existing = get_existing_release_tags(include_prerelease=False)
|
||||
|
||||
# Assume that this is the highest release, unless told otherwise
|
||||
highest_release = True
|
||||
|
||||
# A non-standard tag cannot be the 'highest' release
|
||||
if len(version_tuple) >= 4 and version_tuple[3]:
|
||||
highest_release = False
|
||||
print(f"-- Version tag '{version_string}' cannot be the highest release")
|
||||
|
||||
for release in existing:
|
||||
if version_string == release and not allow_duplicate:
|
||||
raise ValueError(f"Duplicate release '{version_string}' exists!")
|
||||
|
||||
release_tuple = version_number_to_tuple(release)
|
||||
|
||||
if release_tuple > version_tuple:
|
||||
highest_release = False
|
||||
print(f'Found newer release: {release!s}')
|
||||
|
||||
if highest_release:
|
||||
print(f"-- Version '{version_string}' is the highest release")
|
||||
|
||||
return highest_release
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='InvenTree Version Check')
|
||||
parser.add_argument(
|
||||
'--show-version',
|
||||
action='store_true',
|
||||
help='Print the InvenTree version and exit',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--show-api-version',
|
||||
action='store_true',
|
||||
help='Print the InvenTree API version and exit',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--decrement-api',
|
||||
type=str,
|
||||
default='false',
|
||||
help='Decrement the API version by 1 and print',
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
inventree_version = get_inventree_version()
|
||||
inventree_api_version = int(get_api_version())
|
||||
|
||||
if args.show_version:
|
||||
print(inventree_version)
|
||||
sys.exit(0)
|
||||
|
||||
if args.show_api_version:
|
||||
if str(args.decrement_api).strip().lower() == 'true':
|
||||
inventree_api_version -= 1
|
||||
print(inventree_api_version)
|
||||
sys.exit(0)
|
||||
|
||||
# Ensure that we are running in GH Actions
|
||||
if os.environ.get('GITHUB_ACTIONS', '') != 'true':
|
||||
print('This script is intended to be run within a GitHub Action!')
|
||||
sys.exit(1)
|
||||
|
||||
print('Running InvenTree version check...')
|
||||
|
||||
# GITHUB_REF_TYPE may be either 'branch' or 'tag'
|
||||
GITHUB_REF_TYPE = os.environ['GITHUB_REF_TYPE']
|
||||
|
||||
# GITHUB_REF may be either 'refs/heads/<branch>' or 'refs/heads/<tag>'
|
||||
GITHUB_REF = os.environ['GITHUB_REF']
|
||||
GITHUB_REF_NAME = os.environ['GITHUB_REF_NAME']
|
||||
GITHUB_BASE_REF = os.environ['GITHUB_BASE_REF']
|
||||
|
||||
# Print out version information, makes debugging actions *much* easier!
|
||||
print(f'GITHUB_REF: {GITHUB_REF}')
|
||||
print(f'GITHUB_REF_NAME: {GITHUB_REF_NAME}')
|
||||
print(f'GITHUB_REF_TYPE: {GITHUB_REF_TYPE}')
|
||||
print(f'GITHUB_BASE_REF: {GITHUB_BASE_REF}')
|
||||
|
||||
print(
|
||||
f"InvenTree Version: '{inventree_version}' - {version_number_to_tuple(inventree_version)}"
|
||||
)
|
||||
print(f"InvenTree API Version: '{inventree_api_version}'")
|
||||
|
||||
# Check version number and look for existing versions
|
||||
# If a release is found which matches the current tag, throw an error
|
||||
|
||||
allow_duplicate = False
|
||||
|
||||
# Note: on a 'tag' (release) we *must* allow duplicate versions, as this *is* the version that has just been released
|
||||
if GITHUB_REF_TYPE == 'tag':
|
||||
allow_duplicate = True
|
||||
|
||||
# Note: on a push to 'stable' branch we also allow duplicates
|
||||
if GITHUB_BASE_REF == 'stable':
|
||||
allow_duplicate = True
|
||||
|
||||
highest_release = check_version_number(
|
||||
inventree_version, allow_duplicate=allow_duplicate
|
||||
)
|
||||
|
||||
# Determine which docker tag we are going to use
|
||||
docker_tags = None
|
||||
|
||||
if GITHUB_REF_TYPE == 'tag':
|
||||
# GITHUB_REF should be of the form /refs/heads/<tag>
|
||||
version_tag = GITHUB_REF.split('/')[-1]
|
||||
print(f"Checking requirements for tagged release - '{version_tag}':")
|
||||
|
||||
if version_tag != inventree_version:
|
||||
print(
|
||||
f"Version number '{inventree_version}' does not match tag '{version_tag}'"
|
||||
)
|
||||
sys.exit
|
||||
|
||||
docker_tags = [version_tag, 'stable'] if highest_release else [version_tag]
|
||||
|
||||
elif GITHUB_REF_TYPE == 'branch':
|
||||
# Otherwise we know we are targeting the 'master' branch
|
||||
docker_tags = ['latest']
|
||||
highest_release = False
|
||||
|
||||
else:
|
||||
print('Unsupported branch / version combination:')
|
||||
print(f'InvenTree Version: {inventree_version}')
|
||||
print('GITHUB_REF_TYPE:', GITHUB_REF_TYPE)
|
||||
print('GITHUB_BASE_REF:', GITHUB_BASE_REF)
|
||||
print('GITHUB_REF:', GITHUB_REF)
|
||||
sys.exit(1)
|
||||
|
||||
if docker_tags is None:
|
||||
print('Docker tags could not be determined')
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Version check passed for '{inventree_version}'!")
|
||||
print(f"Docker tags: '{docker_tags}'")
|
||||
|
||||
target_repos = [REPO.lower(), f'ghcr.io/{REPO.lower()}']
|
||||
|
||||
# Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
|
||||
with open(os.getenv('GITHUB_ENV'), 'a', encoding='utf-8') as env_file:
|
||||
# Construct tag string
|
||||
tag_list = [[f'{r}:{t}' for t in docker_tags] for r in target_repos]
|
||||
tags = ','.join(itertools.chain(*tag_list))
|
||||
|
||||
env_file.write(f'docker_tags={tags}\n')
|
||||
|
||||
if GITHUB_REF_TYPE == 'tag' and highest_release:
|
||||
env_file.write('stable_release=true\n')
|
||||
39
.github/workflows/backport.yaml
vendored
39
.github/workflows/backport.yaml
vendored
@@ -1,39 +0,0 @@
|
||||
# Backport tagged issues to a stable branch.
|
||||
#
|
||||
# To enable backporting for a pullrequest, add the label "backport" to the PR.
|
||||
# Additionally, add a label with the prefix "backport-to-" and the target branch
|
||||
|
||||
name: Backport
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: ["labeled", "closed"]
|
||||
|
||||
jobs:
|
||||
backport:
|
||||
name: Backport PR
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
if: |
|
||||
github.event.pull_request.merged == true
|
||||
&& contains(github.event.pull_request.labels.*.name, 'backport')
|
||||
&& (
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'backport')
|
||||
|| (github.event.action == 'closed')
|
||||
)
|
||||
steps:
|
||||
- name: Backport Action
|
||||
uses: sqren/backport-github-action@ad888e978060bc1b2798690dd9d03c4036560947 # pin@v9.2.2
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
auto_backport_label_prefix: backport-to-
|
||||
|
||||
- name: Info log
|
||||
if: ${{ success() }}
|
||||
run: cat ~/.backport/backport.info.log
|
||||
|
||||
- name: Debug log
|
||||
if: ${{ failure() }}
|
||||
run: cat ~/.backport/backport.debug.log
|
||||
38
.github/workflows/check_translations.yaml
vendored
38
.github/workflows/check_translations.yaml
vendored
@@ -8,42 +8,30 @@ on:
|
||||
branches:
|
||||
- l10
|
||||
|
||||
env:
|
||||
python_version: 3.9
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INVENTREE_DB_NAME: "./test_db.sqlite"
|
||||
INVENTREE_DB_NAME: './test_db.sqlite'
|
||||
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: INFO
|
||||
INVENTREE_DEBUG: info
|
||||
INVENTREE_MEDIA_ROOT: ./media
|
||||
INVENTREE_STATIC_ROOT: ./static
|
||||
INVENTREE_BACKUP_DIR: ./backup
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
install: true
|
||||
apt-dependency: gettext
|
||||
- name: Test Translations
|
||||
run: invoke dev.translate
|
||||
- name: Check for Duplicates
|
||||
uses: actions/checkout@v2
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
python ./.github/scripts/check_source_strings.py --frontend --backend
|
||||
sudo apt-get update
|
||||
sudo apt-get install gettext
|
||||
pip3 install invoke
|
||||
invoke install
|
||||
- name: Test Translations
|
||||
run: invoke translate
|
||||
- name: Check Migration Files
|
||||
run: python3 .github/scripts/check_migration_files.py
|
||||
run: python3 ci/check_migration_files.py
|
||||
|
||||
150
.github/workflows/docker.yaml
vendored
150
.github/workflows/docker.yaml
vendored
@@ -19,95 +19,39 @@ on:
|
||||
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
pull_request:
|
||||
branches:
|
||||
- "master"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
- 'master'
|
||||
|
||||
jobs:
|
||||
paths-filter:
|
||||
permissions:
|
||||
contents: read # for dorny/paths-filter to fetch a list of changed files
|
||||
pull-requests: read # for dorny/paths-filter to read pull requests
|
||||
name: Filter
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
docker: ${{ steps.filter.outputs.docker }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
|
||||
id: filter
|
||||
with:
|
||||
filters: |
|
||||
docker:
|
||||
- .github/workflows/docker.yaml
|
||||
- contrib/container/**
|
||||
- src/backend/InvenTree/InvenTree/settings.py
|
||||
- src/backend/requirements.txt
|
||||
- tasks.py
|
||||
|
||||
# Build the docker image
|
||||
build:
|
||||
needs: paths-filter
|
||||
if: needs.paths-filter.outputs.docker == 'true' || github.event_name == 'release' || github.event_name == 'push' || contains(github.event.pull_request.labels.*.name, 'full-run')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
id-token: write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
python_version: "3.11"
|
||||
runs-on: ubuntu-latest # in the future we can try to use alternative runners here
|
||||
|
||||
steps:
|
||||
- name: Check out repo
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set Up Python ${{ env.python_version }}
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # pin@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.python_version }}
|
||||
uses: actions/checkout@v2
|
||||
- name: Version Check
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
|
||||
python3 .github/scripts/version_check.py
|
||||
pip install requests
|
||||
python3 ci/version_check.py
|
||||
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
|
||||
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
|
||||
- name: Test Docker Image
|
||||
id: test-docker
|
||||
run: |
|
||||
docker build . --target production --tag inventree-test -f contrib/container/Dockerfile
|
||||
docker run --rm inventree-test invoke version
|
||||
docker run --rm inventree-test invoke --version
|
||||
docker run --rm inventree-test invoke --list
|
||||
docker run --rm inventree-test gunicorn --version
|
||||
docker run --rm inventree-test pg_dump --version
|
||||
docker run --rm inventree-test test -f /home/inventree/init.sh
|
||||
docker run --rm inventree-test test -f /home/inventree/tasks.py
|
||||
docker run --rm inventree-test test -f /home/inventree/gunicorn.conf.py
|
||||
docker run --rm inventree-test test -f /home/inventree/src/backend/requirements.txt
|
||||
docker run --rm inventree-test test -f /home/inventree/src/backend/InvenTree/manage.py
|
||||
- name: Build Docker Image
|
||||
# Build the development docker image (using docker-compose.yml)
|
||||
run: docker compose --project-directory . -f contrib/container/dev-docker-compose.yml build --no-cache
|
||||
- name: Update Docker Image
|
||||
run: |
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke install
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke version
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke update
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke backup
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke restore
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke dev.setup-dev
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml up -d
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke wait
|
||||
docker-compose build
|
||||
- name: Run Unit Tests
|
||||
run: |
|
||||
docker-compose run inventree-dev-server invoke update
|
||||
docker-compose run inventree-dev-server invoke setup-dev
|
||||
docker-compose up -d
|
||||
docker-compose run inventree-dev-server invoke wait
|
||||
docker-compose run inventree-dev-server invoke test
|
||||
docker-compose down
|
||||
- name: Check Data Directory
|
||||
# The following file structure should have been created by the docker image
|
||||
run: |
|
||||
@@ -120,72 +64,50 @@ jobs:
|
||||
test -f data/config.yaml
|
||||
test -f data/plugins.txt
|
||||
test -f data/secret_key.txt
|
||||
test -f data/oidc.pem
|
||||
- name: Run Unit Tests
|
||||
run: |
|
||||
echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> contrib/container/docker.dev.env
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run --rm inventree-dev-server invoke dev.test --check --disable-pty --translations
|
||||
- name: Run Migration Tests
|
||||
run: |
|
||||
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run --rm inventree-dev-server invoke dev.test --check --migrations --translations
|
||||
- name: Clean up test folder
|
||||
run: |
|
||||
rm -rf InvenTree/_testfolder
|
||||
- name: Set up QEMU
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # pin@v3.6.0
|
||||
uses: docker/setup-qemu-action@v1
|
||||
- name: Set up Docker Buildx
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # pin@v3.11.1
|
||||
uses: docker/setup-buildx-action@v1
|
||||
- name: Set up cosign
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: sigstore/cosign-installer@d58896d6a1865668819e1d91763c7751a165e159 # pin@v3.9.2
|
||||
- name: Check if Dockerhub login is required
|
||||
id: docker_login
|
||||
run: |
|
||||
if [ -z "${{ secrets.DOCKER_USERNAME }}" ]; then
|
||||
echo "skip_dockerhub_login=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "skip_dockerhub_login=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
uses: sigstore/cosign-installer@48866aa521d8bf870604709cd43ec2f602d03ff2
|
||||
- name: Login to Dockerhub
|
||||
if: github.event_name != 'pull_request' && steps.docker_login.outputs.skip_dockerhub_login != 'true'
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # pin@v3.5.0
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Log into registry ghcr.io
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # pin@v3.5.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract Docker metadata
|
||||
if: github.event_name != 'pull_request'
|
||||
id: meta
|
||||
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # pin@v5.8.0
|
||||
uses: docker/metadata-action@69f6fc9d46f2f8bf0d5491e4aabe0bb8c6a4678a
|
||||
with:
|
||||
images: |
|
||||
inventree/inventree
|
||||
ghcr.io/${{ github.repository }}
|
||||
- uses: depot/setup-action@b0b1ea4f69e92ebf5dea3f8713a1b0c37b2126a5 # pin@v1
|
||||
- name: Push Docker Images
|
||||
id: push-docker
|
||||
- name: Build and Push
|
||||
id: build-and-push
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: depot/build-push-action@9785b135c3c76c33db102e45be96a25ab55cd507 # pin@v1
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
project: jczzbjkk68
|
||||
context: .
|
||||
file: ./contrib/container/Dockerfile
|
||||
platforms: linux/amd64,linux/arm64
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
push: true
|
||||
sbom: true
|
||||
provenance: false
|
||||
target: production
|
||||
tags: ${{ env.docker_tags }}
|
||||
build-args: |
|
||||
commit_hash=${{ env.git_commit_hash }}
|
||||
commit_date=${{ env.git_commit_date }}
|
||||
- name: Sign the published image
|
||||
if: github.event_name != 'pull_request'
|
||||
env:
|
||||
COSIGN_EXPERIMENTAL: "true"
|
||||
run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}
|
||||
- name: Push to Stable Branch
|
||||
uses: ad-m/github-push-action@master
|
||||
if: env.stable_release == 'true' && github.event_name != 'pull_request'
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: stable
|
||||
force: true
|
||||
|
||||
684
.github/workflows/qc_checks.yaml
vendored
684
.github/workflows/qc_checks.yaml
vendored
@@ -1,136 +1,109 @@
|
||||
# Checks for each PR / push
|
||||
|
||||
name: QC
|
||||
name: QC checks
|
||||
|
||||
on:
|
||||
push:
|
||||
branches-ignore: ["l10*"]
|
||||
branches-ignore:
|
||||
- l10*
|
||||
|
||||
pull_request:
|
||||
branches-ignore: ["l10*"]
|
||||
branches-ignore:
|
||||
- l10*
|
||||
|
||||
env:
|
||||
python_version: 3.9
|
||||
node_version: 20
|
||||
node_version: 16
|
||||
# The OS version must be set per job
|
||||
|
||||
server_start_sleep: 60
|
||||
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INVENTREE_DB_ENGINE: sqlite3
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_MEDIA_ROOT: /home/runner/work/InvenTree/test_inventree_media
|
||||
INVENTREE_STATIC_ROOT: /home/runner/work/InvenTree/test_inventree_static
|
||||
INVENTREE_BACKUP_DIR: /home/runner/work/InvenTree/test_inventree_backup
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
INVENTREE_DEBUG: true
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
INVENTREE_MEDIA_ROOT: ../test_inventree_media
|
||||
INVENTREE_STATIC_ROOT: ../test_inventree_static
|
||||
|
||||
jobs:
|
||||
paths-filter:
|
||||
name: Filter
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
outputs:
|
||||
server: ${{ steps.filter.outputs.server }}
|
||||
migrations: ${{ steps.filter.outputs.migrations }}
|
||||
frontend: ${{ steps.filter.outputs.frontend }}
|
||||
api: ${{ steps.filter.outputs.api }}
|
||||
force: ${{ steps.force.outputs.force }}
|
||||
cicd: ${{ steps.filter.outputs.cicd }}
|
||||
requirements: ${{ steps.filter.outputs.requirements }}
|
||||
pep_style:
|
||||
name: Style [Python]
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
|
||||
id: filter
|
||||
dev-install: true
|
||||
- name: Run flake8
|
||||
run: flake8 InvenTree --extend-ignore=D
|
||||
|
||||
javascript:
|
||||
name: Style [JS]
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: pep_style
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
filters: |
|
||||
server:
|
||||
- 'src/backend/InvenTree/**'
|
||||
- 'src/backend/requirements.txt'
|
||||
- 'src/backend/requirements-dev.txt'
|
||||
migrations:
|
||||
- '**/test_migrations.py'
|
||||
- '**/migrations/**'
|
||||
- '.github/workflows**'
|
||||
- 'src/backend/requirements.txt'
|
||||
api:
|
||||
- 'src/backend/InvenTree/InvenTree/api_version.py'
|
||||
frontend:
|
||||
- 'src/frontend/**'
|
||||
cicd:
|
||||
- '.github/workflows/**'
|
||||
requirements:
|
||||
- 'src/backend/requirements.txt'
|
||||
- 'src/backend/requirements-dev.txt'
|
||||
- 'docs/requirements.txt'
|
||||
- 'contrib/dev_reqs/requirements.txt'
|
||||
- name: Is CI being forced?
|
||||
run: echo "force=true" >> $GITHUB_OUTPUT
|
||||
id: force
|
||||
if: |
|
||||
contains(github.event.pull_request.labels.*.name, 'dependency') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'full-run')
|
||||
npm: true
|
||||
install: true
|
||||
- name: Check Templated JS Files
|
||||
run: |
|
||||
cd ci
|
||||
python3 check_js_templates.py
|
||||
- name: Lint Javascript Files
|
||||
run: |
|
||||
python InvenTree/manage.py prerender
|
||||
npx eslint InvenTree/InvenTree/static_i18n/i18n/*.js
|
||||
|
||||
html:
|
||||
name: Style [HTML]
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: pep_style
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
npm: true
|
||||
install: true
|
||||
- name: Check HTML Files
|
||||
run: npx markuplint **/templates/*.html
|
||||
|
||||
pre-commit:
|
||||
name: Style [pre-commit]
|
||||
runs-on: ubuntu-24.04
|
||||
needs: paths-filter
|
||||
if: needs.paths-filter.outputs.cicd == 'true' || needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.frontend == 'true' || needs.paths-filter.outputs.requirements == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: pep_style
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up Python ${{ env.python_version }}
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # pin@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.python_version }}
|
||||
cache: "pip"
|
||||
- name: Run pre-commit Checks
|
||||
uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # pin@v3.0.1
|
||||
- name: Check Version
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
|
||||
python3 .github/scripts/version_check.py
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python ${{ env.python_version }}
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ env.python_version }}
|
||||
cache: 'pip'
|
||||
- name: Run pre-commit Checks
|
||||
uses: pre-commit/action@v2.0.3
|
||||
- name: Check Version
|
||||
run: |
|
||||
pip install requests
|
||||
python3 ci/version_check.py
|
||||
|
||||
mkdocs:
|
||||
name: Style [Documentation]
|
||||
runs-on: ubuntu-24.04
|
||||
python:
|
||||
name: Tests - inventree-python
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: paths-filter
|
||||
needs: pre-commit
|
||||
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up Python ${{ env.python_version }}
|
||||
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # pin@v5.6.0
|
||||
with:
|
||||
python-version: ${{ env.python_version }}
|
||||
- name: Check Config
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
|
||||
pip install --require-hashes -r docs/requirements.txt
|
||||
python docs/ci/check_mkdocs_config.py
|
||||
- name: Check Links
|
||||
uses: gaurav-nelson/github-action-markdown-link-check@5c5dfc0ac2e225883c0e5f03a85311ec2830d368 # pin@v1
|
||||
with:
|
||||
folder-path: docs
|
||||
config-file: docs/mlc_config.json
|
||||
check-modified-files-only: "yes"
|
||||
use-quiet-mode: "yes"
|
||||
|
||||
schema:
|
||||
name: Tests - API Schema Documentation
|
||||
runs-on: ubuntu-24.04
|
||||
needs: paths-filter
|
||||
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
env:
|
||||
wrapper_name: inventree-python
|
||||
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
|
||||
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
|
||||
INVENTREE_ADMIN_USER: testuser
|
||||
@@ -139,244 +112,96 @@ jobs:
|
||||
INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
|
||||
INVENTREE_PYTHON_TEST_USERNAME: testuser
|
||||
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
|
||||
outputs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Export API Documentation
|
||||
run: invoke dev.schema --ignore-warnings --filename src/backend/InvenTree/schema.yml
|
||||
- name: Upload schema
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: schema.yml
|
||||
path: src/backend/InvenTree/schema.yml
|
||||
- name: Download public schema
|
||||
env:
|
||||
API: ${{ needs.paths-filter.outputs.api }}
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt >/dev/null 2>&1
|
||||
version="$(python3 .github/scripts/version_check.py --show-api-version --decrement-api=${API} 2>&1)"
|
||||
echo "API Version: $version"
|
||||
url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
|
||||
echo "URL: $url"
|
||||
code=$(curl -s -o api.yaml $url --write-out '%{http_code}' --silent)
|
||||
if [ "$code" != "200" ]; then
|
||||
exit 1
|
||||
fi
|
||||
echo "Downloaded api.yaml"
|
||||
- name: Running OpenAPI Spec diff action
|
||||
id: breaking_changes
|
||||
uses: oasdiff/oasdiff-action/diff@1c611ffb1253a72924624aa4fb662e302b3565d3 # pin@main
|
||||
with:
|
||||
base: "api.yaml"
|
||||
revision: "src/backend/InvenTree/schema.yml"
|
||||
format: "html"
|
||||
- name: Echoing diff to step
|
||||
continue-on-error: true
|
||||
env:
|
||||
DIFF: ${{ steps.breaking_changes.outputs.diff }}
|
||||
run: echo "${DIFF}" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
- name: Check for differences in API Schema
|
||||
if: needs.paths-filter.outputs.api == 'false'
|
||||
run: |
|
||||
diff --color -u src/backend/InvenTree/schema.yml api.yaml
|
||||
diff -u src/backend/InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
|
||||
- name: Check schema - including warnings
|
||||
run: invoke dev.schema
|
||||
- name: Extract version for publishing
|
||||
id: version
|
||||
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt >/dev/null 2>&1
|
||||
version="$(python3 .github/scripts/version_check.py --show-api-version 2>&1)"
|
||||
echo "API Version: $version"
|
||||
echo "version=$version" >> "$GITHUB_OUTPUT"
|
||||
- name: Extract settings / tags
|
||||
run: invoke int.export-definitions --basedir docs
|
||||
- name: Upload settings
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: inventree_settings.json
|
||||
path: docs/generated/inventree_settings.json
|
||||
- name: Upload tags
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: inventree_tags.yml
|
||||
path: docs/generated/inventree_tags.yml
|
||||
- name: Upload filters
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: inventree_filters.yml
|
||||
path: docs/generated/inventree_filters.yml
|
||||
|
||||
schema-push:
|
||||
name: Push new schema
|
||||
runs-on: ubuntu-24.04
|
||||
needs: [paths-filter, schema]
|
||||
if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true' && github.repository_owner == 'inventree'
|
||||
env:
|
||||
version: ${{ needs.schema.outputs.version }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
name: Checkout Code
|
||||
with:
|
||||
repository: inventree/schema
|
||||
token: ${{ secrets.SCHEMA_PAT }}
|
||||
persist-credentials: true
|
||||
- name: Create artifact directory
|
||||
run: mkdir -p artifact
|
||||
- name: Download schema artifact
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # pin@v5.0.0
|
||||
with:
|
||||
path: artifact
|
||||
merge-multiple: true
|
||||
- name: Move files to correct location
|
||||
run: |
|
||||
echo "Version: ${version}"
|
||||
echo "before move"
|
||||
ls -la artifact
|
||||
mkdir export/${version}
|
||||
mv artifact/schema.yml export/${version}/api.yaml
|
||||
mv artifact/inventree_settings.json export/${version}/inventree_settings.json
|
||||
mv artifact/inventree_tags.yml export/${version}/inventree_tags.yml
|
||||
mv artifact/inventree_filters.yml export/${version}/inventree_filters.yml
|
||||
echo "after move"
|
||||
ls -la artifact
|
||||
rm -rf artifact
|
||||
- uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 # pin@v6.0.1
|
||||
name: Commit schema changes
|
||||
with:
|
||||
commit_message: "Update API schema for ${{ env.version }} / ${{ github.sha }}"
|
||||
|
||||
python:
|
||||
name: Tests - inventree-python
|
||||
runs-on: ubuntu-24.04
|
||||
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
|
||||
env:
|
||||
WRAPPER_NAME: inventree-python
|
||||
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
|
||||
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
|
||||
INVENTREE_ADMIN_USER: testuser
|
||||
INVENTREE_ADMIN_PASSWORD: testpassword
|
||||
INVENTREE_ADMIN_EMAIL: test@test.com
|
||||
INVENTREE_PYTHON_TEST_SERVER: http://127.0.0.1:12345
|
||||
INVENTREE_PYTHON_TEST_USERNAME: testuser
|
||||
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
|
||||
INVENTREE_SITE_URL: http://127.0.0.1:12345
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: WARNING
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: true
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Download Python Code For `${WRAPPER_NAME}`
|
||||
run: git clone --depth 1 https://github.com/inventree/${WRAPPER_NAME} ./${WRAPPER_NAME}
|
||||
- name: Download Python Code For `${{ env.wrapper_name }}`
|
||||
run: git clone --depth 1 https://github.com/inventree/${{ env.wrapper_name }} ./${{ env.wrapper_name }}
|
||||
- name: Start InvenTree Server
|
||||
run: |
|
||||
invoke dev.delete-data -f
|
||||
invoke dev.import-fixtures
|
||||
invoke dev.server -a 127.0.0.1:12345 &
|
||||
invoke delete-data -f
|
||||
invoke import-fixtures
|
||||
invoke server -a 127.0.0.1:12345 &
|
||||
invoke wait
|
||||
- name: Run Tests For `${WRAPPER_NAME}`
|
||||
- name: Run Tests For `${{ env.wrapper_name }}`
|
||||
run: |
|
||||
cd ${WRAPPER_NAME}
|
||||
cd ${{ env.wrapper_name }}
|
||||
invoke check-server
|
||||
coverage run -m unittest discover -s test/
|
||||
|
||||
coverage:
|
||||
name: Tests - DB [SQLite] + Coverage ${{ matrix.python_version }}
|
||||
runs-on: ubuntu-24.04
|
||||
docstyle:
|
||||
name: Style [Python Docstrings]
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
continue-on-error: true # continue if a step fails so that coverage gets pushed
|
||||
strategy:
|
||||
matrix:
|
||||
python_version: [3.9]
|
||||
# python_version: [3.9, 3.12] # Disabled due to requirement issues
|
||||
needs: pre-commit
|
||||
continue-on-error: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
install: true
|
||||
- name: Run flake8
|
||||
run: flake8 InvenTree --statistics
|
||||
|
||||
coverage:
|
||||
name: Tests - DB [SQLite] + Coverage
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: ['javascript', 'html', 'pre-commit']
|
||||
continue-on-error: true # continue if a step fails so that coverage gets pushed
|
||||
|
||||
env:
|
||||
INVENTREE_DB_NAME: ./inventree.sqlite
|
||||
INVENTREE_DB_ENGINE: sqlite3
|
||||
INVENTREE_PLUGINS_ENABLED: true
|
||||
INVENTREE_CONSOLE_LOG: false
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
python_version: ${{ matrix.python_version }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Coverage Tests
|
||||
run: invoke coverage
|
||||
- name: Data Export Test
|
||||
uses: ./.github/actions/migration
|
||||
- name: Test Translations
|
||||
run: invoke dev.translate
|
||||
run: invoke translate
|
||||
- name: Check Migration Files
|
||||
run: python3 .github/scripts/check_migration_files.py
|
||||
- name: Coverage Tests
|
||||
run: invoke dev.test --check --coverage --translations
|
||||
- name: Upload raw coverage to artifacts
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: coverage
|
||||
path: .coverage
|
||||
retention-days: 14
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # pin@v5.5.0
|
||||
if: always()
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
slug: inventree/InvenTree
|
||||
flags: backend
|
||||
run: python3 ci/check_migration_files.py
|
||||
- name: Upload Coverage Report
|
||||
run: coveralls
|
||||
|
||||
postgres:
|
||||
name: Tests - DB [PostgreSQL]
|
||||
runs-on: ubuntu-24.04
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: ['javascript', 'html', 'pre-commit']
|
||||
if: github.event_name == 'push'
|
||||
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: django.db.backends.postgresql
|
||||
INVENTREE_DB_USER: inventree
|
||||
INVENTREE_DB_PASSWORD: password
|
||||
INVENTREE_DB_HOST: "127.0.0.1"
|
||||
INVENTREE_DB_HOST: '127.0.0.1'
|
||||
INVENTREE_DB_PORT: 5432
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: INFO
|
||||
INVENTREE_CONSOLE_LOG: false
|
||||
INVENTREE_DEBUG: info
|
||||
INVENTREE_CACHE_HOST: localhost
|
||||
INVENTREE_PLUGINS_ENABLED: true
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:14
|
||||
image: postgres
|
||||
env:
|
||||
POSTGRES_USER: inventree
|
||||
POSTGRES_PASSWORD: password
|
||||
@@ -389,38 +214,34 @@ jobs:
|
||||
- 6379:6379
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils libpq-dev
|
||||
pip-dependency: psycopg django-redis>=5.0.0
|
||||
pip-dependency: psycopg2 django-redis>=5.0.0
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Run Tests
|
||||
run: invoke dev.test --check --translations
|
||||
run: invoke test
|
||||
- name: Data Export Test
|
||||
uses: ./.github/actions/migration
|
||||
|
||||
mysql:
|
||||
name: Tests - DB [MySQL]
|
||||
runs-on: ubuntu-24.04
|
||||
runs-on: ubuntu-20.04
|
||||
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
needs: ['javascript', 'html', 'pre-commit']
|
||||
if: github.event_name == 'push'
|
||||
|
||||
env:
|
||||
# Database backend configuration
|
||||
INVENTREE_DB_ENGINE: django.db.backends.mysql
|
||||
INVENTREE_DB_USER: root
|
||||
INVENTREE_DB_PASSWORD: password
|
||||
INVENTREE_DB_HOST: "127.0.0.1"
|
||||
INVENTREE_DB_HOST: '127.0.0.1'
|
||||
INVENTREE_DB_PORT: 3306
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: WARNING
|
||||
INVENTREE_CONSOLE_LOG: false
|
||||
INVENTREE_DEBUG: info
|
||||
INVENTREE_PLUGINS_ENABLED: true
|
||||
|
||||
services:
|
||||
@@ -437,10 +258,8 @@ jobs:
|
||||
- 3306:3306
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
- uses: actions/checkout@v1
|
||||
- name: Enviroment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils libmysqlclient-dev
|
||||
@@ -448,243 +267,6 @@ jobs:
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Run Tests
|
||||
run: invoke dev.test --check --translations
|
||||
run: invoke test
|
||||
- name: Data Export Test
|
||||
uses: ./.github/actions/migration
|
||||
|
||||
migration-tests:
|
||||
name: Tests - Migrations [PostgreSQL]
|
||||
runs-on: ubuntu-latest
|
||||
needs: paths-filter
|
||||
if: ${{ (needs.paths-filter.outputs.force == 'true') || (github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true') }}
|
||||
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: django.db.backends.postgresql
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_DB_USER: inventree
|
||||
INVENTREE_DB_PASSWORD: password
|
||||
INVENTREE_DB_HOST: "127.0.0.1"
|
||||
INVENTREE_DB_PORT: 5432
|
||||
INVENTREE_DEBUG: False
|
||||
INVENTREE_LOG_LEVEL: WARNING
|
||||
INVENTREE_PLUGINS_ENABLED: false
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:14
|
||||
env:
|
||||
POSTGRES_USER: inventree
|
||||
POSTGRES_PASSWORD: password
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
apt-dependency: gettext poppler-utils libpq-dev
|
||||
pip-dependency: psycopg
|
||||
dev-install: true
|
||||
update: true
|
||||
- name: Run Tests
|
||||
run: invoke dev.test --check --migrations --report --coverage --translations
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # pin@v5.5.0
|
||||
if: always()
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
slug: inventree/InvenTree
|
||||
flags: migrations
|
||||
|
||||
migrations-checks:
|
||||
name: Tests - Full Migration [SQLite]
|
||||
runs-on: ubuntu-latest
|
||||
needs: paths-filter
|
||||
if: ${{ (needs.paths-filter.outputs.force == 'true') || (github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true') }}
|
||||
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: sqlite3
|
||||
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: WARNING
|
||||
INVENTREE_PLUGINS_ENABLED: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
name: Checkout Code
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
install: true
|
||||
- name: Fetch Database
|
||||
run: git clone --depth 1 https://github.com/inventree/test-db ./test-db
|
||||
|
||||
- name: 0.10.0 Database
|
||||
run: |
|
||||
rm /home/runner/work/InvenTree/db.sqlite3
|
||||
cp test-db/stable_0.10.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
|
||||
chmod +rw /home/runner/work/InvenTree/db.sqlite3
|
||||
invoke migrate
|
||||
|
||||
- name: 0.11.0 Database
|
||||
run: |
|
||||
rm /home/runner/work/InvenTree/db.sqlite3
|
||||
cp test-db/stable_0.11.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
|
||||
chmod +rw /home/runner/work/InvenTree/db.sqlite3
|
||||
invoke migrate
|
||||
|
||||
- name: 0.13.5 Database
|
||||
run: |
|
||||
rm /home/runner/work/InvenTree/db.sqlite3
|
||||
cp test-db/stable_0.13.5.sqlite3 /home/runner/work/InvenTree/db.sqlite3
|
||||
chmod +rw /home/runner/work/InvenTree/db.sqlite3
|
||||
invoke migrate
|
||||
|
||||
- name: 0.16.0 Database
|
||||
run: |
|
||||
rm /home/runner/work/InvenTree/db.sqlite3
|
||||
cp test-db/stable_0.16.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
|
||||
chmod +rw /home/runner/work/InvenTree/db.sqlite3
|
||||
invoke migrate
|
||||
|
||||
- name: 0.17.0 Database
|
||||
run: |
|
||||
rm /home/runner/work/InvenTree/db.sqlite3
|
||||
cp test-db/stable_0.17.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
|
||||
chmod +rw /home/runner/work/InvenTree/db.sqlite3
|
||||
invoke migrate
|
||||
|
||||
web_ui:
|
||||
name: Tests - Web UI
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 60
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.frontend == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
env:
|
||||
POSTGRES_DB: inventree
|
||||
POSTGRES_USER: inventree_user
|
||||
POSTGRES_PASSWORD: inventree_password
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
--health-cmd "pg_isready -U testuser"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: postgresql
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_DB_HOST: "127.0.0.1"
|
||||
INVENTREE_DB_PORT: 5432
|
||||
INVENTREE_DB_USER: inventree_user
|
||||
INVENTREE_DB_PASSWORD: inventree_password
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_PLUGINS_ENABLED: false
|
||||
VITE_COVERAGE_BUILD: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
npm: true
|
||||
install: true
|
||||
update: true
|
||||
apt-dependency: postgresql-client libpq-dev
|
||||
pip-dependency: psycopg2
|
||||
- name: Set up test data
|
||||
run: |
|
||||
invoke dev.setup-test -iv
|
||||
invoke int.rebuild-thumbnails
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
invoke int.frontend-compile --extract
|
||||
cd src/frontend && npx playwright install --with-deps
|
||||
- name: Run Playwright tests
|
||||
id: tests
|
||||
run: cd src/frontend && npx nyc playwright test
|
||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
if: ${{ !cancelled() && steps.tests.outcome == 'failure' }}
|
||||
with:
|
||||
name: playwright-report
|
||||
path: src/frontend/playwright-report/
|
||||
retention-days: 14
|
||||
- name: Report coverage
|
||||
run: cd src/frontend && npx nyc report --report-dir ./coverage --temp-dir .nyc_output --reporter=lcov --exclude-after-remap false
|
||||
- name: Upload coverage reports to Codecov
|
||||
uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # pin@v5.5.0
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
slug: inventree/InvenTree
|
||||
flags: web
|
||||
- name: Upload bundler info
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
run: |
|
||||
cd src/frontend
|
||||
yarn install
|
||||
yarn run build
|
||||
|
||||
web_ui_build:
|
||||
name: Build - Web UI
|
||||
runs-on: ubuntu-24.04
|
||||
timeout-minutes: 60
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
npm: true
|
||||
- name: Install dependencies
|
||||
run: cd src/frontend && yarn install
|
||||
- name: Build frontend
|
||||
run: cd src/frontend && yarn run compile && yarn run build
|
||||
- name: Write version file - SHA
|
||||
run: cd src/backend/InvenTree/web/static/web/.vite && echo "$GITHUB_SHA" > sha.txt
|
||||
- name: Zip frontend
|
||||
run: |
|
||||
cd src/backend/InvenTree/web/static
|
||||
zip -r frontend-build.zip web/ web/.vite
|
||||
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # pin@v4.6.2
|
||||
with:
|
||||
name: frontend-build
|
||||
path: src/backend/InvenTree/web/static/web
|
||||
include-hidden-files: true
|
||||
|
||||
zizmor:
|
||||
name: Security [Zizmor]
|
||||
runs-on: ubuntu-24.04
|
||||
needs: ["pre-commit", "paths-filter"]
|
||||
if: needs.paths-filter.outputs.cicd == 'true' || needs.paths-filter.outputs.force == 'true'
|
||||
|
||||
permissions:
|
||||
security-events: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: hynek/setup-cached-uv@757bedc3f972eb7227a1aa657651f15a8527c817 # pin@v2
|
||||
- name: Run zizmor
|
||||
run: uvx zizmor --format sarif . > results.sarif
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: Upload SARIF file
|
||||
uses: github/codeql-action/upload-sarif@2d92b76c45b91eb80fc44c74ce3fce0ee94e8f9d # pin@v3
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
category: zizmor
|
||||
|
||||
136 .github/workflows/release.yaml (vendored)
@@ -1,136 +0,0 @@
|
||||
# Runs on releases
|
||||
|
||||
name: Publish release
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
permissions:
|
||||
contents: read
|
||||
env:
|
||||
python_version: 3.9
|
||||
|
||||
jobs:
|
||||
stable:
|
||||
runs-on: ubuntu-24.04
|
||||
name: Write release to stable branch
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Version Check
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
|
||||
python3 .github/scripts/version_check.py
|
||||
- name: Push to Stable Branch
|
||||
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
|
||||
if: env.stable_release == 'true'
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: stable
|
||||
force: true
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-24.04
|
||||
name: Build and attest frontend
|
||||
permissions:
|
||||
id-token: write
|
||||
contents: write
|
||||
attestations: write
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
npm: true
|
||||
- name: Install dependencies
|
||||
run: cd src/frontend && yarn install
|
||||
- name: Build frontend
|
||||
run: cd src/frontend && npm run compile && npm run build
|
||||
- name: Create SBOM for frontend
|
||||
uses: anchore/sbom-action@da167eac915b4e86f08b264dbdbc867b61be6f0c # pin@v0
|
||||
with:
|
||||
artifact-name: frontend-build.spdx
|
||||
path: src/frontend
|
||||
- name: Write version file - SHA
|
||||
run: cd src/backend/InvenTree/web/static/web/.vite && echo "$GITHUB_SHA" > sha.txt
|
||||
- name: Write version file - TAG
|
||||
run: cd src/backend/InvenTree/web/static/web/.vite && echo "${REF_NAME}" > tag.txt
|
||||
env:
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
- name: Zip frontend
|
||||
run: |
|
||||
cd src/backend/InvenTree/web/static/web
|
||||
zip -r ../frontend-build.zip * .vite
|
||||
- name: Attest Build Provenance
|
||||
id: attest
|
||||
uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # pin@v1
|
||||
with:
|
||||
subject-path: "${{ github.workspace }}/src/backend/InvenTree/web/static/frontend-build.zip"
|
||||
|
||||
- name: Upload frontend
|
||||
uses: svenstaro/upload-release-action@81c65b7cd4de9b2570615ce3aad67a41de5b1a13 # pin@2.11.2
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
file: src/backend/InvenTree/web/static/frontend-build.zip
|
||||
asset_name: frontend-build.zip
|
||||
tag: ${{ github.ref }}
|
||||
overwrite: true
|
||||
- name: Upload Attestation
|
||||
uses: svenstaro/upload-release-action@81c65b7cd4de9b2570615ce3aad67a41de5b1a13 # pin@2.11.2
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
asset_name: frontend-build.intoto.jsonl
|
||||
file: ${{ steps.attest.outputs.bundle-path}}
|
||||
tag: ${{ github.ref }}
|
||||
overwrite: true
|
||||
|
||||
docs:
|
||||
runs-on: ubuntu-24.04
|
||||
name: Build and publish documentation
|
||||
permissions:
|
||||
contents: write
|
||||
env:
|
||||
INVENTREE_DB_ENGINE: sqlite3
|
||||
INVENTREE_DB_NAME: inventree
|
||||
INVENTREE_MEDIA_ROOT: /home/runner/work/InvenTree/test_inventree_media
|
||||
INVENTREE_STATIC_ROOT: /home/runner/work/InvenTree/test_inventree_static
|
||||
INVENTREE_BACKUP_DIR: /home/runner/work/InvenTree/test_inventree_backup
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
INVENTREE_DEBUG: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
install: true
|
||||
npm: true
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
|
||||
pip install --require-hashes -r docs/requirements.txt
|
||||
- name: Build documentation
|
||||
run: |
|
||||
invoke build-docs --mkdocs
|
||||
- name: Zip build docs
|
||||
run: |
|
||||
cd docs/site
|
||||
zip -r docs-html.zip *
|
||||
- name: Publish documentation
|
||||
uses: svenstaro/upload-release-action@81c65b7cd4de9b2570615ce3aad67a41de5b1a13 # pin@2.11.2
|
||||
with:
|
||||
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
file: docs/site/docs-html.zip
|
||||
asset_name: docs-html.zip
|
||||
tag: ${{ github.ref }}
|
||||
overwrite: true
|
||||
32 .github/workflows/release.yml (vendored, new file)
@@ -0,0 +1,32 @@
# Runs on releases

name: Publish release notes
on:
release:
types: [published]

jobs:
tweet:
runs-on: ubuntu-latest
steps:
- uses: Eomm/why-don-t-you-tweet@v1
with:
tweet-message: "InvenTree release ${{ github.event.release.tag_name }} is out now! Release notes: ${{ github.event.release.html_url }} #opensource #inventree"
env:
TWITTER_CONSUMER_API_KEY: ${{ secrets.TWITTER_CONSUMER_API_KEY }}
TWITTER_CONSUMER_API_SECRET: ${{ secrets.TWITTER_CONSUMER_API_SECRET }}
TWITTER_ACCESS_TOKEN: ${{ secrets.TWITTER_ACCESS_TOKEN }}
TWITTER_ACCESS_TOKEN_SECRET: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}

reddit:
runs-on: ubuntu-latest
steps:
- uses: bluwy/release-for-reddit-action@v1
with:
username: ${{ secrets.REDDIT_USERNAME }}
password: ${{ secrets.REDDIT_PASSWORD }}
app-id: ${{ secrets.REDDIT_APP_ID }}
app-secret: ${{ secrets.REDDIT_APP_SECRET }}
subreddit: InvenTree
title: "InvenTree version ${{ github.event.release.tag_name }} released"
comment: "${{ github.event.release.body }}"
72 .github/workflows/scorecard.yaml (vendored)
@@ -1,72 +0,0 @@
|
||||
# This workflow uses actions that are not certified by GitHub. They are provided
|
||||
# by a third-party and are governed by separate terms of service, privacy
|
||||
# policy, and support documentation.
|
||||
|
||||
name: Scorecard supply-chain security
|
||||
on:
|
||||
# For Branch-Protection check. Only the default branch is supported. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
|
||||
branch_protection_rule:
|
||||
# To guarantee Maintained check is occasionally updated. See
|
||||
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
|
||||
schedule:
|
||||
- cron: "32 0 * * 0"
|
||||
push:
|
||||
branches: ["master"]
|
||||
|
||||
# Declare default permissions as read only.
|
||||
permissions: read-all
|
||||
|
||||
jobs:
|
||||
analysis:
|
||||
name: Scorecard analysis
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
# Needed to upload the results to code-scanning dashboard.
|
||||
security-events: write
|
||||
# Needed to publish results and get a badge (see publish_results below).
|
||||
id-token: write
|
||||
# Uncomment the permissions below if installing in a private repository.
|
||||
# contents: read
|
||||
# actions: read
|
||||
|
||||
steps:
|
||||
- name: "Checkout code"
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
|
||||
- name: "Run analysis"
|
||||
uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2
|
||||
with:
|
||||
results_file: results.sarif
|
||||
results_format: sarif
|
||||
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
|
||||
# - you want to enable the Branch-Protection check on a *public* repository, or
|
||||
# - you are installing Scorecard on a *private* repository
|
||||
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
|
||||
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
|
||||
|
||||
# Public repositories:
|
||||
# - Publish results to OpenSSF REST API for easy access by consumers
|
||||
# - Allows the repository to include the Scorecard badge.
|
||||
# - See https://github.com/ossf/scorecard-action#publishing-results.
|
||||
# For private repositories:
|
||||
# - `publish_results` will always be set to `false`, regardless
|
||||
# of the value entered here.
|
||||
publish_results: true
|
||||
|
||||
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
|
||||
# format to the repository Actions tab.
|
||||
- name: "Upload artifact"
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: SARIF file
|
||||
path: results.sarif
|
||||
retention-days: 5
|
||||
|
||||
# Upload the results to GitHub's code scanning dashboard.
|
||||
- name: "Upload to code-scanning"
|
||||
uses: github/codeql-action/upload-sarif@2d92b76c45b91eb80fc44c74ce3fce0ee94e8f9d # v3.30.0
|
||||
with:
|
||||
sarif_file: results.sarif
|
||||
27 .github/workflows/stale.yaml (vendored)
@@ -1,27 +0,0 @@
# Marks all issues that do not receive activity stale starting 2022
name: Mark stale issues and pull requests

on:
schedule:
- cron: "24 11 * * *"

permissions:
contents: read

jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write

steps:
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # pin@v9.1.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue seems stale. Please react to show this is still important."
stale-pr-message: "This PR seems stale. Please react to show this is still important."
stale-issue-label: "inactive"
stale-pr-label: "inactive"
start-date: "2022-01-01"
exempt-all-milestones: true
25 .github/workflows/stale.yml (vendored, new file)
@@ -0,0 +1,25 @@
# Marks all issues that do not receive activity stale starting 2022
name: Mark stale issues and pull requests

on:
schedule:
- cron: '24 11 * * *'

jobs:
stale:

runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write

steps:
- uses: actions/stale@v3
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue seems stale. Please react to show this is still important.'
stale-pr-message: 'This PR seems stale. Please react to show this is still important.'
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
start-date: '2022-01-01'
exempt-all-milestones: true
73 .github/workflows/translations.yaml (vendored)
@@ -1,73 +0,0 @@
|
||||
name: Update Translation Files
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
env:
|
||||
python_version: 3.9
|
||||
node_version: 20
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
synchronize-with-crowdin:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INVENTREE_DB_NAME: "./test_db.sqlite"
|
||||
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
|
||||
INVENTREE_DEBUG: true
|
||||
INVENTREE_LOG_LEVEL: INFO
|
||||
INVENTREE_MEDIA_ROOT: ./media
|
||||
INVENTREE_STATIC_ROOT: ./static
|
||||
INVENTREE_BACKUP_DIR: ./backup
|
||||
INVENTREE_SITE_URL: http://localhost:8000
|
||||
|
||||
steps:
|
||||
- name: Checkout Code
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # pin@v5.0.0
|
||||
with:
|
||||
persist-credentials: true
|
||||
- name: Environment Setup
|
||||
uses: ./.github/actions/setup
|
||||
with:
|
||||
install: true
|
||||
apt-dependency: gettext
|
||||
- name: Make Translations
|
||||
run: invoke dev.translate
|
||||
- name: Remove compiled static files
|
||||
run: rm -rf src/backend/InvenTree/static
|
||||
- name: Remove all local changes that are not *.po files
|
||||
run: |
|
||||
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git add src/backend/InvenTree/locale/en/LC_MESSAGES/django.po src/frontend/src/locales/en/messages.po
|
||||
echo "Adding commit (or ignoring if no changes)"
|
||||
git commit -m "add translations" || true
|
||||
echo "Removing all other changes"
|
||||
git reset --hard
|
||||
echo "Resetting to HEAD~"
|
||||
git reset HEAD~ || true
|
||||
- name: crowdin action
|
||||
uses: crowdin/github-action@9787f4fcb6a8450929673f1e8db841e8a5c35a2f # pin@v2
|
||||
with:
|
||||
upload_sources: true
|
||||
upload_translations: false
|
||||
download_translations: true
|
||||
localization_branch_name: l10_crowdin
|
||||
create_pull_request: true
|
||||
pull_request_title: 'New Crowdin updates'
|
||||
pull_request_body: 'New Crowdin translations by [Crowdin GH Action](https://github.com/crowdin/github-action)'
|
||||
pull_request_base_branch_name: 'master'
|
||||
pull_request_labels: 'translations'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
|
||||
CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
|
||||
49 .github/workflows/translations.yml (vendored, new file)
@@ -0,0 +1,49 @@
name: Update Translation Files

on:
push:
branches:
- master

jobs:
build:

runs-on: ubuntu-latest

env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static

steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v1
with:
python-version: 3.9
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
- name: Make Translations
run: |
invoke translate
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout -b l10_local
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10
force: true
22 .github/workflows/update.yml (vendored, new file)
@@ -0,0 +1,22 @@
name: Update dependency files regularly

on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup
run: pip install -r requirements-dev.txt
- name: Update requirements.txt
run: pip-compile --output-file=requirements.txt requirements.in -U
- name: Update requirements-dev.txt
run: pip-compile --generate-hashes --output-file=requirements-dev.txt requirements-dev.in -U
- uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: "[Bot] Updated dependency"
branch: dep-update
24 .github/workflows/update.yml.disabled (vendored)
@@ -1,24 +0,0 @@
name: Update dependency files regularly

on:
workflow_dispatch: null
schedule:
- cron: "0 0 * * *"

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # pin@v4.2.2
with:
persist-credentials: false
- name: Setup
run: pip install --require-hashes -r requirements-dev.txt
- name: Update requirements.txt
run: pip-compile --output-file=requirements.txt requirements.in -U
- name: Update requirements-dev.txt
run: pip-compile --generate-hashes --output-file=requirements-dev.txt requirements-dev.in -U
- uses: stefanzweifel/git-auto-commit-action@fd157da78fa13d9383e5580d1fd1184d89554b51 # pin@v4.15.1
with:
commit_message: "[Bot] Updated dependency"
branch: dep-update
25 .github/workflows/welcome.yml (vendored, new file)
@@ -0,0 +1,25 @@
# welcome new contributers
name: Welcome
on:
pull_request:
types: [opened]
issues:
types: [opened]

jobs:
run:
runs-on: ubuntu-latest
permissions:
pull-requests: write

steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-message: |
Welcome to InvenTree! Please check the [contributing docs](https://inventree.readthedocs.io/en/latest/contribute/) on how to help.
If you experience setup / install issues please read all [install docs]( https://inventree.readthedocs.io/en/latest/start/intro/).
pr-message: |
This is your first PR, welcome!
Please check [Contributing](https://github.com/inventree/InvenTree/blob/master/CONTRIBUTING.md) to make sure your submission fits our general code-style and workflow.
Make sure to document why this PR is needed and to link connected issues so we can review it faster.
46 .gitignore (vendored)
@@ -7,7 +7,6 @@ __pycache__/
|
||||
.Python
|
||||
env/
|
||||
inventree-env/
|
||||
.venv/
|
||||
./build/
|
||||
.cache/
|
||||
develop-eggs/
|
||||
@@ -19,6 +18,7 @@ share/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
@@ -26,12 +26,10 @@ var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
*.DS_Store
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
*.sqlite
|
||||
*.sqlite3
|
||||
*.sqlite3-journal
|
||||
*.backup
|
||||
@@ -40,36 +38,36 @@ local_settings.py
|
||||
# Files used for testing
|
||||
inventree-demo-dataset/
|
||||
inventree-data/
|
||||
dummy_image.*
|
||||
_tmp.csv
|
||||
inventree/label.pdf
|
||||
inventree/label.png
|
||||
|
||||
# Sphinx files
|
||||
docs/_build
|
||||
|
||||
# Local static and media file storage (only when running in development mode)
|
||||
inventree_media
|
||||
inventree_static
|
||||
static_i18n
|
||||
|
||||
# Local config files
|
||||
# Local config file
|
||||
config.yaml
|
||||
plugins.txt
|
||||
secret_key.txt
|
||||
oidc.pem
|
||||
|
||||
# Default data file
|
||||
data.json
|
||||
*.json.tmp
|
||||
*.tmp.json
|
||||
|
||||
# Key file
|
||||
secret_key.txt
|
||||
|
||||
# IDE / development files
|
||||
.idea/
|
||||
*.code-workspace
|
||||
.vscode/
|
||||
.bash_history
|
||||
.DS_Store
|
||||
|
||||
# https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
||||
.vscode/*
|
||||
#!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
#!.vscode/extensions.json
|
||||
#!.vscode/*.code-snippets
|
||||
|
||||
# Coverage reports
|
||||
.coverage
|
||||
@@ -80,17 +78,11 @@ js_tmp/
|
||||
|
||||
# Development files
|
||||
dev/
|
||||
dev-db/
|
||||
data/
|
||||
env/
|
||||
|
||||
# Locale stats file
|
||||
src/backend/InvenTree/InvenTree/locale_stats.json
|
||||
src/backend/InvenTree/InvenTree/licenses.txt
|
||||
|
||||
# Logs
|
||||
src/backend/InvenTree/logs.json
|
||||
src/backend/InvenTree/logs.log
|
||||
locale_stats.json
|
||||
|
||||
# node.js
|
||||
node_modules/
|
||||
@@ -99,15 +91,7 @@ node_modules/
|
||||
maintenance_mode_state.txt
|
||||
|
||||
# plugin dev directory
|
||||
src/backend/InvenTree/plugins/
|
||||
plugins/
|
||||
|
||||
# Compiled translation files
|
||||
*.mo
|
||||
messages.ts
|
||||
|
||||
# Generated API schema file
|
||||
api.yaml
|
||||
|
||||
# web frontend (static files)
|
||||
src/backend/InvenTree/web/static
|
||||
InvenTree/web/static
|
||||
|
||||
41 .gitpod.yml (new file)
@@ -0,0 +1,41 @@
|
||||
tasks:
|
||||
- name: Setup django
|
||||
before: |
|
||||
export INVENTREE_DB_ENGINE='sqlite3'
|
||||
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
|
||||
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
|
||||
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
|
||||
export PIP_USER='no'
|
||||
|
||||
sudo apt install gettext
|
||||
python3 -m venv venv
|
||||
source venv/bin/activate
|
||||
pip install invoke
|
||||
mkdir dev
|
||||
inv setup-test
|
||||
gp sync-done start_server
|
||||
|
||||
- name: Start server
|
||||
init: gp sync-await start_server
|
||||
command: |
|
||||
gp sync-await start_server
|
||||
export INVENTREE_DB_ENGINE='sqlite3'
|
||||
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
|
||||
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
|
||||
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
|
||||
|
||||
source venv/bin/activate
|
||||
inv server
|
||||
|
||||
ports:
|
||||
- port: 8000
|
||||
onOpen: open-preview
|
||||
|
||||
github:
|
||||
prebuilds:
|
||||
master: true
|
||||
pullRequests: false
|
||||
pullRequestsFromForks: true
|
||||
addBadge: true
|
||||
addLabel: gitpod-ready
|
||||
addCheck: false
|
||||
41 .pkgr.yml
@@ -1,41 +0,0 @@
|
||||
name: inventree
|
||||
description: Open Source Inventory Management System
|
||||
homepage: https://inventree.org
|
||||
notifications: true
|
||||
buildpack: https://github.com/mjmair/heroku-buildpack-python#v216-mjmair
|
||||
env:
|
||||
- STACK=heroku-20
|
||||
- DISABLE_COLLECTSTATIC=1
|
||||
- INVENTREE_DB_ENGINE=sqlite3
|
||||
- INVENTREE_DB_NAME=database.sqlite3
|
||||
- INVENTREE_PLUGINS_ENABLED
|
||||
- INVENTREE_MEDIA_ROOT=/opt/inventree/media
|
||||
- INVENTREE_STATIC_ROOT=/opt/inventree/static
|
||||
- INVENTREE_BACKUP_DIR=/opt/inventree/backup
|
||||
- INVENTREE_PLUGIN_FILE=/opt/inventree/plugins.txt
|
||||
- INVENTREE_CONFIG_FILE=/opt/inventree/config.yaml
|
||||
- APP_REPO=inventree/InvenTree
|
||||
before_install: contrib/packager.io/preinstall.sh
|
||||
after_install: contrib/packager.io/postinstall.sh
|
||||
before_remove: contrib/packager.io/preinstall.sh
|
||||
before:
|
||||
- contrib/packager.io/before.sh
|
||||
dependencies:
|
||||
- curl
|
||||
- "python3.9 | python3.10 | python3.11"
|
||||
- "python3.9-venv | python3.10-venv | python3.11-venv"
|
||||
- "python3.9-dev | python3.10-dev | python3.11-dev"
|
||||
- python3-pip
|
||||
- python3-cffi
|
||||
- python3-brotli
|
||||
- python3-wheel
|
||||
- libpango-1.0-0
|
||||
- libharfbuzz0b
|
||||
- libpangoft2-1.0-0
|
||||
- gettext
|
||||
- nginx
|
||||
- jq
|
||||
- "libffi7 | libffi8"
|
||||
targets:
|
||||
ubuntu-20.04: true
|
||||
debian-11: true
|
||||
@@ -2,90 +2,40 @@
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
exclude: |
|
||||
(?x)^(
|
||||
src/backend/InvenTree/InvenTree/static/.*|
|
||||
src/backend/InvenTree/locale/.*|
|
||||
src/frontend/src/locales/.* |
|
||||
.*/migrations/.* |
|
||||
src/frontend/yarn.lock
|
||||
InvenTree/InvenTree/static/.*|
|
||||
InvenTree/locale/.*
|
||||
)$
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v5.0.0
|
||||
rev: v4.3.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
- id: check-yaml
|
||||
exclude: mkdocs.yml
|
||||
- id: check-added-large-files
|
||||
- id: mixed-line-ending
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.11.13
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: '4.0.1'
|
||||
hooks:
|
||||
- id: ruff-format
|
||||
args: [--preview]
|
||||
- id: ruff
|
||||
args: [
|
||||
--fix,
|
||||
# --unsafe-fixes,
|
||||
--preview
|
||||
]
|
||||
- repo: https://github.com/astral-sh/uv-pre-commit
|
||||
rev: 0.7.12
|
||||
- id: flake8
|
||||
additional_dependencies: [
|
||||
'flake8-bugbear',
|
||||
'flake8-docstrings',
|
||||
'flake8-string-format',
|
||||
'pep8-naming ',
|
||||
]
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: '5.10.1'
|
||||
hooks:
|
||||
- id: isort
|
||||
- repo: https://github.com/jazzband/pip-tools
|
||||
rev: 6.6.2
|
||||
hooks:
|
||||
- id: pip-compile
|
||||
name: pip-compile requirements-dev.in
|
||||
args: [src/backend/requirements-dev.in, -o, src/backend/requirements-dev.txt, --no-strip-extras, --generate-hashes]
|
||||
files: src/backend/requirements-dev\.(in|txt)$
|
||||
args: [--generate-hashes, requirements-dev.in, -o, requirements-dev.txt]
|
||||
files: ^requirements-dev\.(in|txt)$
|
||||
- id: pip-compile
|
||||
name: pip-compile requirements.txt
|
||||
args: [src/backend/requirements.in, -o, src/backend/requirements.txt, --no-strip-extras, --generate-hashes]
|
||||
files: src/backend/requirements\.(in|txt)$
|
||||
- id: pip-compile
|
||||
name: pip-compile requirements.txt
|
||||
args: [contrib/dev_reqs/requirements.in, -o, contrib/dev_reqs/requirements.txt, --no-strip-extras, --generate-hashes, -b, src/backend/requirements.txt]
|
||||
files: contrib/dev_reqs/requirements\.(in|txt)$
|
||||
- id: pip-compile
|
||||
name: pip-compile requirements.txt
|
||||
args: [docs/requirements.in, -o, docs/requirements.txt, --no-strip-extras, --generate-hashes, -b, src/backend/requirements.txt]
|
||||
files: docs/requirements\.(in|txt)$
|
||||
- id: pip-compile
|
||||
name: pip-compile requirements.txt
|
||||
args: [contrib/container/requirements.in, -o, contrib/container/requirements.txt, --python-version=3.11, --no-strip-extras, --generate-hashes, -b, src/backend/requirements.txt]
|
||||
files: contrib/container/requirements\.(in|txt)$
|
||||
- repo: https://github.com/Riverside-Healthcare/djLint
|
||||
rev: v1.36.4
|
||||
hooks:
|
||||
- id: djlint-django
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.4.1
|
||||
hooks:
|
||||
- id: codespell
|
||||
additional_dependencies:
|
||||
- tomli
|
||||
exclude: >
|
||||
(?x)^(
|
||||
docs/docs/stylesheets/.*|
|
||||
docs/docs/javascripts/.*|
|
||||
docs/docs/webfonts/.* |
|
||||
src/frontend/src/locales/.* |
|
||||
pyproject.toml |
|
||||
src/frontend/vite.config.ts |
|
||||
)$
|
||||
- repo: https://github.com/biomejs/pre-commit
|
||||
rev: v2.0.0-beta.5
|
||||
hooks:
|
||||
- id: biome-check
|
||||
additional_dependencies: ["@biomejs/biome@1.9.4"]
|
||||
files: ^src/frontend/.*\.(js|ts|tsx)$
|
||||
- repo: https://github.com/gitleaks/gitleaks
|
||||
rev: v8.27.2
|
||||
hooks:
|
||||
- id: gitleaks
|
||||
language_version: 1.23.6
|
||||
#- repo: https://github.com/jumanjihouse/pre-commit-hooks
|
||||
# rev: 3.0.0
|
||||
# hooks:
|
||||
# - id: shellcheck
|
||||
- repo: https://github.com/isidentical/teyit
|
||||
rev: 0.4.3
|
||||
hooks:
|
||||
- id: teyit
|
||||
args: [requirements.in, -o, requirements.txt]
|
||||
files: ^requirements\.(in|txt)$
|
||||
|
||||
@@ -1 +0,0 @@
3.9.2
5 .vscode/extensions.json (vendored)
@@ -1,5 +0,0 @@
{
"recommendations": [
"biomejs.biome"
]
}
74 .vscode/launch.json (vendored)
@@ -1,74 +0,0 @@
|
||||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "InvenTree Server",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
|
||||
"args": [
|
||||
"runserver",
|
||||
// "0.0.0.0:8000", // expose server in network (useful for testing with mobile app)
|
||||
// "--noreload" // disable auto-reload
|
||||
],
|
||||
"django": true,
|
||||
"justMyCode": true
|
||||
},
|
||||
{
|
||||
"name": "InvenTree Server - Tests",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
|
||||
"args": [
|
||||
"test",
|
||||
// "part.test_api.PartCategoryAPITest", // run only a specific test
|
||||
],
|
||||
"django": true,
|
||||
"justMyCode": true
|
||||
},
|
||||
{
|
||||
"name": "InvenTree Server - 3rd party",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
|
||||
"args": [
|
||||
"runserver"
|
||||
],
|
||||
"django": true,
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "InvenTree invoke schema",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/.venv/lib/python3.9/site-packages/invoke/__main__.py",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"args": [
|
||||
"dev.schema","--ignore-warnings"
|
||||
],
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "schema generation",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
|
||||
"args": [
|
||||
"schema",
|
||||
"--file","src/frontend/schema.yml"
|
||||
],
|
||||
"django": true,
|
||||
"justMyCode": false
|
||||
},
|
||||
{
|
||||
"name": "InvenTree Frontend - Vite",
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"url": "http://localhost:5173",
|
||||
"webRoot": "${workspaceFolder}/src/frontend"
|
||||
}
|
||||
]
|
||||
}
|
||||
8 .vscode/settings.json (vendored)
@@ -1,8 +0,0 @@
{
"[typescript]": {
"editor.defaultFormatter": "biomejs.biome"
},
"editor.codeActionsOnSave": {
"quickfix.biome": "explicit"
}
}
70 .vscode/tasks.json (vendored)
@@ -1,70 +0,0 @@
|
||||
{
|
||||
// See https://go.microsoft.com/fwlink/?LinkId=733558
|
||||
// for the documentation about the tasks.json format
|
||||
|
||||
// the problemMatchers should prevent vscode from asking how it should check the output
|
||||
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "worker",
|
||||
"type": "shell",
|
||||
"command": "invoke worker",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "clean-settings",
|
||||
"type": "shell",
|
||||
"command": "invoke int.clean-settings",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "delete-data",
|
||||
"type": "shell",
|
||||
"command": "invoke dev.delete-data",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "migrate",
|
||||
"type": "shell",
|
||||
"command": "invoke migrate",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "server",
|
||||
"type": "shell",
|
||||
"command": "invoke dev.server",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "setup-dev",
|
||||
"type": "shell",
|
||||
"command": "invoke dev.setup-dev",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "setup-test",
|
||||
"type": "shell",
|
||||
"command": "invoke dev.setup-test -i --path dev/inventree-demo-dataset",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "superuser",
|
||||
"type": "shell",
|
||||
"command": "invoke superuser",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "test",
|
||||
"type": "shell",
|
||||
"command": "invoke dev.test",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
{
|
||||
"label": "update",
|
||||
"type": "shell",
|
||||
"command": "invoke update",
|
||||
"problemMatcher": [],
|
||||
},
|
||||
]
|
||||
}
|
||||
33 CHANGELOG.md
@@ -1,33 +0,0 @@
# Changelog

All notable changes to this project will be documented in this file (starting with 1.0.0).

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased] - yyyy-mm-dd (in UTC)

### Added

### Changed

- Changed site URL check to allow protocol mismatches if `INVENTREE_SITE_LAX_PROTOCOL` is set to `True` (default) in [#10454](https://github.com/inventree/InvenTree/pull/10454)

### Removed

## [Unreleased - 1.0.0] - 2025-07-xx

The first "stable" release following semver; beyond the version scheme it does not differ extensively from the previous releases. The use of 1.0 indicates the stability that users already expect from InvenTree.

An overarching theme of this release is the complete switch to a new UI framework and paradigm (PUI). The old templating-based UI (CUI) is now removed. This makes major improvements in the security and portability of InvenTree possible.

Our blog holds [a few articles](https://inventree.org/blog/2024/09/23/ui-roadmap) on the topic. This journey started in [March 2022](https://github.com/inventree/InvenTree/issues/2789) and was announced [in 2023](https://inventree.org/blog/2023/08/28/react).

### Added

### Changed

### Removed

128 CODE_OF_CONDUCT.md
@@ -1,128 +0,0 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, religion, or sexual identity
|
||||
and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
* Demonstrating empathy and kindness toward other people
|
||||
* Being respectful of differing opinions, viewpoints, and experiences
|
||||
* Giving and gracefully accepting constructive feedback
|
||||
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
* Focusing on what is best not just for us as individuals, but for the
|
||||
overall community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
* The use of sexualized language or imagery, and sexual attention or
|
||||
advances of any kind
|
||||
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or email
|
||||
address, without their explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement Responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported to the community leaders responsible for enforcement at
|
||||
support AT inventree DOT org.
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement Guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series
|
||||
of actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or
|
||||
permanent ban.
|
||||
|
||||
### 3. Temporary Ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent Ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within
|
||||
the community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.0, available at
|
||||
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
|
||||
|
||||
Community Impact Guidelines were inspired by [Mozilla's code of conduct
|
||||
enforcement ladder](https://github.com/mozilla/diversity).
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
https://www.contributor-covenant.org/faq. Translations are available at
|
||||
https://www.contributor-covenant.org/translations.
|
||||
236 CONTRIBUTING.md
@@ -1,50 +1,200 @@
|
||||
### Contributing to InvenTree
|
||||
Hi there, thank you for your intrest in contributing!
|
||||
Please read the contribution guidelines below, before submitting your first pull request to the InvenTree codebase.
|
||||
|
||||
Hi there, thank you for your interest in contributing!
|
||||
Please read our contribution guidelines, before submitting your first pull request to the InvenTree codebase.
|
||||
## Quickstart
|
||||
|
||||
### Project File Structure
|
||||
The following commands will let you quickly configure and run a development server, complete with a demo dataset to work with:
|
||||
|
||||
The InvenTree project is split into two main components: frontend and backend. Their source is located in the `src` directory. All other files are used for project management, documentation, and testing.
|
||||
### Bare Metal
|
||||
|
||||
```bash
|
||||
InvenTree/
|
||||
├─ .devops/ # Files for Azure DevOps
|
||||
├─ .github/ # Files for GitHub
|
||||
│ ├─ actions/ # Reused actions
|
||||
│ ├─ ISSUE_TEMPLATE/ # Templates for issues and pull requests
|
||||
│ ├─ workflows/ # CI/CD flows
|
||||
│ ├─ scripts/ # CI scripts
|
||||
├─ .vscode/ # Settings for Visual Code IDE
|
||||
├─ assets/ # General project assets
|
||||
├─ contrib/ # Files needed for deployments
|
||||
│ ├─ container/ # Files related to building container images
|
||||
│ ├─ installer/ # Files needed to build single-file installer
|
||||
│ ├─ packager.io/ # Files needed for Debian/Ubuntu packages
|
||||
├─ docs/ # Directory for documentation / General helper files
|
||||
│ ├─ ci/ # CI for documentation
|
||||
│ ├─ docs/ # Source for documentation
|
||||
├─ src/ # Source for application
|
||||
│ ├─ backend/ # Directory for backend parts
|
||||
│ │ ├─ InvenTree/ # Source for backend
|
||||
│ │ ├─ requirements.txt # Dependencies for backend
|
||||
│ │ ├─ package.json # Dependencies for backend HTML linting
|
||||
│ ├─ frontend/ # Directory for frontend parts
|
||||
│ │ ├─ src/ # Source for frontend
|
||||
│ │ │ ├─ main.tsx # Entry point for frontend
|
||||
│ │ ├─ tests/ # Tests for frontend
|
||||
│ │ ├─ netlify.toml # Settings for frontend previews (Netlify)
|
||||
│ │ ├─ package.json # Dependencies for frontend
|
||||
│ │ ├─ playwright.config.ts # Settings for frontend tests
|
||||
│ │ ├─ tsconfig.json # Settings for frontend compilation
|
||||
├─ .pkgr.yml # Build definition for Debian/Ubuntu packages
|
||||
├─ .pre-commit-config.yaml # Code formatter/linter configuration
|
||||
├─ CONTRIBUTING.md # Contribution guidelines and overview
|
||||
├─ Procfile # Process definition for Debian/Ubuntu packages
|
||||
├─ README.md # General project information and overview
|
||||
├─ runtime.txt # Python runtime settings for Debian/Ubuntu packages build
|
||||
├─ SECURITY.md # Project security policy
|
||||
├─ tasks.py # Action definitions for development, testing and deployment
|
||||
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
|
||||
python3 -m venv env && source env/bin/activate
|
||||
pip install invoke && invoke
|
||||
pip install invoke && invoke setup-dev --tests
|
||||
```
|
||||
|
||||
Refer to our [contribution guidelines](https://docs.inventree.org/en/latest/develop/contributing/) for more information!
|
||||
### Docker
|
||||
|
||||
```bash
|
||||
git clone https://github.com/inventree/InvenTree.git && cd InvenTree
|
||||
docker compose run inventree-dev-server invoke install
|
||||
docker compose run inventree-dev-server invoke setup-test
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
Read the [InvenTree setup documentation](https://inventree.readthedocs.io/en/latest/start/intro/) for a complete installation reference guide.
|
||||
|
||||
### Setup Devtools
|
||||
|
||||
Run the following command to set up all toolsets for development.
|
||||
|
||||
```bash
|
||||
invoke setup-dev
|
||||
```
|
||||
|
||||
*We recommend you run this command before starting to contribute. This will install and set up `pre-commit` to run some checks before each commit and help reduce the style errors.*
|
||||
|
||||
## Branches and Versioning
|
||||
|
||||
InvenTree roughly follows the [GitLab flow](https://docs.gitlab.com/ee/topics/gitlab_flow.html) branching style, allowing simple management of multiple tagged releases, short-lived branches, and development on the main branch.
|
||||
|
||||
### Version Numbering
|
||||
|
||||
InvenTree version numbering follows the [semantic versioning](https://semver.org/) specification.
|
||||
|
||||
### Master Branch
|
||||
|
||||
The HEAD of the "main" or "master" branch of InvenTree represents the current "latest" state of code development.
|
||||
|
||||
- All feature branches are merged into master
|
||||
- All bug fixes are merged into master
|
||||
|
||||
**No pushing to master:** New features must be submitted as a pull request from a separate branch (one branch per feature).
|
||||
|
||||
### Feature Branches
|
||||
|
||||
Feature branches should be branched *from* the *master* branch.
|
||||
|
||||
- One major feature per branch / pull request
|
||||
- Feature pull requests are merged back *into* the master branch
|
||||
- Features *may* also be merged into a release candidate branch
|
||||
|
||||
### Stable Branch
|
||||
|
||||
The HEAD of the "stable" branch represents the latest stable release code.
|
||||
|
||||
- Versioned releases are merged into the "stable" branch
|
||||
- Bug fix branches are made *from* the "stable" branch
|
||||
|
||||
#### Release Candidate Branches
|
||||
|
||||
- Release candidate branches are made from master, and merged into stable.
|
||||
- RC branches are targeted at a major/minor version, e.g. "0.5"
|
||||
- When a release candidate branch is merged into *stable*, the release is tagged
|
||||
|
||||
#### Bugfix Branches
|
||||
|
||||
- If a bug is discovered in a tagged release version of InvenTree, a "bugfix" or "hotfix" branch should be made *from* that tagged release
|
||||
- When approved, the branch is merged back *into* stable, with an incremented PATCH number (e.g. 0.4.1 -> 0.4.2)
|
||||
- The bugfix *must* also be cherry picked into the *master* branch.
|
||||
|
||||
## Environment
|
||||
### Target version
|
||||
We are currently targeting:
|
||||
| Name | Minimum version |
|
||||
|---|---|
|
||||
| Python | 3.9 |
|
||||
| Django | 3.2 |
|
||||
|
||||
### Auto creating updates
|
||||
The following tools can be used to auto-upgrade syntax that was deprecated in newer versions:
|
||||
```bash
|
||||
pip install pyupgrade
|
||||
pip install django-upgrade
|
||||
```
|
||||
|
||||
To update the codebase run the following script.
|
||||
```bash
|
||||
pyupgrade `find . -name "*.py"`
|
||||
django-upgrade --target-version 3.2 `find . -name "*.py"`
|
||||
```
|
||||
|
||||
## Credits
|
||||
If you add any new dependencies / libraries, they need to be added to [the docs](https://github.com/inventree/inventree-docs/blob/master/docs/credits.md). Please try to do so in a timely manner.
|
||||
|
||||
|
||||
## Migration Files
|
||||
|
||||
Any required migration files **must** be included in the commit, or the pull-request will be rejected. If you change the underlying database schema, make sure you run `invoke migrate` and commit the migration files before submitting the PR.
|
||||
|
||||
*Note: A github action checks for unstaged migration files and will reject the PR if it finds any!*
|
||||
|
||||
## Unit Testing
|
||||
|
||||
Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage for any new features is insufficient, or the overall code coverage is decreased.
|
||||
|
||||
The InvenTree code base makes use of [GitHub actions](https://github.com/features/actions) to run a suite of automated tests against the code base every time a new pull request is received. These actions include (but are not limited to):
|
||||
|
||||
- Checking Python and Javascript code against standard style guides
|
||||
- Running unit test suite
|
||||
- Automated building and pushing of docker images
|
||||
- Generating translation files
|
||||
|
||||
The various GitHub actions can be found in the `.github/workflows` directory.
|
||||
|
||||
## Code Style
|
||||
|
||||
Submitted Python code is automatically checked against PEP style guidelines. Locally, you can run `invoke style` to ensure the style checks will pass before submitting the PR.
Please write docstrings for each function and class - we follow the [google doc-style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for Python. Docstrings for general JavaScript code are encouraged! Docstyles are checked by `invoke style`.
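
For reference, here is a minimal sketch of the docstring layout that the style checks expect. The function name and its arguments are purely illustrative:

```python
def allocate_stock(item_id: int, quantity: float) -> bool:
    """Allocate a quantity of stock against a hypothetical build order.

    This function and its arguments are illustrative only; the point is the
    Google-style docstring layout (summary line, Args, Returns, Raises).

    Args:
        item_id: Primary key of the stock item to allocate.
        quantity: Quantity to allocate (must be positive).

    Returns:
        True if the allocation succeeded, False otherwise.

    Raises:
        ValueError: If quantity is not positive.
    """
    if quantity <= 0:
        raise ValueError("quantity must be positive")
    return True
```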
|
||||
|
||||
## Documentation
|
||||
|
||||
New features or updates to existing features should be accompanied by user documentation. A PR with associated documentation should link to the matching PR at https://github.com/inventree/inventree-docs/
|
||||
|
||||
## Translations
|
||||
|
||||
Any user-facing strings *must* be passed through the translation engine.
|
||||
|
||||
- InvenTree code is written in English
|
||||
- User translatable strings are provided in English as the primary language
|
||||
- Secondary language translations are provided [via Crowdin](https://crowdin.com/project/inventree)
|
||||
|
||||
*Note: Translation files are updated via GitHub actions - you do not need to compile translations files before submitting a pull request!*
|
||||
|
||||
### Python Code
|
||||
|
||||
For strings exposed via Python code, use the following format:
|
||||
|
||||
```python
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
user_facing_string = _('This string will be exposed to the translation engine!')
|
||||
```
|
||||
|
||||
### Templated Strings
|
||||
|
||||
HTML and javascript files are passed through the django templating engine. Translatable strings are implemented as follows:
|
||||
|
||||
```html
|
||||
{% load i18n %}
|
||||
|
||||
<span>{% trans "This string will be translated" %} - this string will not!</span>
|
||||
```
|
||||
|
||||
## Github use
|
||||
### Tags
|
||||
The tags describe issues and PRs in multiple areas:
|
||||
| Area | Name | Description |
|
||||
|---|---|---|
|
||||
| Type Labels | | |
|
||||
| | breaking | Indicates a major update or change which breaks compatibility |
|
||||
| | bug | Identifies a bug which needs to be addressed |
|
||||
| | dependency | Relates to a project dependency |
|
||||
| | duplicate | Duplicate of another issue or PR |
|
||||
| | enhancement | This is a suggested enhancement or new feature |
|
||||
| | help wanted | Assistance required |
|
||||
| | invalid | This issue or PR is considered invalid |
|
||||
| | inactive | Indicates lack of activity |
|
||||
| | question | This is a question |
|
||||
| | roadmap | This is a roadmap feature with no immediate plans for implementation |
|
||||
| | security | Relates to a security issue |
|
||||
| | starter | Good issue for a developer new to the project |
|
||||
| | wontfix | No work will be done against this issue or PR |
|
||||
| Feature Labels | | |
|
||||
| | API | Relates to the API |
|
||||
| | barcode | Barcode scanning and integration |
|
||||
| | build | Build orders |
|
||||
| | importer | Data importing and processing |
|
||||
| | order | Purchase order and sales orders |
|
||||
| | part | Parts |
|
||||
| | plugin | Plugin ecosystem |
|
||||
| | pricing | Pricing functionality |
|
||||
| | report | Report generation |
|
||||
| | stock | Stock item management |
|
||||
| | user interface | User interface |
|
||||
| Ecosystem Labels | | |
|
||||
| | demo | Relates to the InvenTree demo server or dataset |
|
||||
| | docker | Docker / docker-compose |
|
||||
| | CI | CI / unit testing ecosystem |
|
||||
| | setup | Relates to the InvenTree setup / installation process |
|
||||
|
||||
140 Dockerfile (new file)
@@ -0,0 +1,140 @@
|
||||
# The InvenTree dockerfile provides two build targets:
|
||||
#
|
||||
# production:
|
||||
# - Required files are copied into the image
|
||||
# - Runs InvenTree web server under gunicorn
|
||||
#
|
||||
# dev:
|
||||
# - Expects source directories to be loaded as a run-time volume
|
||||
# - Runs InvenTree web server under django development server
|
||||
# - Monitors source files for any changes, and live-reloads server
|
||||
|
||||
|
||||
FROM python:3.9-slim as base
|
||||
|
||||
# Build arguments for this image
|
||||
ARG commit_hash=""
|
||||
ARG commit_date=""
|
||||
ARG commit_tag=""
|
||||
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
|
||||
# Ref: https://github.com/pyca/cryptography/issues/5776
|
||||
ENV CRYPTOGRAPHY_DONT_BUILD_RUST 1
|
||||
|
||||
ENV INVENTREE_LOG_LEVEL="WARNING"
|
||||
ENV INVENTREE_DOCKER="true"
|
||||
|
||||
# InvenTree paths
|
||||
ENV INVENTREE_HOME="/home/inventree"
|
||||
ENV INVENTREE_MNG_DIR="${INVENTREE_HOME}/InvenTree"
|
||||
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/data"
|
||||
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
|
||||
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
|
||||
ENV INVENTREE_PLUGIN_DIR="${INVENTREE_DATA_DIR}/plugins"
|
||||
|
||||
# InvenTree configuration files
|
||||
ENV INVENTREE_CONFIG_FILE="${INVENTREE_DATA_DIR}/config.yaml"
|
||||
ENV INVENTREE_SECRET_KEY_FILE="${INVENTREE_DATA_DIR}/secret_key.txt"
|
||||
ENV INVENTREE_PLUGIN_FILE="${INVENTREE_DATA_DIR}/plugins.txt"
|
||||
|
||||
# Worker configuration (can be altered by user)
|
||||
ENV INVENTREE_GUNICORN_WORKERS="4"
|
||||
ENV INVENTREE_BACKGROUND_WORKERS="4"
|
||||
|
||||
# Default web server address:port
|
||||
ENV INVENTREE_WEB_ADDR=0.0.0.0
|
||||
ENV INVENTREE_WEB_PORT=8000
|
||||
|
||||
LABEL org.label-schema.schema-version="1.0" \
|
||||
org.label-schema.build-date=${DATE} \
|
||||
org.label-schema.vendor="inventree" \
|
||||
org.label-schema.name="inventree/inventree" \
|
||||
org.label-schema.url="https://hub.docker.com/r/inventree/inventree" \
|
||||
org.label-schema.vcs-url="https://github.com/inventree/InvenTree.git" \
|
||||
org.label-schema.vcs-ref=${commit_tag}
|
||||
|
||||
# RUN apt-get upgrade && apt-get update
|
||||
RUN apt-get update
|
||||
|
||||
# Install required system packages
|
||||
RUN apt-get install -y --no-install-recommends \
|
||||
git gcc g++ gettext gnupg libffi-dev \
|
||||
# Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#debian-11
|
||||
poppler-utils libpango-1.0-0 libpangoft2-1.0-0 \
|
||||
# Image format support
|
||||
libjpeg-dev webp \
|
||||
# SQLite support
|
||||
sqlite3 \
|
||||
# PostgreSQL support
|
||||
libpq-dev \
|
||||
# MySQL / MariaDB support
|
||||
default-libmysqlclient-dev mariadb-client && \
|
||||
apt-get autoclean && apt-get autoremove
|
||||
|
||||
# Update pip
|
||||
RUN pip install --upgrade pip
|
||||
|
||||
# Install required base-level python packages
|
||||
COPY ./docker/requirements.txt base_requirements.txt
|
||||
RUN pip install --disable-pip-version-check -U -r base_requirements.txt
|
||||
|
||||
# InvenTree production image:
|
||||
# - Copies required files from local directory
|
||||
# - Installs required python packages from requirements.txt
|
||||
# - Starts a gunicorn webserver
|
||||
|
||||
FROM base as production
|
||||
|
||||
ENV INVENTREE_DEBUG=False
|
||||
|
||||
# As .git directory is not available in production image, we pass the commit information via ENV
|
||||
ENV INVENTREE_COMMIT_HASH="${commit_hash}"
|
||||
ENV INVENTREE_COMMIT_DATE="${commit_date}"
|
||||
|
||||
# Copy source code
|
||||
COPY InvenTree ${INVENTREE_HOME}/InvenTree
|
||||
|
||||
# Copy other key files
|
||||
COPY requirements.txt ${INVENTREE_HOME}/requirements.txt
|
||||
COPY tasks.py ${INVENTREE_HOME}/tasks.py
|
||||
COPY docker/gunicorn.conf.py ${INVENTREE_HOME}/gunicorn.conf.py
|
||||
COPY docker/init.sh ${INVENTREE_MNG_DIR}/init.sh
|
||||
|
||||
# Need to be running from within this directory
|
||||
WORKDIR ${INVENTREE_MNG_DIR}
|
||||
|
||||
# Drop to the inventree user for the production image
|
||||
#RUN adduser inventree
|
||||
#RUN chown -R inventree:inventree ${INVENTREE_HOME}
|
||||
#USER inventree
|
||||
|
||||
# Install InvenTree packages
|
||||
RUN pip3 install --user --disable-pip-version-check -r ${INVENTREE_HOME}/requirements.txt
|
||||
|
||||
# Server init entrypoint
|
||||
ENTRYPOINT ["/bin/bash", "./init.sh"]
|
||||
|
||||
# Launch the production server
|
||||
# TODO: Work out why environment variables cannot be interpolated in this command
|
||||
# TODO: e.g. -b ${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT} fails here
|
||||
CMD gunicorn -c ./gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:8000 --chdir ./InvenTree
|
||||
|
||||
FROM base as dev
|
||||
|
||||
# The development image requires the source code to be mounted to /home/inventree/
|
||||
# So from here, we don't actually "do" anything, apart from some file management
|
||||
|
||||
ENV INVENTREE_DEBUG=True
|
||||
|
||||
# Location for python virtual environment
|
||||
# If the INVENTREE_PY_ENV variable is set, the entrypoint script will use it!
|
||||
ENV INVENTREE_PY_ENV="${INVENTREE_DATA_DIR}/env"
|
||||
|
||||
WORKDIR ${INVENTREE_HOME}
|
||||
|
||||
# Entrypoint ensures that we are running in the python virtual environment
|
||||
ENTRYPOINT ["/bin/bash", "./docker/init.sh"]
|
||||
|
||||
# Launch the development server
|
||||
CMD ["invoke", "server", "-a", "${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT}"]
|
||||
33 InvenTree/InvenTree/admin.py (new file)
@@ -0,0 +1,33 @@
|
||||
"""Admin classes"""
|
||||
|
||||
from import_export.resources import ModelResource
|
||||
|
||||
|
||||
class InvenTreeResource(ModelResource):
|
||||
"""Custom subclass of the ModelResource class provided by django-import-export"
|
||||
|
||||
Ensures that exported data are escaped to prevent malicious formula injection.
|
||||
Ref: https://owasp.org/www-community/attacks/CSV_Injection
|
||||
"""
|
||||
|
||||
def export_resource(self, obj):
|
||||
"""Custom function to override default row export behaviour.
|
||||
|
||||
Specifically, strip illegal leading characters to prevent formula injection
|
||||
"""
|
||||
row = super().export_resource(obj)
|
||||
|
||||
illegal_start_vals = ['@', '=', '+', '-', '@', '\t', '\r', '\n']
|
||||
|
||||
for idx, val in enumerate(row):
|
||||
if type(val) is str:
|
||||
val = val.strip()
|
||||
|
||||
# If the value starts with certain 'suspicious' values, remove it!
|
||||
while len(val) > 0 and val[0] in illegal_start_vals:
|
||||
# Remove the first character
|
||||
val = val[1:]
|
||||
|
||||
row[idx] = val
|
||||
|
||||
return row
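
As an aside, the stripping behaviour above can be illustrated with a small standalone sketch. The sample row values below are invented for demonstration only:

```python
# Standalone illustration of the leading-character stripping performed by
# InvenTreeResource.export_resource above. The sample row is invented.
ILLEGAL_START_VALS = ['@', '=', '+', '-', '\t', '\r', '\n']


def sanitize_row(row):
    """Strip suspicious leading characters from string cells in a row."""
    cleaned = []
    for val in row:
        if isinstance(val, str):
            val = val.strip()
            while val and val[0] in ILLEGAL_START_VALS:
                val = val[1:]
        cleaned.append(val)
    return cleaned


print(sanitize_row(['=HYPERLINK("http://evil")', 'M3 Bolt', 42]))
# ['HYPERLINK("http://evil")', 'M3 Bolt', 42]
```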
|
||||
201 InvenTree/InvenTree/api.py (new file)
@@ -0,0 +1,201 @@
|
||||
"""Main JSON interface views."""
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.http import JsonResponse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from django_filters.rest_framework import DjangoFilterBackend
|
||||
from rest_framework import filters, permissions
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.serializers import ValidationError
|
||||
|
||||
from InvenTree.mixins import ListCreateAPI
|
||||
from InvenTree.permissions import RolePermission
|
||||
|
||||
from .status import is_worker_running
|
||||
from .version import (inventreeApiVersion, inventreeInstanceName,
|
||||
inventreeVersion)
|
||||
from .views import AjaxView
|
||||
|
||||
|
||||
class InfoView(AjaxView):
|
||||
"""Simple JSON endpoint for InvenTree information.
|
||||
|
||||
Use to confirm that the server is running, etc.
|
||||
"""
|
||||
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Serve current server information."""
|
||||
data = {
|
||||
'server': 'InvenTree',
|
||||
'version': inventreeVersion(),
|
||||
'instance': inventreeInstanceName(),
|
||||
'apiVersion': inventreeApiVersion(),
|
||||
'worker_running': is_worker_running(),
|
||||
'plugins_enabled': settings.PLUGINS_ENABLED,
|
||||
}
|
||||
|
||||
return JsonResponse(data)
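
A minimal sketch of querying this endpoint from a client, using the third-party `requests` library. The base URL and the `/api/` path are assumptions used for illustration; the view allows anonymous access (`permissions.AllowAny`), so no token is required:

```python
# Query the server information endpoint shown above.
# Base URL and "/api/" path are assumptions for illustration.
import requests

response = requests.get("http://localhost:8000/api/", timeout=10)
info = response.json()
print(info["server"], info["version"], info["apiVersion"], info["worker_running"])
```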
|
||||
|
||||
|
||||
class NotFoundView(AjaxView):
|
||||
"""Simple JSON view when accessing an invalid API view."""
|
||||
|
||||
permission_classes = [permissions.AllowAny]
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Proces an `not found` event on the API."""
|
||||
data = {
|
||||
'details': _('API endpoint not found'),
|
||||
'url': request.build_absolute_uri(),
|
||||
}
|
||||
|
||||
return JsonResponse(data, status=404)
|
||||
|
||||
|
||||
class BulkDeleteMixin:
|
||||
"""Mixin class for enabling 'bulk delete' operations for various models.
|
||||
|
||||
Bulk delete allows for multiple items to be deleted in a single API query,
|
||||
rather than using multiple API calls to the various detail endpoints.
|
||||
|
||||
This is implemented for two major reasons:
|
||||
- Atomicity (guaranteed that either *all* items are deleted, or *none*)
|
||||
- Speed (single API call and DB query)
|
||||
"""
|
||||
|
||||
def filter_delete_queryset(self, queryset, request):
|
||||
"""Provide custom filtering for the queryset *before* it is deleted"""
|
||||
return queryset
|
||||
|
||||
def delete(self, request, *args, **kwargs):
|
||||
"""Perform a DELETE operation against this list endpoint.
|
||||
|
||||
We expect a list of primary-key (ID) values to be supplied as a JSON object, e.g.
|
||||
{
|
||||
items: [4, 8, 15, 16, 23, 42]
|
||||
}
|
||||
|
||||
"""
|
||||
model = self.serializer_class.Meta.model
|
||||
|
||||
# Extract the items from the request body
|
||||
try:
|
||||
items = request.data.getlist('items', None)
|
||||
except AttributeError:
|
||||
items = request.data.get('items', None)
|
||||
|
||||
# Extract the filters from the request body
|
||||
try:
|
||||
filters = request.data.getlist('filters', None)
|
||||
except AttributeError:
|
||||
filters = request.data.get('filters', None)
|
||||
|
||||
if not items and not filters:
|
||||
raise ValidationError({
|
||||
"non_field_errors": ["List of items or filters must be provided for bulk deletion"],
|
||||
})
|
||||
|
||||
if items and type(items) is not list:
|
||||
raise ValidationError({
|
||||
"items": ["'items' must be supplied as a list object"]
|
||||
})
|
||||
|
||||
if filters and type(filters) is not dict:
|
||||
raise ValidationError({
|
||||
"filters": ["'filters' must be supplied as a dict object"]
|
||||
})
|
||||
|
||||
# Keep track of how many items we deleted
|
||||
n_deleted = 0
|
||||
|
||||
with transaction.atomic():
|
||||
|
||||
# Start with *all* models and perform basic filtering
|
||||
queryset = model.objects.all()
|
||||
queryset = self.filter_delete_queryset(queryset, request)
|
||||
|
||||
# Filter by provided item ID values
|
||||
if items:
|
||||
queryset = queryset.filter(id__in=items)
|
||||
|
||||
# Filter by provided filters
|
||||
if filters:
|
||||
queryset = queryset.filter(**filters)
|
||||
|
||||
n_deleted = queryset.count()
|
||||
queryset.delete()
|
||||
|
||||
return Response(
|
||||
{
|
||||
'success': f"Deleted {n_deleted} items",
|
||||
},
|
||||
status=204
|
||||
)
|
||||
|
||||
|
||||
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
|
||||
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create"""
|
||||
...
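
A minimal sketch of a bulk-delete request of the shape described in the docstring above. The endpoint URL and the authentication token are placeholders; any list endpoint built on `BulkDeleteMixin` is assumed:

```python
# Bulk-delete request matching the payload format documented above.
# URL and token are placeholders, not real values.
import requests

response = requests.delete(
    "http://localhost:8000/api/stock/",          # assumed bulk-delete capable list endpoint
    json={"items": [4, 8, 15, 16, 23, 42]},      # primary keys of the objects to delete
    headers={"Authorization": "Token <your-api-token>"},  # placeholder token
    timeout=10,
)
assert response.status_code == 204  # either *all* items were deleted, or *none* (atomic)
```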
|
||||
|
||||
|
||||
class APIDownloadMixin:
|
||||
"""Mixin for enabling a LIST endpoint to be downloaded a file.
|
||||
|
||||
To download the data, add the ?export=<fmt> to the query string.
|
||||
|
||||
The implementing class must provide a download_queryset method,
|
||||
e.g.
|
||||
|
||||
def download_queryset(self, queryset, export_format):
|
||||
dataset = StockItemResource().export(queryset=queryset)
|
||||
|
||||
filedata = dataset.export(export_format)
|
||||
|
||||
filename = 'InvenTree_Stocktake_{date}.{fmt}'.format(
|
||||
date=datetime.now().strftime("%d-%b-%Y"),
|
||||
fmt=export_format
|
||||
)
|
||||
|
||||
return DownloadFile(filedata, filename)
|
||||
"""
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
"""Generic handler for a download request."""
|
||||
export_format = request.query_params.get('export', None)
|
||||
|
||||
if export_format and export_format in ['csv', 'tsv', 'xls', 'xlsx']:
|
||||
queryset = self.filter_queryset(self.get_queryset())
|
||||
return self.download_queryset(queryset, export_format)
|
||||
|
||||
else:
|
||||
# Default to the parent class implementation
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def download_queryset(self, queryset, export_format):
|
||||
"""This function must be implemented to provide a downloadFile request."""
|
||||
raise NotImplementedError("download_queryset method not implemented!")
|
||||
|
||||
|
||||
class AttachmentMixin:
|
||||
"""Mixin for creating attachment objects, and ensuring the user information is saved correctly."""
|
||||
|
||||
permission_classes = [
|
||||
permissions.IsAuthenticated,
|
||||
RolePermission,
|
||||
]
|
||||
|
||||
filter_backends = [
|
||||
DjangoFilterBackend,
|
||||
filters.OrderingFilter,
|
||||
filters.SearchFilter,
|
||||
]
|
||||
|
||||
def perform_create(self, serializer):
|
||||
"""Save the user information when a file is uploaded."""
|
||||
attachment = serializer.save()
|
||||
attachment.user = self.request.user
|
||||
attachment.save()
|
||||
268 InvenTree/InvenTree/api_tester.py (new file)
@@ -0,0 +1,268 @@
|
||||
"""Helper functions for performing API unit tests."""
|
||||
|
||||
import csv
|
||||
import io
|
||||
import re
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import Group
|
||||
from django.http.response import StreamingHttpResponse
|
||||
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
|
||||
class UserMixin:
|
||||
"""Mixin to setup a user and login for tests.
|
||||
|
||||
Use parameters to set username, password, email, roles and permissions.
|
||||
"""
|
||||
|
||||
# User information
|
||||
username = 'testuser'
|
||||
password = 'mypassword'
|
||||
email = 'test@testing.com'
|
||||
|
||||
superuser = False
|
||||
is_staff = True
|
||||
auto_login = True
|
||||
|
||||
# Set list of roles automatically associated with the user
|
||||
roles = []
|
||||
|
||||
def setUp(self):
|
||||
"""Setup for all tests."""
|
||||
super().setUp()
|
||||
|
||||
# Create a user to log in with
|
||||
self.user = get_user_model().objects.create_user(
|
||||
username=self.username,
|
||||
password=self.password,
|
||||
email=self.email
|
||||
)
|
||||
|
||||
# Create a group for the user
|
||||
self.group = Group.objects.create(name='my_test_group')
|
||||
self.user.groups.add(self.group)
|
||||
|
||||
if self.superuser:
|
||||
self.user.is_superuser = True
|
||||
|
||||
if self.is_staff:
|
||||
self.user.is_staff = True
|
||||
|
||||
self.user.save()
|
||||
|
||||
# Assign all roles if set
|
||||
if self.roles == 'all':
|
||||
self.assignRole(assign_all=True)
|
||||
# else filter the roles
|
||||
else:
|
||||
for role in self.roles:
|
||||
self.assignRole(role)
|
||||
|
||||
if self.auto_login:
|
||||
self.client.login(username=self.username, password=self.password)
|
||||
|
||||
def assignRole(self, role=None, assign_all: bool = False):
|
||||
"""Set the user roles for the registered user."""
|
||||
# role is of the format 'rule.permission' e.g. 'part.add'
|
||||
|
||||
if not assign_all and role:
|
||||
rule, perm = role.split('.')
|
||||
|
||||
for ruleset in self.group.rule_sets.all():
|
||||
|
||||
if assign_all or ruleset.name == rule:
|
||||
|
||||
if assign_all or perm == 'view':
|
||||
ruleset.can_view = True
|
||||
elif assign_all or perm == 'change':
|
||||
ruleset.can_change = True
|
||||
elif assign_all or perm == 'delete':
|
||||
ruleset.can_delete = True
|
||||
elif assign_all or perm == 'add':
|
||||
ruleset.can_add = True
|
||||
|
||||
ruleset.save()
|
||||
break
|
||||
|
||||
|
||||
class InvenTreeAPITestCase(UserMixin, APITestCase):
|
||||
"""Base class for running InvenTree API tests."""
|
||||
|
||||
def getActions(self, url):
|
||||
"""Return a dict of the 'actions' available at a given endpoint.
|
||||
|
||||
Makes use of the HTTP 'OPTIONS' method to request this.
|
||||
"""
|
||||
response = self.client.options(url)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
actions = response.data.get('actions', None)
|
||||
|
||||
if not actions:
|
||||
actions = {}
|
||||
|
||||
return actions
|
||||
|
||||
def get(self, url, data=None, expected_code=200):
|
||||
"""Issue a GET request."""
|
||||
# Set default - see B006
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
response = self.client.get(url, data, format='json')
|
||||
|
||||
if expected_code is not None:
|
||||
|
||||
if response.status_code != expected_code:
|
||||
print(f"Unexpected response at '{url}':")
|
||||
print(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def post(self, url, data=None, expected_code=None, format='json'):
|
||||
"""Issue a POST request."""
|
||||
response = self.client.post(url, data=data, format=format)
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
if expected_code is not None:
|
||||
|
||||
if response.status_code != expected_code:
|
||||
print(f"Unexpected response at '{url}': status code = {response.status_code}")
|
||||
|
||||
if hasattr(response, 'data'):
|
||||
print(response.data)
|
||||
else:
|
||||
print(f"(response object {type(response)} has no 'data' attribute")
|
||||
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def delete(self, url, data=None, expected_code=None, format='json'):
|
||||
"""Issue a DELETE request."""
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
response = self.client.delete(url, data=data, format=format)
|
||||
|
||||
if expected_code is not None:
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def patch(self, url, data, expected_code=None, format='json'):
|
||||
"""Issue a PATCH request."""
|
||||
response = self.client.patch(url, data=data, format=format)
|
||||
|
||||
if expected_code is not None:
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def put(self, url, data, expected_code=None, format='json'):
|
||||
"""Issue a PUT request."""
|
||||
response = self.client.put(url, data=data, format=format)
|
||||
|
||||
if expected_code is not None:
|
||||
|
||||
if response.status_code != expected_code:
|
||||
print(f"Unexpected response at '{url}':")
|
||||
print(response.data)
|
||||
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def options(self, url, expected_code=None):
|
||||
"""Issue an OPTIONS request."""
|
||||
response = self.client.options(url, format='json')
|
||||
|
||||
if expected_code is not None:
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
return response
|
||||
|
||||
def download_file(self, url, data, expected_code=None, expected_fn=None, decode=True):
|
||||
"""Download a file from the server, and return an in-memory file."""
|
||||
response = self.client.get(url, data=data, format='json')
|
||||
|
||||
if expected_code is not None:
|
||||
self.assertEqual(response.status_code, expected_code)
|
||||
|
||||
# Check that the response is of the correct type
|
||||
if not isinstance(response, StreamingHttpResponse):
|
||||
raise ValueError("Response is not a StreamingHttpResponse object as expected")
|
||||
|
||||
# Extract filename
|
||||
disposition = response.headers['Content-Disposition']
|
||||
|
||||
result = re.search(r'attachment; filename="([\w.]+)"', disposition)
|
||||
|
||||
fn = result.groups()[0]
|
||||
|
||||
if expected_fn is not None:
|
||||
self.assertEqual(expected_fn, fn)
|
||||
|
||||
if decode:
|
||||
# Decode data and return as StringIO file object
|
||||
fo = io.StringIO()
|
||||
fo.name = fn
|
||||
fo.write(response.getvalue().decode('UTF-8'))
|
||||
else:
|
||||
# Return a BytesIO file object
|
||||
fo = io.BytesIO()
|
||||
fo.name = fn
|
||||
fo.write(response.getvalue())
|
||||
|
||||
fo.seek(0)
|
||||
|
||||
return fo
|
||||
|
||||
def process_csv(self, fo, delimiter=',', required_cols=None, excluded_cols=None, required_rows=None):
|
||||
"""Helper function to process and validate a downloaded csv file."""
|
||||
# Check that the correct object type has been passed
|
||||
self.assertTrue(isinstance(fo, io.StringIO))
|
||||
|
||||
fo.seek(0)
|
||||
|
||||
reader = csv.reader(fo, delimiter=delimiter)
|
||||
|
||||
headers = []
|
||||
rows = []
|
||||
|
||||
for idx, row in enumerate(reader):
|
||||
if idx == 0:
|
||||
headers = row
|
||||
else:
|
||||
rows.append(row)
|
||||
|
||||
if required_cols is not None:
|
||||
for col in required_cols:
|
||||
self.assertIn(col, headers)
|
||||
|
||||
if excluded_cols is not None:
|
||||
for col in excluded_cols:
|
||||
self.assertNotIn(col, headers)
|
||||
|
||||
if required_rows is not None:
|
||||
self.assertEqual(len(rows), required_rows)
|
||||
|
||||
# Return the file data as a list of dict items, based on the headers
|
||||
data = []
|
||||
|
||||
for row in rows:
|
||||
entry = {}
|
||||
|
||||
for idx, col in enumerate(headers):
|
||||
entry[col] = row[idx]
|
||||
|
||||
data.append(entry)
|
||||
|
||||
return data
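
A minimal sketch of a test case built on these helpers; the endpoint URL and the role name are illustrative assumptions:

```python
# Illustrative test case built on the helpers above. The endpoint URL and the
# role name are assumptions used only to demonstrate the mixin attributes.
from InvenTree.api_tester import InvenTreeAPITestCase


class PartListAPITest(InvenTreeAPITestCase):
    """Example API test which logs in automatically with an assumed 'part.view' role."""

    roles = ['part.view']

    def test_part_list(self):
        """The (assumed) part list endpoint should return a successful response."""
        response = self.get('/api/part/', expected_code=200)
        self.assertIsInstance(response.data, (list, dict))
```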
|
||||
256 InvenTree/InvenTree/api_version.py (new file)
@@ -0,0 +1,256 @@
|
||||
"""InvenTree API version information."""
|
||||
|
||||
|
||||
# InvenTree API version
|
||||
INVENTREE_API_VERSION = 69
|
||||
|
||||
"""
|
||||
Increment this API version number whenever there is a significant change to the API that any clients need to know about
|
||||
|
||||
v69 -> 2022-08-01 : https://github.com/inventree/InvenTree/pull/3443
|
||||
- Updates the PartCategory list API:
|
||||
- Improve query efficiency: O(n) becomes O(1)
|
||||
- Rename 'parts' field to 'part_count'
|
||||
- Updates the StockLocation list API:
|
||||
- Improve query efficiency: O(n) becomes O(1)
|
||||
|
||||
v68 -> 2022-07-27 : https://github.com/inventree/InvenTree/pull/3417
|
||||
- Allows SupplierPart list to be filtered by SKU value
|
||||
- Allows SupplierPart list to be filtered by MPN value
|
||||
|
||||
v67 -> 2022-07-25 : https://github.com/inventree/InvenTree/pull/3395
|
||||
- Adds a 'requirements' endpoint for Part instance
|
||||
- Provides information on outstanding order requirements for a given part
|
||||
|
||||
v66 -> 2022-07-24 : https://github.com/inventree/InvenTree/pull/3393
|
||||
- Part images can now be downloaded from a remote URL via the API
|
||||
- Company images can now be downloaded from a remote URL via the API
|
||||
|
||||
v65 -> 2022-07-15 : https://github.com/inventree/InvenTree/pull/3335
|
||||
- Annotates 'in_stock' quantity to the SupplierPart API
|
||||
|
||||
v64 -> 2022-07-08 : https://github.com/inventree/InvenTree/pull/3310
|
||||
- Annotate 'on_order' quantity to BOM list API
|
||||
- Allow BOM List API endpoint to be filtered by "on_order" parameter
|
||||
|
||||
v63 -> 2022-07-06 : https://github.com/inventree/InvenTree/pull/3301
|
||||
- Allow BOM List API endpoint to be filtered by "available_stock" paramater
|
||||
|
||||
v62 -> 2022-07-05 : https://github.com/inventree/InvenTree/pull/3296
|
||||
- Allows search on BOM List API endpoint
|
||||
- Allows ordering on BOM List API endpoint
|
||||
|
||||
v61 -> 2022-06-12 : https://github.com/inventree/InvenTree/pull/3183
|
||||
- Migrate the "Convert Stock Item" form class to use the API
|
||||
- There is now an API endpoint for converting a stock item to a valid variant
|
||||
|
||||
v60 -> 2022-06-08 : https://github.com/inventree/InvenTree/pull/3148
|
||||
- Add availability data fields to the SupplierPart model
|
||||
|
||||
v59 -> 2022-06-07 : https://github.com/inventree/InvenTree/pull/3154
|
||||
- Adds further improvements to BulkDelete mixin class
|
||||
- Fixes multiple bugs in custom OPTIONS metadata implementation
|
||||
- Adds 'bulk delete' for Notifications
|
||||
|
||||
v58 -> 2022-06-06 : https://github.com/inventree/InvenTree/pull/3146
|
||||
- Adds a BulkDelete API mixin class for fast, safe deletion of multiple objects with a single API request
|
||||
|
||||
v57 -> 2022-06-05 : https://github.com/inventree/InvenTree/pull/3130
|
||||
- Transfer PartCategoryTemplateParameter actions to the API
|
||||
|
||||
v56 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3123
|
||||
- Expose the PartParameterTemplate model to use the API
|
||||
|
||||
v55 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3120
|
||||
- Converts the 'StockItemReturn' functionality to make use of the API
|
||||
|
||||
v54 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3117
|
||||
- Adds 'available_stock' annotation on the SalesOrderLineItem API
|
||||
- Adds (well, fixes) 'overdue' annotation on the SalesOrderLineItem API
|
||||
|
||||
v53 -> 2022-06-01 : https://github.com/inventree/InvenTree/pull/3110
|
||||
- Adds extra search fields to the BuildOrder list API endpoint
|
||||
|
||||
v52 -> 2022-05-31 : https://github.com/inventree/InvenTree/pull/3103
|
||||
- Allow part list API to be searched by supplier SKU
|
||||
|
||||
v51 -> 2022-05-24 : https://github.com/inventree/InvenTree/pull/3058
|
||||
- Adds new fields to the SalesOrderShipment model
|
||||
|
||||
v50 -> 2022-05-18 : https://github.com/inventree/InvenTree/pull/2912
|
||||
- Implement Attachments for manufacturer parts
|
||||
|
||||
v49 -> 2022-05-09 : https://github.com/inventree/InvenTree/pull/2957
|
||||
- Allows filtering of plugin list by 'active' status
|
||||
- Allows filtering of plugin list by 'mixin' support
|
||||
- Adds endpoint to "identify" or "locate" stock items and locations (using plugins)
|
||||
|
||||
v48 -> 2022-05-12 : https://github.com/inventree/InvenTree/pull/2977
|
||||
- Adds "export to file" functionality for PurchaseOrder API endpoint
|
||||
- Adds "export to file" functionality for SalesOrder API endpoint
|
||||
- Adds "export to file" functionality for BuildOrder API endpoint
|
||||
|
||||
v47 -> 2022-05-10 : https://github.com/inventree/InvenTree/pull/2964
|
||||
- Fixes barcode API error response when scanning a StockItem which does not exist
|
||||
- Fixes barcode API error response when scanning a StockLocation which does not exist
|
||||
|
||||
v46 -> 2022-05-09
|
||||
- Fixes read permissions on settings API
|
||||
- Allows non-staff users to read global settings via the API
|
||||
|
||||
v45 -> 2022-05-08 : https://github.com/inventree/InvenTree/pull/2944
|
||||
- Settings are now accessed via the API using their unique key, not their PK
|
||||
- This allows the settings to be accessed without prior knowledge of the PK
|
||||
|
||||
v44 -> 2022-05-04 : https://github.com/inventree/InvenTree/pull/2931
|
||||
- Converting more server-side rendered forms to the API
|
||||
- Exposes more core functionality to API endpoints
|
||||
|
||||
v43 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2875
|
||||
- Adds API detail endpoint for PartSalePrice model
|
||||
- Adds API detail endpoint for PartInternalPrice model
|
||||
|
||||
v42 -> 2022-04-26 : https://github.com/inventree/InvenTree/pull/2833
|
||||
- Adds variant stock information to the Part and BomItem serializers
|
||||
|
||||
v41 -> 2022-04-26
|
||||
- Fixes 'variant_of' filter for Part list endpoint
|
||||
|
||||
v40 -> 2022-04-19
|
||||
- Adds ability to filter StockItem list by "tracked" parameter
|
||||
- This checks the serial number or batch code fields
|
||||
|
||||
v39 -> 2022-04-18
|
||||
- Adds ability to filter StockItem list by "has_batch" parameter
|
||||
|
||||
v38 -> 2022-04-14 : https://github.com/inventree/InvenTree/pull/2828
|
||||
- Adds the ability to include stock test results for "installed items"
|
||||
|
||||
v37 -> 2022-04-07 : https://github.com/inventree/InvenTree/pull/2806
|
||||
- Adds extra stock availability information to the BomItem serializer
|
||||
|
||||
v36 -> 2022-04-03
|
||||
- Adds ability to filter part list endpoint by unallocated_stock argument
|
||||
|
||||
v35 -> 2022-04-01 : https://github.com/inventree/InvenTree/pull/2797
|
||||
- Adds stock allocation information to the Part API
|
||||
- Adds calculated field for "unallocated_quantity"
|
||||
|
||||
v34 -> 2022-03-25
|
||||
- Change permissions for "plugin list" API endpoint (now allows any authenticated user)
|
||||
|
||||
v33 -> 2022-03-24
|
||||
- Adds "plugins_enabled" information to root API endpoint
|
||||
|
||||
v32 -> 2022-03-19
|
||||
- Adds "parameters" detail to Part API endpoint (use ¶meters=true)
|
||||
- Adds ability to filter PartParameterTemplate API by Part instance
|
||||
- Adds ability to filter PartParameterTemplate API by PartCategory instance
|
||||
|
||||
v31 -> 2022-03-14
|
||||
- Adds "updated" field to SupplierPriceBreakList and SupplierPriceBreakDetail API endpoints
|
||||
|
||||
v30 -> 2022-03-09
|
||||
- Adds "exclude_location" field to BuildAutoAllocation API endpoint
|
||||
- Allows BuildItem API endpoint to be filtered by BomItem relation
|
||||
|
||||
v29 -> 2022-03-08
|
||||
- Adds "scheduling" endpoint for predicted stock scheduling information
|
||||
|
||||
v28 -> 2022-03-04
|
||||
- Adds an API endpoint for auto allocation of stock items against a build order
|
||||
- Ref: https://github.com/inventree/InvenTree/pull/2713
|
||||
|
||||
v27 -> 2022-02-28
|
||||
- Adds target_date field to individual line items for purchase orders and sales orders
|
||||
|
||||
v26 -> 2022-02-17
|
||||
- Adds API endpoint for uploading a BOM file and extracting data
|
||||
|
||||
v25 -> 2022-02-17
|
||||
- Adds ability to filter "part" list endpoint by "in_bom_for" argument
|
||||
|
||||
v24 -> 2022-02-10
|
||||
- Adds API endpoint for deleting (cancelling) build order outputs
|
||||
|
||||
v23 -> 2022-02-02
|
||||
- Adds API endpoints for managing plugin classes
|
||||
- Adds API endpoints for managing plugin settings
|
||||
|
||||
v22 -> 2021-12-20
|
||||
- Adds API endpoint to "merge" multiple stock items
|
||||
|
||||
v21 -> 2021-12-04
|
||||
- Adds support for multiple "Shipments" against a SalesOrder
|
||||
- Refactors process for stock allocation against a SalesOrder
|
||||
|
||||
v20 -> 2021-12-03
|
||||
- Adds ability to filter POLineItem endpoint by "base_part"
|
||||
- Adds optional "order_detail" to POLineItem list endpoint
|
||||
|
||||
v19 -> 2021-12-02
|
||||
- Adds the ability to filter the StockItem API by "part_tree"
|
||||
- Returns only stock items which match a particular part.tree_id field
|
||||
|
||||
v18 -> 2021-11-15
|
||||
- Adds the ability to filter BomItem API by "uses" field
|
||||
- This returns a list of all BomItems which "use" the specified part
|
||||
- Includes inherited BomItem objects
|
||||
|
||||
v17 -> 2021-11-09
|
||||
- Adds API endpoints for GLOBAL and USER settings objects
|
||||
- Ref: https://github.com/inventree/InvenTree/pull/2275
|
||||
|
||||
v16 -> 2021-10-17
|
||||
- Adds API endpoint for completing build order outputs
|
||||
|
||||
v15 -> 2021-10-06
|
||||
- Adds detail endpoint for SalesOrderAllocation model
|
||||
- Allows use of the API forms interface for adjusting SalesOrderAllocation objects
|
||||
|
||||
v14 -> 2021-10-05
|
||||
- Stock adjustment actions API is improved, using native DRF serializer support
|
||||
- However adjustment actions now only support 'pk' as a lookup field
|
||||
|
||||
v13 -> 2021-10-05
|
||||
- Adds API endpoint to allocate stock items against a BuildOrder
|
||||
- Updates StockItem API with improved filtering against BomItem data
|
||||
|
||||
v12 -> 2021-09-07
|
||||
- Adds API endpoint to receive stock items against a PurchaseOrder
|
||||
|
||||
v11 -> 2021-08-26
|
||||
- Adds "units" field to PartBriefSerializer
|
||||
- This allows units to be introspected from the "part_detail" field in the StockItem serializer
|
||||
|
||||
v10 -> 2021-08-23
|
||||
- Adds "purchase_price_currency" to StockItem serializer
|
||||
- Adds "purchase_price_string" to StockItem serializer
|
||||
- Purchase price is now writable for StockItem serializer
|
||||
|
||||
v9 -> 2021-08-09
|
||||
- Adds "price_string" to part pricing serializers
|
||||
|
||||
v8 -> 2021-07-19
|
||||
- Refactors the API interface for SupplierPart and ManufacturerPart models
|
||||
- ManufacturerPart objects can no longer be created via the SupplierPart API endpoint
|
||||
|
||||
v7 -> 2021-07-03
|
||||
- Introduced the concept of "API forms" in https://github.com/inventree/InvenTree/pull/1716
|
||||
- API OPTIONS endpoints provide comprehensive field metadata
|
||||
- Multiple new API endpoints added for database models
|
||||
|
||||
v6 -> 2021-06-23
|
||||
- Part and Company images can now be directly uploaded via the REST API
|
||||
|
||||
v5 -> 2021-06-21
|
||||
- Adds API interface for manufacturer part parameters
|
||||
|
||||
v4 -> 2021-06-01
|
||||
- BOM items can now accept "variant stock" to be assigned against them
|
||||
- Many slight API tweaks were needed to get this to work properly!
|
||||
|
||||
v3 -> 2021-05-22:
|
||||
- The updated StockItem "history tracking" now uses a different interface
|
||||
|
||||
"""
|
||||
216 InvenTree/InvenTree/apps.py (new file)
@@ -0,0 +1,216 @@
|
||||
"""AppConfig for inventree app."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.apps import AppConfig
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.exceptions import AppRegistryNotReady
|
||||
from django.db import transaction
|
||||
from django.db.utils import IntegrityError
|
||||
|
||||
import InvenTree.tasks
|
||||
from InvenTree.ready import canAppAccessDatabase, isInTestMode
|
||||
|
||||
from .config import get_setting
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
|
||||
|
||||
class InvenTreeConfig(AppConfig):
|
||||
"""AppConfig for inventree app."""
|
||||
name = 'InvenTree'
|
||||
|
||||
def ready(self):
|
||||
"""Setup background tasks and update exchange rates."""
|
||||
if canAppAccessDatabase():
|
||||
|
||||
self.remove_obsolete_tasks()
|
||||
|
||||
self.start_background_tasks()
|
||||
|
||||
if not isInTestMode(): # pragma: no cover
|
||||
self.update_exchange_rates()
|
||||
|
||||
self.collect_notification_methods()
|
||||
|
||||
if canAppAccessDatabase() or settings.TESTING_ENV:
|
||||
self.add_user_on_startup()
|
||||
|
||||
def remove_obsolete_tasks(self):
|
||||
"""Delete any obsolete scheduled tasks in the database."""
|
||||
obsolete = [
|
||||
'InvenTree.tasks.delete_expired_sessions',
|
||||
'stock.tasks.delete_old_stock_items',
|
||||
]
|
||||
|
||||
try:
|
||||
from django_q.models import Schedule
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
return
|
||||
|
||||
# Remove any existing obsolete tasks
|
||||
Schedule.objects.filter(func__in=obsolete).delete()
|
||||
|
||||
def start_background_tasks(self):
|
||||
"""Start all background tests for InvenTree."""
|
||||
try:
|
||||
from django_q.models import Schedule
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
logger.warning("Cannot start background tasks - app registry not ready")
|
||||
return
|
||||
|
||||
logger.info("Starting background tasks...")
|
||||
|
||||
# Remove successful task results from the database
|
||||
InvenTree.tasks.schedule_task(
|
||||
'InvenTree.tasks.delete_successful_tasks',
|
||||
schedule_type=Schedule.DAILY,
|
||||
)
|
||||
|
||||
# Check for InvenTree updates
|
||||
InvenTree.tasks.schedule_task(
|
||||
'InvenTree.tasks.check_for_updates',
|
||||
schedule_type=Schedule.DAILY
|
||||
)
|
||||
|
||||
# Heartbeat to let the server know the background worker is running
|
||||
InvenTree.tasks.schedule_task(
|
||||
'InvenTree.tasks.heartbeat',
|
||||
schedule_type=Schedule.MINUTES,
|
||||
minutes=15
|
||||
)
|
||||
|
||||
# Keep exchange rates up to date
|
||||
InvenTree.tasks.schedule_task(
|
||||
'InvenTree.tasks.update_exchange_rates',
|
||||
schedule_type=Schedule.DAILY,
|
||||
)
|
||||
|
||||
# Delete old error messages
|
||||
InvenTree.tasks.schedule_task(
|
||||
'InvenTree.tasks.delete_old_error_logs',
|
||||
schedule_type=Schedule.DAILY,
|
||||
)
|
||||
|
||||
# Delete old notification records
|
||||
InvenTree.tasks.schedule_task(
|
||||
'common.tasks.delete_old_notifications',
|
||||
schedule_type=Schedule.DAILY,
|
||||
)
|
||||
|
||||
# Check for overdue purchase orders
|
||||
InvenTree.tasks.schedule_task(
|
||||
'order.tasks.check_overdue_purchase_orders',
|
||||
schedule_type=Schedule.DAILY
|
||||
)
|
||||
|
||||
# Check for overdue sales orders
|
||||
InvenTree.tasks.schedule_task(
|
||||
'order.tasks.check_overdue_sales_orders',
|
||||
schedule_type=Schedule.DAILY,
|
||||
)
|
||||
|
||||
# Check for overdue build orders
|
||||
InvenTree.tasks.schedule_task(
|
||||
'build.tasks.check_overdue_build_orders',
|
||||
schedule_type=Schedule.DAILY
|
||||
)
|
||||
|
||||
def update_exchange_rates(self): # pragma: no cover
|
||||
"""Update exchange rates each time the server is started.
|
||||
|
||||
Only runs *if*:
|
||||
a) The rates have not been updated recently (within the last day)
|
||||
b) The base exchange rate has been altered
|
||||
"""
|
||||
try:
|
||||
from djmoney.contrib.exchange.models import ExchangeBackend
|
||||
|
||||
from common.settings import currency_code_default
|
||||
from InvenTree.tasks import update_exchange_rates
|
||||
except AppRegistryNotReady: # pragma: no cover
|
||||
return
|
||||
|
||||
base_currency = currency_code_default()
|
||||
|
||||
update = False
|
||||
|
||||
try:
|
||||
backend = ExchangeBackend.objects.get(name='InvenTreeExchange')
|
||||
|
||||
last_update = backend.last_update
|
||||
|
||||
if last_update is None:
|
||||
# Never been updated
|
||||
logger.info("Exchange backend has never been updated")
|
||||
update = True
|
||||
|
||||
# Backend currency has changed?
|
||||
if base_currency != backend.base_currency:
|
||||
logger.info(f"Base currency changed from {backend.base_currency} to {base_currency}")
|
||||
update = True
|
||||
|
||||
except (ExchangeBackend.DoesNotExist):
|
||||
logger.info("Exchange backend not found - updating")
|
||||
update = True
|
||||
|
||||
except Exception:
|
||||
# Some other error - potentially the tables are not ready yet
|
||||
return
|
||||
|
||||
if update:
|
||||
try:
|
||||
update_exchange_rates()
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating exchange rates: {e}")
|
||||
|
||||
def add_user_on_startup(self):
|
||||
"""Add a user on startup."""
|
||||
# stop if checks were already created
|
||||
if hasattr(settings, 'USER_ADDED') and settings.USER_ADDED:
|
||||
return
|
||||
|
||||
# get values
|
||||
add_user = get_setting('INVENTREE_ADMIN_USER', 'admin_user')
|
||||
add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
|
||||
add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
|
||||
|
||||
# check if all values are present
|
||||
set_variables = 0
|
||||
|
||||
for tested_var in [add_user, add_email, add_password]:
|
||||
if tested_var:
|
||||
set_variables += 1
|
||||
|
||||
# no variable set -> do not try anything
|
||||
if set_variables == 0:
|
||||
settings.USER_ADDED = True
|
||||
return
|
||||
|
||||
# not all needed variables set
|
||||
if set_variables < 3:
|
||||
logger.warning('Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD')
|
||||
settings.USER_ADDED = True
|
||||
return
|
||||
|
||||
# good to go -> create user
|
||||
user = get_user_model()
|
||||
try:
|
||||
with transaction.atomic():
|
||||
if user.objects.filter(username=add_user).exists():
|
||||
logger.info(f"User {add_user} already exists - skipping creation")
|
||||
else:
|
||||
new_user = user.objects.create_superuser(add_user, add_email, add_password)
|
||||
logger.info(f'User {str(new_user)} was created!')
|
||||
except IntegrityError as _e:
|
||||
logger.warning(f'The user "{add_user}" could not be created due to the following error:\n{str(_e)}')
|
||||
|
||||
# do not try again
|
||||
settings.USER_ADDED = True
|
||||
|
||||
def collect_notification_methods(self):
|
||||
"""Collect all notification methods."""
|
||||
from common.notifications import storage
|
||||
|
||||
storage.collect()
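The schedule_task() calls above all follow the same pattern, which can be reused for additional periodic jobs. A minimal sketch, assuming a hypothetical task at the dotted path 'myplugin.tasks.cleanup':

import InvenTree.tasks
from django_q.models import Schedule

# Register a daily background job, mirroring the built-in calls above.
# 'myplugin.tasks.cleanup' is a hypothetical dotted path to a callable.
InvenTree.tasks.schedule_task(
    'myplugin.tasks.cleanup',
    schedule_type=Schedule.DAILY,
)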
|
||||
84
InvenTree/InvenTree/ci_render_js.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Pull rendered copies of the templated.
|
||||
|
||||
Only used for testing the js files! This file is omitted from coverage.
|
||||
"""
|
||||
|
||||
import os # pragma: no cover
|
||||
import pathlib # pragma: no cover
|
||||
|
||||
from InvenTree.helpers import InvenTreeTestCase # pragma: no cover
|
||||
|
||||
|
||||
class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
|
||||
"""A unit test to "render" javascript files.
|
||||
|
||||
The server renders templated javascript files,
|
||||
we need the fully-rendered files for linting and static tests.
|
||||
"""
|
||||
|
||||
def download_file(self, filename, prefix):
|
||||
"""Function to `download`(copy) a file to a temporay firectory."""
|
||||
url = os.path.join(prefix, filename)
|
||||
|
||||
response = self.client.get(url)
|
||||
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
output_dir = os.path.join(
|
||||
here,
|
||||
'..',
|
||||
'..',
|
||||
'js_tmp',
|
||||
)
|
||||
|
||||
output_dir = os.path.abspath(output_dir)
|
||||
|
||||
if not os.path.exists(output_dir):
|
||||
os.mkdir(output_dir)
|
||||
|
||||
output_file = os.path.join(
|
||||
output_dir,
|
||||
filename,
|
||||
)
|
||||
|
||||
with open(output_file, 'wb') as output:
|
||||
output.write(response.content)
|
||||
|
||||
def download_files(self, subdir, prefix):
|
||||
"""Download files in directory."""
|
||||
here = os.path.abspath(os.path.dirname(__file__))
|
||||
|
||||
js_template_dir = os.path.join(
|
||||
here,
|
||||
'..',
|
||||
'templates',
|
||||
'js',
|
||||
)
|
||||
|
||||
directory = os.path.join(js_template_dir, subdir)
|
||||
|
||||
directory = os.path.abspath(directory)
|
||||
|
||||
js_files = pathlib.Path(directory).rglob('*.js')
|
||||
|
||||
n = 0
|
||||
|
||||
for f in js_files:
|
||||
js = os.path.basename(f)
|
||||
|
||||
self.download_file(js, prefix)
|
||||
|
||||
n += 1
|
||||
|
||||
return n
|
||||
|
||||
def test_render_files(self):
|
||||
"""Look for all javascript files."""
|
||||
n = 0
|
||||
|
||||
print("Rendering javascript files...")
|
||||
|
||||
n += self.download_files('translated', '/js/i18n')
|
||||
n += self.download_files('dynamic', '/js/dynamic')
|
||||
|
||||
print(f"Rendered {n} javascript files.")
|
||||
205
InvenTree/InvenTree/config.py
Normal file
@@ -0,0 +1,205 @@
|
||||
"""Helper functions for loading InvenTree configuration options."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import string
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def is_true(x):
|
||||
"""Shortcut function to determine if a value "looks" like a boolean"""
|
||||
return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on']
|
||||
|
||||
|
||||
def get_base_dir() -> Path:
|
||||
"""Returns the base (top-level) InvenTree directory."""
|
||||
return Path(__file__).parent.parent.resolve()
|
||||
|
||||
|
||||
def get_config_file(create=True) -> Path:
|
||||
"""Returns the path of the InvenTree configuration file.
|
||||
|
||||
Note: It will be created if it does not already exist!
|
||||
"""
|
||||
base_dir = get_base_dir()
|
||||
|
||||
cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')
|
||||
|
||||
if cfg_filename:
|
||||
cfg_filename = Path(cfg_filename.strip()).resolve()
|
||||
else:
|
||||
# Config file is *not* specified - use the default
|
||||
cfg_filename = base_dir.joinpath('config.yaml').resolve()
|
||||
|
||||
if not cfg_filename.exists() and create:
|
||||
print("InvenTree configuration file 'config.yaml' not found - creating default file")
|
||||
|
||||
cfg_template = base_dir.joinpath("config_template.yaml")
|
||||
shutil.copyfile(cfg_template, cfg_filename)
|
||||
print(f"Created config file {cfg_filename}")
|
||||
|
||||
return cfg_filename
|
||||
|
||||
|
||||
def load_config_data() -> dict:
|
||||
"""Load configuration data from the config file."""
|
||||
|
||||
cfg_file = get_config_file()
|
||||
|
||||
with open(cfg_file, 'r') as cfg:
|
||||
data = yaml.safe_load(cfg)
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_setting(env_var=None, config_key=None, default_value=None):
|
||||
"""Helper function for retrieving a configuration setting value.
|
||||
|
||||
- First preference is to look for the environment variable
|
||||
- Second preference is to look for the value of the settings file
|
||||
- Third preference is the default value
|
||||
|
||||
Arguments:
|
||||
env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
|
||||
config_key: Key to lookup in the configuration file
|
||||
default_value: Value to return if first two options are not provided
|
||||
|
||||
"""
|
||||
|
||||
# First, try to load from the environment variables
|
||||
if env_var is not None:
|
||||
val = os.getenv(env_var, None)
|
||||
|
||||
if val is not None:
|
||||
return val
|
||||
|
||||
# Next, try to load from configuration file
|
||||
if config_key is not None:
|
||||
cfg_data = load_config_data()
|
||||
|
||||
result = None
|
||||
|
||||
# Hack to allow 'path traversal' in configuration file
|
||||
for key in config_key.strip().split('.'):
|
||||
|
||||
if type(cfg_data) is not dict or key not in cfg_data:
|
||||
result = None
|
||||
break
|
||||
|
||||
result = cfg_data[key]
|
||||
cfg_data = cfg_data[key]
|
||||
|
||||
if result is not None:
|
||||
return result
|
||||
|
||||
# Finally, return the default value
|
||||
return default_value
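For illustration, the lookup precedence described above means a call like the following (the environment variable and configuration key are hypothetical) first checks the environment, then the 'config.yaml' entry, and finally falls back to the default:

# Returns os.environ['ACME_COLOUR'] if set, otherwise the nested
# 'acme.colour' entry from config.yaml, otherwise 'blue'
colour = get_setting('ACME_COLOUR', 'acme.colour', 'blue')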
|
||||
|
||||
|
||||
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
|
||||
"""Helper function for retreiving a boolean configuration setting"""
|
||||
|
||||
return is_true(get_setting(env_var, config_key, default_value))
|
||||
|
||||
|
||||
def get_media_dir(create=True):
|
||||
"""Return the absolute path for the 'media' directory (where uploaded files are stored)"""
|
||||
|
||||
md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')
|
||||
|
||||
if not md:
|
||||
raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified')
|
||||
|
||||
md = Path(md).resolve()
|
||||
|
||||
if create:
|
||||
md.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
return md
|
||||
|
||||
|
||||
def get_static_dir(create=True):
|
||||
"""Return the absolute path for the 'static' directory (where static files are stored)"""
|
||||
|
||||
sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root')
|
||||
|
||||
if not sd:
|
||||
raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified')
|
||||
|
||||
sd = Path(sd).resolve()
|
||||
|
||||
if create:
|
||||
sd.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
return sd
|
||||
|
||||
|
||||
def get_plugin_file():
|
||||
"""Returns the path of the InvenTree plugins specification file.
|
||||
|
||||
Note: It will be created if it does not already exist!
|
||||
"""
|
||||
|
||||
# Check if the plugin.txt file (specifying required plugins) is specified
|
||||
plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')
|
||||
|
||||
if not plugin_file:
|
||||
# If not specified, look in the same directory as the configuration file
|
||||
config_dir = get_config_file().parent
|
||||
plugin_file = config_dir.joinpath('plugins.txt')
|
||||
else:
|
||||
# Make sure we are using a modern Path object
|
||||
plugin_file = Path(plugin_file)
|
||||
|
||||
if not plugin_file.exists():
|
||||
logger.warning("Plugin configuration file does not exist - creating default file")
|
||||
logger.info(f"Creating plugin file at '{plugin_file}'")
|
||||
|
||||
# If opening the file fails (no write permission, for example), then this will throw an error
|
||||
plugin_file.write_text("# InvenTree Plugins (uses PIP framework to install)\n\n")
|
||||
|
||||
return plugin_file
|
||||
|
||||
|
||||
def get_secret_key():
|
||||
"""Return the secret key value which will be used by django.
|
||||
|
||||
Following options are tested, in descending order of preference:
|
||||
|
||||
A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
|
||||
B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
|
||||
C) Look for default key file "secret_key.txt"
|
||||
D) Create "secret_key.txt" if it does not exist
|
||||
"""
|
||||
|
||||
# Look for environment variable
|
||||
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
|
||||
logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY") # pragma: no cover
|
||||
return secret_key
|
||||
|
||||
# Look for secret key file
|
||||
if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
|
||||
secret_key_file = Path(secret_key_file).resolve()
|
||||
else:
|
||||
# Default location for secret key file
|
||||
secret_key_file = get_base_dir().joinpath("secret_key.txt").resolve()
|
||||
|
||||
if not secret_key_file.exists():
|
||||
logger.info(f"Generating random key file at '{secret_key_file}'")
|
||||
|
||||
# Create a random key file
|
||||
options = string.digits + string.ascii_letters + string.punctuation
|
||||
key = ''.join([random.choice(options) for i in range(100)])
|
||||
secret_key_file.write_text(key)
|
||||
|
||||
logger.info(f"Loading SECRET_KEY from '{secret_key_file}'")
|
||||
|
||||
key_data = secret_key_file.read_text().strip()
|
||||
|
||||
return key_data
|
||||
95
InvenTree/InvenTree/context.py
Normal file
@@ -0,0 +1,95 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Provides extra global data to all templates."""
|
||||
|
||||
import InvenTree.status
|
||||
from InvenTree.status_codes import (BuildStatus, PurchaseOrderStatus,
|
||||
SalesOrderStatus, StockHistoryCode,
|
||||
StockStatus)
|
||||
from users.models import RuleSet, check_user_role
|
||||
|
||||
|
||||
def health_status(request):
|
||||
"""Provide system health status information to the global context.
|
||||
|
||||
- Not required for AJAX requests
|
||||
- Do not provide if it is already provided to the context
|
||||
"""
|
||||
if request.path.endswith('.js'):
|
||||
# Do not provide to script requests
|
||||
return {} # pragma: no cover
|
||||
|
||||
if hasattr(request, '_inventree_health_status'):
|
||||
# Do not duplicate efforts
|
||||
return {}
|
||||
|
||||
request._inventree_health_status = True
|
||||
|
||||
status = {
|
||||
'django_q_running': InvenTree.status.is_worker_running(),
|
||||
'email_configured': InvenTree.status.is_email_configured(),
|
||||
}
|
||||
|
||||
# The following keys are required to denote system health
|
||||
health_keys = [
|
||||
'django_q_running',
|
||||
]
|
||||
|
||||
all_healthy = True
|
||||
|
||||
for k in health_keys:
|
||||
if status[k] is not True:
|
||||
all_healthy = False
|
||||
|
||||
status['system_healthy'] = all_healthy
|
||||
|
||||
status['up_to_date'] = InvenTree.version.isInvenTreeUpToDate()
|
||||
|
||||
return status
|
||||
|
||||
|
||||
def status_codes(request):
|
||||
"""Provide status code enumerations."""
|
||||
if hasattr(request, '_inventree_status_codes'):
|
||||
# Do not duplicate efforts
|
||||
return {}
|
||||
|
||||
request._inventree_status_codes = True
|
||||
|
||||
return {
|
||||
# Expose the StatusCode classes to the templates
|
||||
'SalesOrderStatus': SalesOrderStatus,
|
||||
'PurchaseOrderStatus': PurchaseOrderStatus,
|
||||
'BuildStatus': BuildStatus,
|
||||
'StockStatus': StockStatus,
|
||||
'StockHistoryCode': StockHistoryCode,
|
||||
}
|
||||
|
||||
|
||||
def user_roles(request):
|
||||
"""Return a map of the current roles assigned to the user.
|
||||
|
||||
Roles are denoted by their simple names, and then the permission type.
|
||||
|
||||
Permissions can be accessed as follows:
|
||||
|
||||
- roles.part.view
|
||||
- roles.build.delete
|
||||
|
||||
Each value will return a boolean True / False
|
||||
"""
|
||||
user = request.user
|
||||
|
||||
roles = {
|
||||
}
|
||||
|
||||
for role in RuleSet.RULESET_MODELS.keys():
|
||||
|
||||
permissions = {}
|
||||
|
||||
for perm in ['view', 'add', 'change', 'delete']:
|
||||
permissions[perm] = user.is_superuser or check_user_role(user, role, perm)
|
||||
|
||||
roles[role] = permissions
|
||||
|
||||
return {'roles': roles}
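These functions are plain Django context processors; a minimal sketch of how they might be referenced from the TEMPLATES setting (the dotted paths are inferred from this module and are not copied from the project settings file):

TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'InvenTree.context.health_status',
            'InvenTree.context.status_codes',
            'InvenTree.context.user_roles',
        ],
    },
}]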
|
||||
88
InvenTree/InvenTree/exceptions.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""Custom exception handling for the DRF API."""
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import rest_framework.views as drfviews
|
||||
from error_report.models import Error
|
||||
from rest_framework import serializers
|
||||
from rest_framework.exceptions import ValidationError as DRFValidationError
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
def log_error(path):
|
||||
"""Log an error to the database.
|
||||
|
||||
- Uses python exception handling to extract error details
|
||||
|
||||
Arguments:
|
||||
path: The 'path' (most likely a URL) associated with this error (optional)
|
||||
"""
|
||||
|
||||
kind, info, data = sys.exc_info()
|
||||
|
||||
# Check if the error is on the ignore list
|
||||
if kind in settings.IGNORED_ERRORS:
|
||||
return
|
||||
|
||||
Error.objects.create(
|
||||
kind=kind.__name__,
|
||||
info=info,
|
||||
data='\n'.join(traceback.format_exception(kind, info, data)),
|
||||
path=path,
|
||||
)
|
||||
|
||||
|
||||
def exception_handler(exc, context):
|
||||
"""Custom exception handler for DRF framework.
|
||||
|
||||
Ref: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling
|
||||
Catches any errors not natively handled by DRF, and re-throws as an error DRF can handle
|
||||
"""
|
||||
response = None
|
||||
|
||||
# Catch any django validation error, and re-throw a DRF validation error
|
||||
if isinstance(exc, DjangoValidationError):
|
||||
exc = DRFValidationError(detail=serializers.as_serializer_error(exc))
|
||||
|
||||
# Default to the built-in DRF exception handler
|
||||
response = drfviews.exception_handler(exc, context)
|
||||
|
||||
if response is None:
|
||||
# DRF handler did not provide a default response for this exception
|
||||
|
||||
if settings.TESTING:
|
||||
# If in TESTING mode, re-throw the exception for traceback
|
||||
raise exc
|
||||
elif settings.DEBUG:
|
||||
# If in DEBUG mode, provide error information in the response
|
||||
error_detail = str(exc)
|
||||
else:
|
||||
error_detail = _("Error details can be found in the admin panel")
|
||||
|
||||
response_data = {
|
||||
'error': type(exc).__name__,
|
||||
'error_class': str(type(exc)),
|
||||
'detail': error_detail,
|
||||
'path': context['request'].path,
|
||||
'status_code': 500,
|
||||
}
|
||||
|
||||
response = Response(response_data, status=500)
|
||||
|
||||
log_error(context['request'].path)
|
||||
|
||||
if response is not None:
|
||||
# Convert errors returned under the label '__all__' to 'non_field_errors'
|
||||
if '__all__' in response.data:
|
||||
response.data['non_field_errors'] = response.data['__all__']
|
||||
del response.data['__all__']
|
||||
|
||||
return response
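As with any custom DRF exception handler, the function above only takes effect once it is referenced from the REST_FRAMEWORK setting; a minimal sketch:

REST_FRAMEWORK = {
    # Route otherwise-unhandled exceptions through the custom handler above
    'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler',
}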
|
||||
74
InvenTree/InvenTree/exchange.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Exchangerate backend to use `exchangerate.host` to get rates."""
|
||||
|
||||
import ssl
|
||||
from urllib.error import URLError
|
||||
from urllib.request import urlopen
|
||||
|
||||
from django.db.utils import OperationalError
|
||||
|
||||
import certifi
|
||||
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
|
||||
|
||||
from common.settings import currency_code_default, currency_codes
|
||||
|
||||
|
||||
class InvenTreeExchange(SimpleExchangeBackend):
|
||||
"""Backend for automatically updating currency exchange rates.
|
||||
|
||||
Uses the `exchangerate.host` service API
|
||||
"""
|
||||
|
||||
name = "InvenTreeExchange"
|
||||
|
||||
def __init__(self):
|
||||
"""Set API url."""
|
||||
self.url = "https://api.exchangerate.host/latest"
|
||||
|
||||
super().__init__()
|
||||
|
||||
def get_params(self):
|
||||
"""Placeholder to set API key. Currently not required by `exchangerate.host`."""
|
||||
# No API key is required
|
||||
return {
|
||||
}
|
||||
|
||||
def get_response(self, **kwargs):
|
||||
"""Custom code to get response from server.
|
||||
|
||||
Note: Adds a 5-second timeout
|
||||
"""
|
||||
url = self.get_url(**kwargs)
|
||||
|
||||
try:
|
||||
context = ssl.create_default_context(cafile=certifi.where())
|
||||
response = urlopen(url, timeout=5, context=context)
|
||||
return response.read()
|
||||
except Exception:
|
||||
# Something has gone wrong, but we can just try again next time
|
||||
# Raise a TypeError so the outer function can handle this
|
||||
raise TypeError
|
||||
|
||||
def update_rates(self, base_currency=None):
|
||||
"""Set the requested currency codes and get rates."""
|
||||
# Set default - see B008
|
||||
if base_currency is None:
|
||||
base_currency = currency_code_default()
|
||||
|
||||
symbols = ','.join(currency_codes())
|
||||
|
||||
try:
|
||||
super().update_rates(base=base_currency, symbols=symbols)
|
||||
# catch connection errors
|
||||
except URLError:
|
||||
print('Encountered connection error while updating')
|
||||
except TypeError:
|
||||
print('Exchange returned invalid response')
|
||||
except OperationalError as e:
|
||||
if 'SerializationFailure' in e.__cause__.__class__.__name__:
|
||||
print('Serialization Failure while updating exchange rates')
|
||||
# We are just going to swallow this exception because the
|
||||
# exchange rates will be updated later by the scheduled task
|
||||
else:
|
||||
# Other operational errors probably are still show stoppers
|
||||
# so reraise them so that the log contains the stacktrace
|
||||
raise
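A minimal usage sketch for the backend above, e.g. from a Django shell or a background task (requires database access and configured currency settings):

from InvenTree.exchange import InvenTreeExchange

# Fetch rates for all configured currency codes, using the default base currency
InvenTreeExchange().update_rates()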
|
||||
190
InvenTree/InvenTree/fields.py
Normal file
@@ -0,0 +1,190 @@
|
||||
"""Custom fields used in InvenTree."""
|
||||
|
||||
import sys
|
||||
from decimal import Decimal
|
||||
|
||||
from django import forms
|
||||
from django.core import validators
|
||||
from django.db import models as models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from djmoney.forms.fields import MoneyField
|
||||
from djmoney.models.fields import MoneyField as ModelMoneyField
|
||||
from djmoney.models.validators import MinMoneyValidator
|
||||
from rest_framework.fields import URLField as RestURLField
|
||||
|
||||
import InvenTree.helpers
|
||||
|
||||
from .validators import allowable_url_schemes
|
||||
|
||||
|
||||
class InvenTreeRestURLField(RestURLField):
|
||||
"""Custom field for DRF with custom scheme vaildators."""
|
||||
def __init__(self, **kwargs):
|
||||
"""Update schemes."""
|
||||
|
||||
# Enforce 'max length' parameter in form validation
|
||||
if 'max_length' not in kwargs:
|
||||
kwargs['max_length'] = 200
|
||||
|
||||
super().__init__(**kwargs)
|
||||
self.validators[-1].schemes = allowable_url_schemes()
|
||||
|
||||
|
||||
class InvenTreeURLField(models.URLField):
|
||||
"""Custom URL field which has custom scheme validators."""
|
||||
|
||||
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Initialization method for InvenTreeURLField"""
|
||||
|
||||
# Max length for InvenTreeURLField defaults to 200
|
||||
if 'max_length' not in kwargs:
|
||||
kwargs['max_length'] = 200
|
||||
|
||||
super().__init__(**kwargs)
|
||||
|
||||
|
||||
def money_kwargs():
|
||||
"""Returns the database settings for MoneyFields."""
|
||||
from common.settings import currency_code_default, currency_code_mappings
|
||||
|
||||
kwargs = {}
|
||||
kwargs['currency_choices'] = currency_code_mappings()
|
||||
kwargs['default_currency'] = currency_code_default()
|
||||
return kwargs
|
||||
|
||||
|
||||
class InvenTreeModelMoneyField(ModelMoneyField):
|
||||
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Overwrite default values and validators."""
|
||||
# detect if creating migration
|
||||
if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
|
||||
# remove currency information for a clean migration
|
||||
kwargs['default_currency'] = ''
|
||||
kwargs['currency_choices'] = []
|
||||
else:
|
||||
# set defaults
|
||||
kwargs.update(money_kwargs())
|
||||
|
||||
# Set a minimum value validator
|
||||
validators = kwargs.get('validators', [])
|
||||
|
||||
allow_negative = kwargs.pop('allow_negative', False)
|
||||
|
||||
# If no validators are provided, add some "standard" ones
|
||||
if len(validators) == 0:
|
||||
|
||||
if not allow_negative:
|
||||
validators.append(
|
||||
MinMoneyValidator(0),
|
||||
)
|
||||
|
||||
kwargs['validators'] = validators
|
||||
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def formfield(self, **kwargs):
|
||||
"""Override form class to use own function."""
|
||||
kwargs['form_class'] = InvenTreeMoneyField
|
||||
return super().formfield(**kwargs)
|
||||
|
||||
|
||||
class InvenTreeMoneyField(MoneyField):
|
||||
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Override initial values with the real info from database."""
|
||||
kwargs.update(money_kwargs())
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class DatePickerFormField(forms.DateField):
|
||||
"""Custom date-picker field."""
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Set up custom values."""
|
||||
help_text = kwargs.get('help_text', _('Enter date'))
|
||||
label = kwargs.get('label', None)
|
||||
required = kwargs.get('required', False)
|
||||
initial = kwargs.get('initial', None)
|
||||
|
||||
widget = forms.DateInput(
|
||||
attrs={
|
||||
'type': 'date',
|
||||
}
|
||||
)
|
||||
|
||||
forms.DateField.__init__(
|
||||
self,
|
||||
required=required,
|
||||
initial=initial,
|
||||
help_text=help_text,
|
||||
widget=widget,
|
||||
label=label
|
||||
)
|
||||
|
||||
|
||||
def round_decimal(value, places):
|
||||
"""Round value to the specified number of places."""
|
||||
if value is not None:
|
||||
# see https://docs.python.org/2/library/decimal.html#decimal.Decimal.quantize for options
|
||||
return value.quantize(Decimal(10) ** -places)
|
||||
return value
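For example, round_decimal behaves as follows (an illustrative sketch, not a doctest copied from the source):

from decimal import Decimal

round_decimal(Decimal('1.2375'), 2)  # -> Decimal('1.24')
round_decimal(None, 2)               # -> None (passed through unchanged)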
|
||||
|
||||
|
||||
class RoundingDecimalFormField(forms.DecimalField):
|
||||
"""Custom FormField that automatically rounds inputs."""
|
||||
|
||||
def to_python(self, value):
|
||||
"""Convert value to python type."""
|
||||
value = super().to_python(value)
|
||||
value = round_decimal(value, self.decimal_places)
|
||||
return value
|
||||
|
||||
def prepare_value(self, value):
|
||||
"""Override the 'prepare_value' method, to remove trailing zeros when displaying.
|
||||
|
||||
Why? It looks nice!
|
||||
"""
|
||||
if type(value) == Decimal:
|
||||
return InvenTree.helpers.normalize(value)
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
class RoundingDecimalField(models.DecimalField):
|
||||
"""Custom Field that automatically rounds inputs."""
|
||||
|
||||
def to_python(self, value):
|
||||
"""Convert value to python type."""
|
||||
value = super().to_python(value)
|
||||
return round_decimal(value, self.decimal_places)
|
||||
|
||||
def formfield(self, **kwargs):
|
||||
"""Return a Field instance for this field."""
|
||||
defaults = {
|
||||
'form_class': RoundingDecimalFormField
|
||||
}
|
||||
|
||||
defaults.update(kwargs)
|
||||
|
||||
return super().formfield(**defaults)
|
||||
|
||||
|
||||
class InvenTreeNotesField(models.TextField):
|
||||
"""Custom implementation of a 'notes' field"""
|
||||
|
||||
# Maximum character limit for the various 'notes' fields
|
||||
NOTES_MAX_LENGTH = 50000
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
"""Configure default initial values for this field"""
|
||||
kwargs['max_length'] = self.NOTES_MAX_LENGTH
|
||||
kwargs['verbose_name'] = _('Notes')
|
||||
kwargs['blank'] = True
|
||||
kwargs['null'] = True
|
||||
|
||||
super().__init__(**kwargs)
|
||||
76
InvenTree/InvenTree/filters.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""General filters for InvenTree."""
|
||||
|
||||
from rest_framework.filters import OrderingFilter
|
||||
|
||||
|
||||
class InvenTreeOrderingFilter(OrderingFilter):
|
||||
"""Custom OrderingFilter class which allows aliased filtering of related fields.
|
||||
|
||||
To use, simply specify this filter in the "filter_backends" section.
|
||||
|
||||
filter_backends = [
|
||||
InvenTreeOrderingFilter,
|
||||
]
|
||||
|
||||
Then, specify an ordering_field_aliases attribute:
|
||||
|
||||
ordering_field_aliases = {
|
||||
'name': 'part__part__name',
|
||||
'SKU': 'part__SKU',
|
||||
}
|
||||
"""
|
||||
|
||||
def get_ordering(self, request, queryset, view):
|
||||
"""Override ordering for supporting aliases."""
|
||||
ordering = super().get_ordering(request, queryset, view)
|
||||
|
||||
aliases = getattr(view, 'ordering_field_aliases', None)
|
||||
|
||||
# Attempt to map ordering fields based on provided aliases
|
||||
if ordering is not None and aliases is not None:
|
||||
"""Ordering fields should be mapped to separate fields."""
|
||||
|
||||
ordering_initial = ordering
|
||||
ordering = []
|
||||
|
||||
for field in ordering_initial:
|
||||
|
||||
reverse = field.startswith('-')
|
||||
|
||||
if reverse:
|
||||
field = field[1:]
|
||||
|
||||
# Are aliases defined for this field?
|
||||
if field in aliases:
|
||||
alias = aliases[field]
|
||||
else:
|
||||
alias = field
|
||||
|
||||
"""
|
||||
Potentially, a single field could be "aliased" to multiple fields,
|
||||
|
||||
(For example to enforce a particular ordering sequence)
|
||||
|
||||
e.g. to filter first by the integer value...
|
||||
|
||||
ordering_field_aliases = {
|
||||
"reference": ["integer_ref", "reference"]
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
if type(alias) is str:
|
||||
alias = [alias]
|
||||
elif type(alias) in [list, tuple]:
|
||||
pass
|
||||
else:
|
||||
# Unsupported alias type
|
||||
continue
|
||||
|
||||
for a in alias:
|
||||
if reverse:
|
||||
a = '-' + a
|
||||
|
||||
ordering.append(a)
|
||||
|
||||
return ordering
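A minimal sketch of a list endpoint using this filter class (the model and serializer names here are hypothetical):

from rest_framework import generics

from InvenTree.filters import InvenTreeOrderingFilter


class ExampleList(generics.ListAPIView):
    """Hypothetical list endpoint with aliased ordering fields."""

    queryset = ExampleModel.objects.all()    # hypothetical model
    serializer_class = ExampleSerializer     # hypothetical serializer

    filter_backends = [InvenTreeOrderingFilter]

    ordering_field_aliases = {
        'name': 'part__name',                        # single alias
        'reference': ['integer_ref', 'reference'],   # multiple aliases
    }

    ordering_fields = ['name', 'reference']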
|
||||
156
InvenTree/InvenTree/format.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""Custom string formatting functions and helpers"""
|
||||
|
||||
import re
|
||||
import string
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
def parse_format_string(fmt_string: str) -> dict:
|
||||
"""Extract formatting information from the provided format string.
|
||||
|
||||
Returns a dict object which contains structured information about the format groups
|
||||
"""
|
||||
|
||||
groups = string.Formatter().parse(fmt_string)
|
||||
|
||||
info = {}
|
||||
|
||||
for group in groups:
|
||||
# Skip any group which does not have a named value
|
||||
if not group[1]:
|
||||
continue
|
||||
|
||||
info[group[1]] = {
|
||||
'format': group[1],
|
||||
'prefix': group[0],
|
||||
}
|
||||
|
||||
return info
|
||||
|
||||
|
||||
def construct_format_regex(fmt_string: str) -> str:
|
||||
r"""Construct a regular expression based on a provided format string
|
||||
|
||||
This function turns a python format string into a regular expression,
|
||||
which can be used for two purposes:
|
||||
|
||||
- Ensure that a particular string matches the specified format
|
||||
- Extract named variables from a matching string
|
||||
|
||||
This function also provides support for wildcard characters:
|
||||
|
||||
- '?' provides single character matching; is converted to a '.' (period) for regex
|
||||
- '#' provides single digit matching; is converted to '\d'
|
||||
|
||||
Args:
|
||||
fmt_string: A typical format string e.g. "PO-???-{ref:04d}"
|
||||
|
||||
Returns:
|
||||
str: A regular expression pattern e.g. ^PO\-...\-(?P<ref>.*)$
|
||||
|
||||
Raises:
|
||||
ValueError: Format string is invalid
|
||||
"""
|
||||
|
||||
pattern = "^"
|
||||
|
||||
for group in string.Formatter().parse(fmt_string):
|
||||
prefix = group[0] # Prefix (literal text appearing before this group)
|
||||
name = group[1] # Name of this format variable
|
||||
format = group[2] # Format specifier e.g :04d
|
||||
|
||||
rep = [
|
||||
'+', '-', '.',
|
||||
'{', '}', '(', ')',
|
||||
'^', '$', '~', '!', '@', ':', ';', '|', '\'', '"',
|
||||
]
|
||||
|
||||
# Escape any special regex characters
|
||||
for ch in rep:
|
||||
prefix = prefix.replace(ch, '\\' + ch)
|
||||
|
||||
# Replace ? with single-character match
|
||||
prefix = prefix.replace('?', '.')
|
||||
|
||||
# Replace # with single-digit match
|
||||
prefix = prefix.replace('#', r'\d')
|
||||
|
||||
pattern += prefix
|
||||
|
||||
# Add a named capture group for the format entry
|
||||
if name:
|
||||
|
||||
# Check if integer values are required
|
||||
if format.endswith('d'):
|
||||
chr = r'\d'
|
||||
else:
|
||||
chr = '.'
|
||||
|
||||
# Specify width
|
||||
# TODO: Introspect required width
|
||||
w = '+'
|
||||
|
||||
pattern += f"(?P<{name}>{chr}{w})"
|
||||
|
||||
pattern += "$"
|
||||
|
||||
return pattern
|
||||
|
||||
|
||||
def validate_string(value: str, fmt_string: str) -> str:
|
||||
"""Validate that the provided string matches the specified format.
|
||||
|
||||
Args:
|
||||
value: The string to be tested e.g. 'SO-1234-ABC',
|
||||
fmt_string: The required format e.g. 'SO-{ref}-???',
|
||||
|
||||
Returns:
|
||||
bool: True if the value matches the required format, else False
|
||||
|
||||
Raises:
|
||||
ValueError: The provided format string is invalid
|
||||
"""
|
||||
|
||||
pattern = construct_format_regex(fmt_string)
|
||||
|
||||
result = re.match(pattern, value)
|
||||
|
||||
return result is not None
|
||||
|
||||
|
||||
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
|
||||
"""Extract a named value from the provided string, given the provided format string
|
||||
|
||||
Args:
|
||||
name: Name of group to extract e.g. 'ref'
|
||||
value: Raw string e.g. 'PO-ABC-1234'
|
||||
fmt_string: Format pattern e.g. 'PO-???-{ref}'
|
||||
|
||||
Returns:
|
||||
str: String value of the named group
|
||||
|
||||
Raises:
|
||||
ValueError: format string is incorrectly specified, or provided value does not match format string
|
||||
NameError: named value does not exist in the format string
|
||||
IndexError: named value could not be found in the provided entry
|
||||
"""
|
||||
|
||||
info = parse_format_string(fmt_string)
|
||||
|
||||
if name not in info.keys():
|
||||
raise NameError(_(f"Value '{name}' does not appear in pattern format"))
|
||||
|
||||
# Construct a regular expression for matching against the provided format string
|
||||
# Note: This will raise a ValueError if 'fmt_string' is incorrectly specified
|
||||
pattern = construct_format_regex(fmt_string)
|
||||
|
||||
# Run the regex matcher against the raw string
|
||||
result = re.match(pattern, value)
|
||||
|
||||
if not result:
|
||||
raise ValueError(_("Provided value does not match required pattern: ") + fmt_string)
|
||||
|
||||
# And return the value we are interested in
|
||||
# Note: This will raise an IndexError if the named group was not matched
|
||||
return result.group(name)
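A short usage sketch for the helpers above (the reference pattern and values are illustrative):

fmt = "PO-???-{ref:04d}"

validate_string("PO-ABC-0001", fmt)             # -> True
extract_named_group("ref", "PO-ABC-0001", fmt)  # -> '0001'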
|
||||
271
InvenTree/InvenTree/forms.py
Normal file
@@ -0,0 +1,271 @@
|
||||
"""Helper forms which subclass Django forms to provide additional functionality."""
|
||||
|
||||
import logging
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import Group, User
|
||||
from django.http import HttpResponseRedirect
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from allauth.account.adapter import DefaultAccountAdapter
|
||||
from allauth.account.forms import SignupForm, set_form_field_order
|
||||
from allauth.exceptions import ImmediateHttpResponse
|
||||
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
|
||||
from allauth_2fa.adapter import OTPAdapter
|
||||
from allauth_2fa.utils import user_has_valid_totp_device
|
||||
from crispy_forms.bootstrap import (AppendedText, PrependedAppendedText,
|
||||
PrependedText)
|
||||
from crispy_forms.helper import FormHelper
|
||||
from crispy_forms.layout import Field, Layout
|
||||
|
||||
from common.models import InvenTreeSetting
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class HelperForm(forms.ModelForm):
|
||||
"""Provides simple integration of crispy_forms extension."""
|
||||
|
||||
# Custom field decorations can be specified here, per form class
|
||||
field_prefix = {}
|
||||
field_suffix = {}
|
||||
field_placeholder = {}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Setup layout."""
|
||||
super(forms.ModelForm, self).__init__(*args, **kwargs)
|
||||
self.helper = FormHelper()
|
||||
|
||||
self.helper.form_tag = False
|
||||
self.helper.form_show_errors = True
|
||||
|
||||
"""
|
||||
Create a default 'layout' for this form.
|
||||
Ref: https://django-crispy-forms.readthedocs.io/en/latest/layouts.html
|
||||
This is required to do fancy things later (like adding PrependedText, etc).
|
||||
|
||||
Simply create a 'blank' layout for each available field.
|
||||
"""
|
||||
|
||||
self.rebuild_layout()
|
||||
|
||||
def rebuild_layout(self):
|
||||
"""Build crispy layout out of current fields."""
|
||||
layouts = []
|
||||
|
||||
for field in self.fields:
|
||||
prefix = self.field_prefix.get(field, None)
|
||||
suffix = self.field_suffix.get(field, None)
|
||||
placeholder = self.field_placeholder.get(field, '')
|
||||
|
||||
# Look for font-awesome icons
|
||||
if prefix and prefix.startswith('fa-'):
|
||||
prefix = r"<i class='fas {fa}'/>".format(fa=prefix)
|
||||
|
||||
if suffix and suffix.startswith('fa-'):
|
||||
suffix = r"<i class='fas {fa}'/>".format(fa=suffix)
|
||||
|
||||
if prefix and suffix:
|
||||
layouts.append(
|
||||
Field(
|
||||
PrependedAppendedText(
|
||||
field,
|
||||
prepended_text=prefix,
|
||||
appended_text=suffix,
|
||||
placeholder=placeholder
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
elif prefix:
|
||||
layouts.append(
|
||||
Field(
|
||||
PrependedText(
|
||||
field,
|
||||
prefix,
|
||||
placeholder=placeholder
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
elif suffix:
|
||||
layouts.append(
|
||||
Field(
|
||||
AppendedText(
|
||||
field,
|
||||
suffix,
|
||||
placeholder=placeholder
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
else:
|
||||
layouts.append(Field(field, placeholder=placeholder))
|
||||
|
||||
self.helper.layout = Layout(*layouts)
|
||||
|
||||
|
||||
class EditUserForm(HelperForm):
|
||||
"""Form for editing user information."""
|
||||
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
'first_name',
|
||||
'last_name',
|
||||
]
|
||||
|
||||
|
||||
class SetPasswordForm(HelperForm):
|
||||
"""Form for setting user password."""
|
||||
|
||||
enter_password = forms.CharField(
|
||||
max_length=100,
|
||||
min_length=8,
|
||||
required=True,
|
||||
initial='',
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
|
||||
label=_('Enter password'),
|
||||
help_text=_('Enter new password')
|
||||
)
|
||||
|
||||
confirm_password = forms.CharField(
|
||||
max_length=100,
|
||||
min_length=8,
|
||||
required=True,
|
||||
initial='',
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
|
||||
label=_('Confirm password'),
|
||||
help_text=_('Confirm new password')
|
||||
)
|
||||
|
||||
old_password = forms.CharField(
|
||||
label=_("Old password"),
|
||||
strip=False,
|
||||
widget=forms.PasswordInput(attrs={'autocomplete': 'current-password', 'autofocus': True}),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
"""Metaclass options."""
|
||||
|
||||
model = User
|
||||
fields = [
|
||||
'enter_password',
|
||||
'confirm_password',
|
||||
'old_password',
|
||||
]
|
||||
|
||||
|
||||
# override allauth
|
||||
class CustomSignupForm(SignupForm):
|
||||
"""Override to use dynamic settings."""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Check settings to influence which fields are needed."""
|
||||
kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')
|
||||
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
# check for two mail fields
|
||||
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
|
||||
self.fields["email2"] = forms.EmailField(
|
||||
label=_("Email (again)"),
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"type": "email",
|
||||
"placeholder": _("Email address confirmation"),
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
# check for two password fields
|
||||
if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
|
||||
self.fields.pop("password2")
|
||||
|
||||
# reorder fields
|
||||
set_form_field_order(self, ["username", "email", "email2", "password1", "password2", ])
|
||||
|
||||
def clean(self):
|
||||
"""Make sure the supllied emails match if enabled in settings."""
|
||||
cleaned_data = super().clean()
|
||||
|
||||
# check for two mail fields
|
||||
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
|
||||
email = cleaned_data.get("email")
|
||||
email2 = cleaned_data.get("email2")
|
||||
if (email and email2) and email != email2:
|
||||
self.add_error("email2", _("You must type the same email each time."))
|
||||
|
||||
return cleaned_data
|
||||
|
||||
|
||||
class RegistratonMixin:
|
||||
"""Mixin to check if registration should be enabled."""
|
||||
|
||||
def is_open_for_signup(self, request, *args, **kwargs):
|
||||
"""Check if signup is enabled in settings."""
|
||||
if settings.EMAIL_HOST and InvenTreeSetting.get_setting('LOGIN_ENABLE_REG', True):
|
||||
return super().is_open_for_signup(request, *args, **kwargs)
|
||||
return False
|
||||
|
||||
def save_user(self, request, user, form, commit=True):
|
||||
"""Check if a default group is set in settings."""
|
||||
user = super().save_user(request, user, form)
|
||||
start_group = InvenTreeSetting.get_setting('SIGNUP_GROUP')
|
||||
if start_group:
|
||||
try:
|
||||
group = Group.objects.get(id=start_group)
|
||||
user.groups.add(group)
|
||||
except Group.DoesNotExist:
|
||||
logger.error('The setting `SIGNUP_GROUP` contains a non-existent group: %s', start_group)
|
||||
user.save()
|
||||
return user
|
||||
|
||||
|
||||
class CustomAccountAdapter(RegistratonMixin, OTPAdapter, DefaultAccountAdapter):
|
||||
"""Override of adapter to use dynamic settings."""
|
||||
def send_mail(self, template_prefix, email, context):
|
||||
"""Only send mail if backend configured."""
|
||||
if settings.EMAIL_HOST:
|
||||
return super().send_mail(template_prefix, email, context)
|
||||
return False
|
||||
|
||||
|
||||
class CustomSocialAccountAdapter(RegistratonMixin, DefaultSocialAccountAdapter):
|
||||
"""Override of adapter to use dynamic settings."""
|
||||
|
||||
def is_auto_signup_allowed(self, request, sociallogin):
|
||||
"""Check if auto signup is enabled in settings."""
|
||||
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO', True):
|
||||
return super().is_auto_signup_allowed(request, sociallogin)
|
||||
return False
|
||||
|
||||
# from OTPAdapter
|
||||
def has_2fa_enabled(self, user):
|
||||
"""Returns True if the user has 2FA configured."""
|
||||
return user_has_valid_totp_device(user)
|
||||
|
||||
def login(self, request, user):
|
||||
"""Ensure user is send to 2FA before login if enabled."""
|
||||
# Require two-factor authentication if it has been configured.
|
||||
if self.has_2fa_enabled(user):
|
||||
# Cast to string for the case when this is not a JSON serializable
|
||||
# object, e.g. a UUID.
|
||||
request.session['allauth_2fa_user_id'] = str(user.id)
|
||||
|
||||
redirect_url = reverse('two-factor-authenticate')
|
||||
# Add GET parameters to the URL if they exist.
|
||||
if request.GET:
|
||||
redirect_url += '?' + urlencode(request.GET)
|
||||
|
||||
raise ImmediateHttpResponse(
|
||||
response=HttpResponseRedirect(redirect_url)
|
||||
)
|
||||
|
||||
# Otherwise defer to the original allauth adapter.
|
||||
return super().login(request, user)
|
||||
933
InvenTree/InvenTree/helpers.py
Normal file
@@ -0,0 +1,933 @@
|
||||
"""Provides helper functions used throughout the InvenTree project."""
|
||||
|
||||
import io
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
from decimal import Decimal, InvalidOperation
|
||||
from pathlib import Path
|
||||
from wsgiref.util import FileWrapper
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.staticfiles.storage import StaticFilesStorage
|
||||
from django.core.exceptions import FieldError, ValidationError
|
||||
from django.core.files.storage import default_storage
|
||||
from django.core.validators import URLValidator
|
||||
from django.http import StreamingHttpResponse
|
||||
from django.test import TestCase
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
import requests
|
||||
from djmoney.money import Money
|
||||
from PIL import Image
|
||||
|
||||
import InvenTree.version
|
||||
from common.models import InvenTreeSetting
|
||||
from common.notifications import (InvenTreeNotificationBodies,
|
||||
NotificationBody, trigger_notification)
|
||||
from common.settings import currency_code_default
|
||||
|
||||
from .api_tester import UserMixin
|
||||
from .settings import MEDIA_URL, STATIC_URL
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def getSetting(key, backup_value=None):
|
||||
"""Shortcut for reading a setting value from the database."""
|
||||
return InvenTreeSetting.get_setting(key, backup_value=backup_value)
|
||||
|
||||
|
||||
def generateTestKey(test_name):
|
||||
"""Generate a test 'key' for a given test name. This must not have illegal chars as it will be used for dict lookup in a template.
|
||||
|
||||
Tests must be named such that they will have unique keys.
|
||||
"""
|
||||
key = test_name.strip().lower()
|
||||
key = key.replace(" ", "")
|
||||
|
||||
# Remove any characters that cannot be used to represent a variable
|
||||
key = re.sub(r'[^a-zA-Z0-9]', '', key)
|
||||
|
||||
return key
|
||||
|
||||
|
||||
def constructPathString(path, max_chars=250):
|
||||
"""Construct a 'path string' for the given path.
|
||||
|
||||
Arguments:
|
||||
path: A list of strings e.g. ['path', 'to', 'location']
|
||||
max_chars: Maximum number of characters
|
||||
"""
|
||||
|
||||
pathstring = '/'.join(path)
|
||||
|
||||
idx = 0
|
||||
|
||||
# Replace middle elements to limit the pathstring
|
||||
if len(pathstring) > max_chars:
|
||||
mid = len(path) // 2
|
||||
path_l = path[0:mid]
|
||||
path_r = path[mid:]
|
||||
|
||||
# Ensure the pathstring length is limited
|
||||
while len(pathstring) > max_chars:
|
||||
|
||||
# Remove an element from the list
|
||||
if idx % 2 == 0:
|
||||
path_l = path_l[:-1]
|
||||
else:
|
||||
path_r = path_r[1:]
|
||||
|
||||
subpath = path_l + ['...'] + path_r
|
||||
|
||||
pathstring = '/'.join(subpath)
|
||||
|
||||
idx += 1
|
||||
|
||||
return pathstring
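For illustration (a sketch; the exact truncation depends on the lengths of the individual elements):

constructPathString(['Electronics', 'Passives', 'Resistors'])
# -> 'Electronics/Passives/Resistors'

constructPathString(['part', 'category', 'electronics', 'passives', 'resistors'], max_chars=30)
# -> 'part/.../passives/resistors'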
|
||||
|
||||
|
||||
def getMediaUrl(filename):
|
||||
"""Return the qualified access path for the given file, under the media directory."""
|
||||
return os.path.join(MEDIA_URL, str(filename))
|
||||
|
||||
|
||||
def getStaticUrl(filename):
|
||||
"""Return the qualified access path for the given file, under the static media directory."""
|
||||
return os.path.join(STATIC_URL, str(filename))
|
||||
|
||||
|
||||
def construct_absolute_url(*arg):
|
||||
"""Construct (or attempt to construct) an absolute URL from a relative URL.
|
||||
|
||||
This is useful when (for example) sending an email to a user with a link
|
||||
to something in the InvenTree web framework.
|
||||
|
||||
This requires the BASE_URL configuration option to be set!
|
||||
"""
|
||||
base = str(InvenTreeSetting.get_setting('INVENTREE_BASE_URL'))
|
||||
|
||||
url = '/'.join(arg)
|
||||
|
||||
if not base:
|
||||
return url
|
||||
|
||||
# Strip trailing slash from base url
|
||||
if base.endswith('/'):
|
||||
base = base[:-1]
|
||||
|
||||
if url.startswith('/'):
|
||||
url = url[1:]
|
||||
|
||||
url = f"{base}/{url}"
|
||||
|
||||
return url
|
||||
|
||||
|
||||
def download_image_from_url(remote_url, timeout=2.5):
|
||||
"""Download an image file from a remote URL.
|
||||
|
||||
This is a potentially dangerous operation, so we must perform some checks:
|
||||
|
||||
- The remote URL is available
|
||||
- The Content-Length is provided, and is not too large
|
||||
- The file is a valid image file
|
||||
|
||||
Arguments:
|
||||
remote_url: The remote URL to retrieve image
|
||||
timeout: Connection timeout in seconds (default = 2.5)
|
||||
|
||||
Returns:
|
||||
An in-memory PIL image file, if the download was successful
|
||||
|
||||
Raises:
|
||||
requests.exceptions.ConnectionError: Connection could not be established
|
||||
requests.exceptions.Timeout: Connection timed out
|
||||
requests.exceptions.HTTPError: Server responded with invalid response code
|
||||
ValueError: Server responded with invalid 'Content-Length' value
|
||||
TypeError: Response is not a valid image
|
||||
"""
|
||||
|
||||
# Check that the provided URL at least looks valid
|
||||
validator = URLValidator()
|
||||
validator(remote_url)
|
||||
|
||||
# Calculate maximum allowable image size (in bytes)
|
||||
max_size = int(InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE')) * 1024 * 1024
|
||||
|
||||
try:
|
||||
response = requests.get(
|
||||
remote_url,
|
||||
timeout=timeout,
|
||||
allow_redirects=True,
|
||||
stream=True,
|
||||
)
|
||||
# Throw an error if anything goes wrong
|
||||
response.raise_for_status()
|
||||
except requests.exceptions.ConnectionError as exc:
|
||||
raise Exception(_("Connection error") + f": {str(exc)}")
|
||||
except requests.exceptions.Timeout as exc:
|
||||
raise exc
|
||||
except requests.exceptions.HTTPError:
|
||||
raise requests.exceptions.HTTPError(_("Server responded with invalid status code") + f": {response.status_code}")
|
||||
except Exception as exc:
|
||||
raise Exception(_("Exception occurred") + f": {str(exc)}")
|
||||
|
||||
if response.status_code != 200:
|
||||
raise Exception(_("Server responded with invalid status code") + f": {response.status_code}")
|
||||
|
||||
try:
|
||||
content_length = int(response.headers.get('Content-Length', 0))
|
||||
except ValueError:
|
||||
raise ValueError(_("Server responded with invalid Content-Length value"))
|
||||
|
||||
if content_length > max_size:
|
||||
raise ValueError(_("Image size is too large"))
|
||||
|
||||
# Download the file, ensuring we do not exceed the reported size
|
||||
fo = io.BytesIO()
|
||||
|
||||
dl_size = 0
|
||||
chunk_size = 64 * 1024
|
||||
|
||||
for chunk in response.iter_content(chunk_size=chunk_size):
|
||||
dl_size += len(chunk)
|
||||
|
||||
if dl_size > max_size:
|
||||
raise ValueError(_("Image download exceeded maximum size"))
|
||||
|
||||
fo.write(chunk)
|
||||
|
||||
if dl_size == 0:
|
||||
raise ValueError(_("Remote server returned empty response"))
|
||||
|
||||
# Now, attempt to convert the downloaded data to a valid image file
|
||||
# img.verify() will throw an exception if the image is not valid
|
||||
try:
|
||||
img = Image.open(fo).convert()
|
||||
img.verify()
|
||||
except Exception:
|
||||
raise TypeError(_("Supplied URL is not a valid image file"))
|
||||
|
||||
return img
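A minimal usage sketch (the URL is illustrative; the size limit is read from the INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE setting, so database access is required):

try:
    img = download_image_from_url("https://example.com/part-photo.png", timeout=2.5)
except Exception as exc:
    # Connection problems, oversized downloads and non-image data all end up here
    print(f"Image download failed: {exc}")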
|
||||
|
||||
|
||||
def TestIfImage(img):
|
||||
"""Test if an image file is indeed an image."""
|
||||
try:
|
||||
Image.open(img).verify()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
|
||||
def getBlankImage():
|
||||
"""Return the qualified path for the 'blank image' placeholder."""
|
||||
return getStaticUrl("img/blank_image.png")
|
||||
|
||||
|
||||
def getBlankThumbnail():
|
||||
"""Return the qualified path for the 'blank image' thumbnail placeholder."""
|
||||
return getStaticUrl("img/blank_image.thumbnail.png")
|
||||
|
||||
|
||||
def getLogoImage(as_file=False, custom=True):
|
||||
"""Return the InvenTree logo image, or a custom logo if available."""
|
||||
|
||||
"""Return the path to the logo-file."""
|
||||
if custom and settings.CUSTOM_LOGO:
|
||||
|
||||
static_storage = StaticFilesStorage()
|
||||
|
||||
if static_storage.exists(settings.CUSTOM_LOGO):
|
||||
storage = static_storage
|
||||
elif default_storage.exists(settings.CUSTOM_LOGO):
|
||||
storage = default_storage
|
||||
else:
|
||||
storage = None
|
||||
|
||||
if storage is not None:
|
||||
if as_file:
|
||||
return f"file://{storage.path(settings.CUSTOM_LOGO)}"
|
||||
else:
|
||||
return storage.url(settings.CUSTOM_LOGO)
|
||||
|
||||
# If we have got to this point, return the default logo
|
||||
if as_file:
|
||||
path = settings.STATIC_ROOT.joinpath('img/inventree.png')
|
||||
return f"file://{path}"
|
||||
else:
|
||||
return getStaticUrl('img/inventree.png')
|
||||
|
||||
|
||||
def TestIfImageURL(url):
|
||||
"""Test if an image URL (or filename) looks like a valid image format.
|
||||
|
||||
Simply tests the extension against a set of allowed values
|
||||
"""
|
||||
return os.path.splitext(os.path.basename(url))[-1].lower() in [
|
||||
'.jpg', '.jpeg', '.j2k',
|
||||
'.png', '.bmp',
|
||||
'.tif', '.tiff',
|
||||
'.webp', '.gif',
|
||||
]
|
||||
|
||||
|
||||
def str2bool(text, test=True):
|
||||
"""Test if a string 'looks' like a boolean value.
|
||||
|
||||
Args:
|
||||
text: Input text
|
||||
test (default = True): Set which boolean value to look for
|
||||
|
||||
Returns:
|
||||
True if the text looks like the selected boolean value
|
||||
"""
|
||||
if test:
|
||||
return str(text).lower() in ['1', 'y', 'yes', 't', 'true', 'ok', 'on', ]
|
||||
else:
|
||||
return str(text).lower() in ['0', 'n', 'no', 'none', 'f', 'false', 'off', ]
|
||||
|
||||
|
||||
def is_bool(text):
|
||||
"""Determine if a string value 'looks' like a boolean."""
|
||||
if str2bool(text, True):
|
||||
return True
|
||||
elif str2bool(text, False):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def isNull(text):
|
||||
"""Test if a string 'looks' like a null value. This is useful for querying the API against a null key.
|
||||
|
||||
Args:
|
||||
text: Input text
|
||||
|
||||
Returns:
|
||||
True if the text looks like a null value
|
||||
"""
|
||||
return str(text).strip().lower() in ['top', 'null', 'none', 'empty', 'false', '-1', '']
|
||||
|
||||
|
||||
def normalize(d):
|
||||
"""Normalize a decimal number, and remove exponential formatting."""
|
||||
if type(d) is not Decimal:
|
||||
d = Decimal(d)
|
||||
|
||||
d = d.normalize()
|
||||
|
||||
# Ref: https://docs.python.org/3/library/decimal.html
|
||||
return d.quantize(Decimal(1)) if d == d.to_integral() else d.normalize()
|
||||
|
||||
|
||||
def increment(n):
|
||||
"""Attempt to increment an integer (or a string that looks like an integer).
|
||||
|
||||
e.g.
|
||||
|
||||
001 -> 002
|
||||
2 -> 3
|
||||
AB01 -> AB02
|
||||
QQQ -> QQQ
|
||||
"""
|
||||
value = str(n).strip()
|
||||
|
||||
# Ignore empty strings
|
||||
if not value:
|
||||
return value
|
||||
|
||||
pattern = r"(.*?)(\d+)?$"
|
||||
|
||||
result = re.search(pattern, value)
|
||||
|
||||
# No match!
|
||||
if result is None:
|
||||
return value
|
||||
|
||||
groups = result.groups()
|
||||
|
||||
# If we cannot match the regex, then simply return the provided value
|
||||
if len(groups) != 2:
|
||||
return value
|
||||
|
||||
prefix, number = groups
|
||||
|
||||
# No number extracted? Simply return the prefix (without incrementing!)
|
||||
if not number:
|
||||
return prefix
|
||||
|
||||
# Record the width of the number
|
||||
width = len(number)
|
||||
|
||||
try:
|
||||
number = int(number) + 1
|
||||
number = str(number)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
number = number.zfill(width)
|
||||
|
||||
return prefix + number
|
||||
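# --- Illustrative sketch (not in the original source): increment() in action, matching
# the examples given in its docstring. Safe to remove.
def _demo_increment():
    """Hypothetical helper demonstrating increment()."""
    assert increment('001') == '002'    # zero-padding width is preserved
    assert increment(2) == '3'
    assert increment('AB01') == 'AB02'  # alphanumeric prefix is kept
    assert increment('QQQ') == 'QQQ'    # nothing numeric to increment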
|
||||
|
||||
def decimal2string(d):
|
||||
"""Format a Decimal number as a string, stripping out any trailing zeroes or decimal points. Essentially make it look like a whole number if it is one.
|
||||
|
||||
Args:
|
||||
d: A python Decimal object
|
||||
|
||||
Returns:
|
||||
A string representation of the input number
|
||||
"""
|
||||
if type(d) is Decimal:
|
||||
d = normalize(d)
|
||||
|
||||
try:
|
||||
# Ensure that the provided string can actually be converted to a float
|
||||
float(d)
|
||||
except ValueError:
|
||||
# Not a number
|
||||
return str(d)
|
||||
|
||||
s = str(d)
|
||||
|
||||
# Return entire number if there is no decimal place
|
||||
if '.' not in s:
|
||||
return s
|
||||
|
||||
return s.rstrip("0").rstrip(".")
|
||||
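# --- Illustrative sketch (not in the original source): trailing zeroes and the decimal
# point are stripped, so whole numbers render without a fractional part. Safe to remove.
def _demo_decimal2string():
    """Hypothetical helper demonstrating decimal2string()."""
    assert decimal2string(Decimal('1.25000')) == '1.25'
    assert decimal2string(Decimal('3.0000')) == '3'
    assert decimal2string('not-a-number') == 'not-a-number'  # non-numeric input is returned unchanged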
|
||||
|
||||
def decimal2money(d, currency=None):
|
||||
"""Format a Decimal number as Money.
|
||||
|
||||
Args:
|
||||
d: A python Decimal object
|
||||
currency: Currency of the input amount, defaults to default currency in settings
|
||||
|
||||
Returns:
|
||||
A Money object from the input(s)
|
||||
"""
|
||||
if not currency:
|
||||
currency = currency_code_default()
|
||||
return Money(d, currency)
|
||||
|
||||
|
||||
def WrapWithQuotes(text, quote='"'):
|
||||
"""Wrap the supplied text with quotes.
|
||||
|
||||
Args:
|
||||
text: Input text to wrap
|
||||
quote: Quote character to use for wrapping (default = "")
|
||||
|
||||
Returns:
|
||||
Supplied text wrapped in quote char
|
||||
"""
|
||||
if not text.startswith(quote):
|
||||
text = quote + text
|
||||
|
||||
if not text.endswith(quote):
|
||||
text = text + quote
|
||||
|
||||
return text
|
||||
|
||||
|
||||
def MakeBarcode(object_name, object_pk, object_data=None, **kwargs):
|
||||
"""Generate a string for a barcode. Adds some global InvenTree parameters.
|
||||
|
||||
Args:
|
||||
object_name: string describing the object type e.g. 'StockItem'
|
||||
object_pk: ID (Primary Key) of the object in the database
|
||||
object_data: Python dict object containing extra data which will be rendered to string (must only contain stringable values)
|
||||
kwargs: Optional 'url' and 'brief' flags which control the output format
|
||||
|
||||
Returns:
|
||||
json string of the supplied data plus some other data
|
||||
"""
|
||||
if object_data is None:
|
||||
object_data = {}
|
||||
|
||||
url = kwargs.get('url', False)
|
||||
brief = kwargs.get('brief', True)
|
||||
|
||||
data = {}
|
||||
|
||||
if url:
|
||||
request = object_data.get('request', None)
|
||||
item_url = object_data.get('item_url', None)
|
||||
absolute_url = None
|
||||
|
||||
if request and item_url:
|
||||
absolute_url = request.build_absolute_uri(item_url)
|
||||
# Return URL (No JSON)
|
||||
return absolute_url
|
||||
|
||||
if item_url:
|
||||
# Return URL (No JSON)
|
||||
return item_url
|
||||
elif brief:
|
||||
data[object_name] = object_pk
|
||||
else:
|
||||
data['tool'] = 'InvenTree'
|
||||
data['version'] = InvenTree.version.inventreeVersion()
|
||||
data['instance'] = InvenTree.version.inventreeInstanceName()
|
||||
|
||||
# Ensure PK is included
|
||||
object_data['id'] = object_pk
|
||||
data[object_name] = object_data
|
||||
|
||||
return json.dumps(data, sort_keys=True)
|
||||
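# --- Illustrative sketch (not in the original source): with the default brief=True,
# MakeBarcode() returns a compact JSON mapping of object name to primary key. The
# brief=False case also embeds tool/version/instance metadata and therefore needs a
# configured InvenTree environment. Safe to remove.
def _demo_make_barcode():
    """Hypothetical helper demonstrating MakeBarcode()."""
    assert MakeBarcode("part", 17) == '{"part": 17}'
    full = json.loads(MakeBarcode("part", 17, {"ipn": "ACME-01"}, brief=False))
    assert full["part"] == {"ipn": "ACME-01", "id": 17}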
|
||||
|
||||
def GetExportFormats():
|
||||
"""Return a list of allowable file formats for exporting data."""
|
||||
return [
|
||||
'csv',
|
||||
'tsv',
|
||||
'xls',
|
||||
'xlsx',
|
||||
'json',
|
||||
'yaml',
|
||||
]
|
||||
|
||||
|
||||
def DownloadFile(data, filename, content_type='application/text', inline=False) -> StreamingHttpResponse:
|
||||
"""Create a dynamic file for the user to download.
|
||||
|
||||
Args:
|
||||
data: Raw file data (string or bytes)
|
||||
filename: Filename for the file download
|
||||
content_type: Content type for the download
|
||||
inline: Download "inline" or as attachment? (Default = attachment)
|
||||
|
||||
Return:
|
||||
A StreamingHttpResponse object wrapping the supplied data
|
||||
"""
|
||||
filename = WrapWithQuotes(filename)
|
||||
|
||||
if type(data) == str:
|
||||
wrapper = FileWrapper(io.StringIO(data))
|
||||
else:
|
||||
wrapper = FileWrapper(io.BytesIO(data))
|
||||
|
||||
response = StreamingHttpResponse(wrapper, content_type=content_type)
|
||||
response['Content-Length'] = len(data)
|
||||
|
||||
disposition = "inline" if inline else "attachment"
|
||||
|
||||
response['Content-Disposition'] = f'{disposition}; filename={filename}'
|
||||
|
||||
return response
|
||||
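# --- Illustrative sketch (not in the original source): how DownloadFile() might be used
# from a hypothetical Django view to stream a CSV export to the client; the view is not
# wired into any URLconf. Safe to remove.
def _demo_download_view(request):
    """Hypothetical view demonstrating DownloadFile()."""
    csv_data = "id,name\n1,Widget\n"
    return DownloadFile(csv_data, "export.csv", content_type="text/csv")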
|
||||
|
||||
def extract_serial_numbers(serials, expected_quantity, next_number: int):
|
||||
"""Attempt to extract serial numbers from an input string.
|
||||
|
||||
Requirements:
|
||||
- Serial numbers can be either strings, or integers
|
||||
- Serial numbers can be split by whitespace / newline / comma chars
|
||||
- Serial numbers can be supplied as an inclusive range using hyphen char e.g. 10-20
|
||||
- Serial numbers can be defined as ~ for getting the next available serial number
|
||||
- Serial numbers can be supplied as <start>+ for getting all expected numbers starting from <start>
|
||||
- Serial numbers can be supplied as <start>+<length> for getting <length> numbers starting from <start>
|
||||
|
||||
Args:
|
||||
serials: input string with patterns
|
||||
expected_quantity: The number of (unique) serial numbers we expect
|
||||
next_number(int): the next possible serial number
|
||||
"""
|
||||
serials = serials.strip()
|
||||
|
||||
# fill in the next serial number into the serial
|
||||
while '~' in serials:
|
||||
serials = serials.replace('~', str(next_number), 1)
|
||||
next_number += 1
|
||||
|
||||
# Split input string by whitespace or comma (,) characters
|
||||
groups = re.split(r"[\s,]+", serials)
|
||||
|
||||
numbers = []
|
||||
errors = []
|
||||
|
||||
# Helper function to check for duplicated numbers
|
||||
def add_sn(sn):
|
||||
# Attempt integer conversion first, so numerical strings are never stored
|
||||
try:
|
||||
sn = int(sn)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
if sn in numbers:
|
||||
errors.append(_('Duplicate serial: {sn}').format(sn=sn))
|
||||
else:
|
||||
numbers.append(sn)
|
||||
|
||||
try:
|
||||
expected_quantity = int(expected_quantity)
|
||||
except ValueError:
|
||||
raise ValidationError([_("Invalid quantity provided")])
|
||||
|
||||
if len(serials) == 0:
|
||||
raise ValidationError([_("Empty serial number string")])
|
||||
|
||||
# If the user has supplied the correct number of serials, don't process them for groups
|
||||
# just add them so any duplicates (or future validations) are checked
|
||||
if len(groups) == expected_quantity:
|
||||
for group in groups:
|
||||
add_sn(group)
|
||||
|
||||
if len(errors) > 0:
|
||||
raise ValidationError(errors)
|
||||
|
||||
return numbers
|
||||
|
||||
for group in groups:
|
||||
group = group.strip()
|
||||
|
||||
# Hyphen indicates a range of numbers
|
||||
if '-' in group:
|
||||
items = group.split('-')
|
||||
|
||||
if len(items) == 2 and all([i.isnumeric() for i in items]):
|
||||
a = items[0].strip()
|
||||
b = items[1].strip()
|
||||
|
||||
try:
|
||||
a = int(a)
|
||||
b = int(b)
|
||||
|
||||
if a < b:
|
||||
for n in range(a, b + 1):
|
||||
add_sn(n)
|
||||
else:
|
||||
errors.append(_("Invalid group range: {g}").format(g=group))
|
||||
|
||||
except ValueError:
|
||||
errors.append(_("Invalid group: {g}").format(g=group))
|
||||
continue
|
||||
else:
|
||||
# More than 2 hyphens or non-numeric group so add without interpolating
|
||||
add_sn(group)
|
||||
|
||||
# plus signals either
|
||||
# 1: 'start+': expected number of serials, starting at start
|
||||
# 2: 'start+number': number of serials, starting at start
|
||||
elif '+' in group:
|
||||
items = group.split('+')
|
||||
|
||||
# case 1, 2
|
||||
if len(items) == 2:
|
||||
start = int(items[0])
|
||||
|
||||
# case 2
|
||||
if bool(items[1]):
|
||||
end = start + int(items[1]) + 1
|
||||
|
||||
# case 1
|
||||
else:
|
||||
end = start + (expected_quantity - len(numbers))
|
||||
|
||||
for n in range(start, end):
|
||||
add_sn(n)
|
||||
# no case
|
||||
else:
|
||||
errors.append(_("Invalid group sequence: {g}").format(g=group))
|
||||
|
||||
# At this point, we assume that the "group" is just a single serial value
|
||||
elif group:
|
||||
add_sn(group)
|
||||
|
||||
# No valid input group detected
|
||||
else:
|
||||
raise ValidationError(_(f"Invalid/no group {group}"))
|
||||
|
||||
if len(errors) > 0:
|
||||
raise ValidationError(errors)
|
||||
|
||||
if len(numbers) == 0:
|
||||
raise ValidationError([_("No serial numbers found")])
|
||||
|
||||
# The number of extracted serial numbers must match the expected quantity
|
||||
if expected_quantity != len(numbers):
|
||||
raise ValidationError([_("Number of unique serial numbers ({s}) must match quantity ({q})").format(s=len(numbers), q=expected_quantity)])
|
||||
|
||||
return numbers
|
||||
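# --- Illustrative sketch (not in the original source): the input patterns described in
# the docstring above, exercised against extract_serial_numbers(). Safe to remove.
def _demo_extract_serial_numbers():
    """Hypothetical helper demonstrating extract_serial_numbers()."""
    assert extract_serial_numbers("1-3, 5", 4, 1) == [1, 2, 3, 5]   # hyphen range
    assert extract_serial_numbers("~, ~", 2, 100) == [100, 101]     # '~' = next available number
    assert extract_serial_numbers("50+", 3, 1) == [50, 51, 52]      # '<start>+' fills the expected quantity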
|
||||
|
||||
def validateFilterString(value, model=None):
|
||||
"""Validate that a provided filter string looks like a list of comma-separated key=value pairs.
|
||||
|
||||
These should nominally match to a valid database filter based on the model being filtered.
|
||||
|
||||
e.g. "category=6, IPN=12"
|
||||
e.g. "part__name=widget"
|
||||
|
||||
The ReportTemplate class uses the filter string to work out which items a given report applies to.
|
||||
For example, an acceptance test report template might only apply to stock items with a given IPN,
|
||||
so the string could be set to:
|
||||
|
||||
filters = "IPN = ACME0001"
|
||||
|
||||
Returns a map of key:value pairs
|
||||
"""
|
||||
# Empty results map
|
||||
results = {}
|
||||
|
||||
value = str(value).strip()
|
||||
|
||||
if not value or len(value) == 0:
|
||||
return results
|
||||
|
||||
groups = value.split(',')
|
||||
|
||||
for group in groups:
|
||||
group = group.strip()
|
||||
|
||||
pair = group.split('=')
|
||||
|
||||
if len(pair) != 2:
|
||||
raise ValidationError(
|
||||
"Invalid group: {g}".format(g=group)
|
||||
)
|
||||
|
||||
k, v = pair
|
||||
|
||||
k = k.strip()
|
||||
v = v.strip()
|
||||
|
||||
if not k or not v:
|
||||
raise ValidationError(
|
||||
"Invalid group: {g}".format(g=group)
|
||||
)
|
||||
|
||||
results[k] = v
|
||||
|
||||
# If a model is provided, verify that the provided filters can be used against it
|
||||
if model is not None:
|
||||
try:
|
||||
model.objects.filter(**results)
|
||||
except FieldError as e:
|
||||
raise ValidationError(
|
||||
str(e),
|
||||
)
|
||||
|
||||
return results
|
||||
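# --- Illustrative sketch (not in the original source): the filter string format
# documented above, parsed into a plain key/value mapping. Safe to remove.
def _demo_validate_filter_string():
    """Hypothetical helper demonstrating validateFilterString()."""
    filters = validateFilterString("category=6, IPN=12")
    assert filters == {'category': '6', 'IPN': '12'}
    # Passing model=SomeModel additionally checks the keys against the model's database fields.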
|
||||
|
||||
def addUserPermission(user, permission):
|
||||
"""Shortcut function for adding a certain permission to a user."""
|
||||
perm = Permission.objects.get(codename=permission)
|
||||
user.user_permissions.add(perm)
|
||||
|
||||
|
||||
def addUserPermissions(user, permissions):
|
||||
"""Shortcut function for adding multiple permissions to a user."""
|
||||
for permission in permissions:
|
||||
addUserPermission(user, permission)
|
||||
|
||||
|
||||
def getMigrationFileNames(app):
|
||||
"""Return a list of all migration filenames for provided app."""
|
||||
local_dir = Path(__file__).parent
|
||||
files = local_dir.joinpath('..', app, 'migrations').iterdir()
|
||||
|
||||
# Regex pattern for migration files
|
||||
regex = re.compile(r"^[\d]+_.*\.py$")
|
||||
|
||||
migration_files = []
|
||||
|
||||
for f in files:
|
||||
if regex.match(f.name):
|
||||
migration_files.append(f.name)
|
||||
|
||||
return migration_files
|
||||
|
||||
|
||||
def getOldestMigrationFile(app, exclude_extension=True, ignore_initial=True):
|
||||
"""Return the filename associated with the oldest migration."""
|
||||
oldest_num = -1
|
||||
oldest_file = None
|
||||
|
||||
for f in getMigrationFileNames(app):
|
||||
|
||||
if ignore_initial and f.startswith('0001_initial'):
|
||||
continue
|
||||
|
||||
num = int(f.split('_')[0])
|
||||
|
||||
if oldest_file is None or num < oldest_num:
|
||||
oldest_num = num
|
||||
oldest_file = f
|
||||
|
||||
if exclude_extension:
|
||||
oldest_file = oldest_file.replace('.py', '')
|
||||
|
||||
return oldest_file
|
||||
|
||||
|
||||
def getNewestMigrationFile(app, exclude_extension=True):
|
||||
"""Return the filename associated with the newest migration."""
|
||||
newest_file = None
|
||||
newest_num = -1
|
||||
|
||||
for f in getMigrationFileNames(app):
|
||||
num = int(f.split('_')[0])
|
||||
|
||||
if newest_file is None or num > newest_num:
|
||||
newest_num = num
|
||||
newest_file = f
|
||||
|
||||
if exclude_extension:
|
||||
newest_file = newest_file.replace('.py', '')
|
||||
|
||||
return newest_file
|
||||
|
||||
|
||||
def clean_decimal(number):
|
||||
"""Clean-up decimal value."""
|
||||
# Check if empty
|
||||
if number is None or number == '' or number == 0:
|
||||
return Decimal(0)
|
||||
|
||||
# Convert to string and remove spaces
|
||||
number = str(number).replace(' ', '')
|
||||
|
||||
# Guess what type of decimal and thousands separators are used
|
||||
count_comma = number.count(',')
|
||||
count_point = number.count('.')
|
||||
|
||||
if count_comma == 1:
|
||||
# Comma is used as decimal separator
|
||||
if count_point > 0:
|
||||
# Points are used as thousands separators: remove them
|
||||
number = number.replace('.', '')
|
||||
# Replace decimal separator with point
|
||||
number = number.replace(',', '.')
|
||||
elif count_point == 1:
|
||||
# Point is used as decimal separator
|
||||
if count_comma > 0:
|
||||
# Commas are used as thousands separators: remove them
|
||||
number = number.replace(',', '')
|
||||
|
||||
# Convert to Decimal type
|
||||
try:
|
||||
clean_number = Decimal(number)
|
||||
except InvalidOperation:
|
||||
# Number cannot be converted to Decimal (eg. a string containing letters)
|
||||
return Decimal(0)
|
||||
|
||||
return clean_number.quantize(Decimal(1)) if clean_number == clean_number.to_integral() else clean_number.normalize()
|
||||
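# --- Illustrative sketch (not in the original source): clean_decimal() guesses the
# separator convention, so European-style input is converted correctly. Safe to remove.
def _demo_clean_decimal():
    """Hypothetical helper demonstrating clean_decimal()."""
    assert clean_decimal('1.234,56') == Decimal('1234.56')   # comma used as decimal separator
    assert clean_decimal('1000.5') == Decimal('1000.5')      # point used as decimal separator
    assert clean_decimal('abc') == Decimal(0)                # unparseable input falls back to zero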
|
||||
|
||||
def get_objectreference(obj, type_ref: str = 'content_type', object_ref: str = 'object_id'):
|
||||
"""Lookup method for the GenericForeignKey fields.
|
||||
|
||||
Attributes:
|
||||
- obj: object that will be resolved
|
||||
- type_ref: field name for the contenttype field in the model
|
||||
- object_ref: field name for the object id in the model
|
||||
|
||||
Example implementation in the serializer:
|
||||
```
|
||||
target = serializers.SerializerMethodField()
|
||||
def get_target(self, obj):
|
||||
return get_objectreference(obj, 'target_content_type', 'target_object_id')
|
||||
```
|
||||
|
||||
The method name must always be the name of the field prefixed by 'get_'
|
||||
"""
|
||||
|
||||
model_cls = getattr(obj, type_ref)
|
||||
obj_id = getattr(obj, object_ref)
|
||||
|
||||
# check if references are set -> return nothing if not
|
||||
if model_cls is None or obj_id is None:
|
||||
return None
|
||||
|
||||
# resolve referenced data into objects
|
||||
model_cls = model_cls.model_class()
|
||||
|
||||
try:
|
||||
item = model_cls.objects.get(id=obj_id)
|
||||
except model_cls.DoesNotExist:
|
||||
return None
|
||||
|
||||
url_fnc = getattr(item, 'get_absolute_url', None)
|
||||
|
||||
# create output
|
||||
ret = {}
|
||||
if url_fnc:
|
||||
ret['link'] = url_fnc()
|
||||
return {
|
||||
'name': str(item),
|
||||
'model': str(model_cls._meta.verbose_name),
|
||||
**ret
|
||||
}
|
||||
|
||||
|
||||
def inheritors(cls):
|
||||
"""Return all classes that are subclasses from the supplied cls."""
|
||||
subcls = set()
|
||||
work = [cls]
|
||||
while work:
|
||||
parent = work.pop()
|
||||
for child in parent.__subclasses__():
|
||||
if child not in subcls:
|
||||
subcls.add(child)
|
||||
work.append(child)
|
||||
return subcls
|
||||
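# --- Illustrative sketch (not in the original source): inheritors() walks the whole
# subclass tree, not just the direct children. Safe to remove.
def _demo_inheritors():
    """Hypothetical classes demonstrating inheritors()."""
    class Base: pass
    class Child(Base): pass
    class GrandChild(Child): pass
    assert inheritors(Base) == {Child, GrandChild}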
|
||||
|
||||
class InvenTreeTestCase(UserMixin, TestCase):
|
||||
"""Testcase with user setup buildin."""
|
||||
pass
|
||||
|
||||
|
||||
def notify_responsible(instance, sender, content: NotificationBody = InvenTreeNotificationBodies.NewOrder, exclude=None):
|
||||
"""Notify all responsible parties of a change in an instance.
|
||||
|
||||
Parses the supplied content with the provided instance and sender and sends a notification to all responsible users,
|
||||
excluding the optional excluded list.
|
||||
|
||||
Args:
|
||||
instance: The newly created instance
|
||||
sender: Sender model reference
|
||||
content (NotificationBody, optional): Notification body to use. Defaults to InvenTreeNotificationBodies.NewOrder.
|
||||
exclude (User, optional): User instance that should be excluded. Defaults to None.
|
||||
"""
|
||||
if instance.responsible is not None:
|
||||
# Setup context for notification parsing
|
||||
content_context = {
|
||||
'instance': str(instance),
|
||||
'verbose_name': sender._meta.verbose_name,
|
||||
'app_label': sender._meta.app_label,
|
||||
'model_name': sender._meta.model_name,
|
||||
}
|
||||
|
||||
# Setup notification context
|
||||
context = {
|
||||
'instance': instance,
|
||||
'name': content.name.format(**content_context),
|
||||
'message': content.message.format(**content_context),
|
||||
'link': InvenTree.helpers.construct_absolute_url(instance.get_absolute_url()),
|
||||
'template': {
|
||||
'html': content.template.format(**content_context),
|
||||
'subject': content.name.format(**content_context),
|
||||
}
|
||||
}
|
||||
|
||||
# Create notification
|
||||
trigger_notification(
|
||||
instance,
|
||||
content.slug.format(**content_context),
|
||||
targets=[instance.responsible],
|
||||
target_exclude=[exclude],
|
||||
context=context,
|
||||
)
|
||||
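# --- Illustrative sketch (not in the original source): notify_responsible() is typically
# called when an object with a 'responsible' owner is created, e.g. from a hypothetical
# post_save receiver. The signal wiring and the 'created_by' attribute are assumptions.
def _demo_notify_on_create(sender, instance, created, **kwargs):
    """Hypothetical post_save receiver demonstrating notify_responsible()."""
    if created and getattr(instance, 'responsible', None):
        notify_responsible(instance, sender, exclude=getattr(instance, 'created_by', None))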
@@ -1,10 +1,10 @@
|
||||
"""Custom management command to cleanup old settings that are not defined anymore."""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
import structlog
|
||||
|
||||
logger = structlog.get_logger('inventree')
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
@@ -12,7 +12,7 @@ class Command(BaseCommand):
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Cleanup old (undefined) settings in the database."""
|
||||
logger.info('Collecting settings')
|
||||
logger.info("Collecting settings")
|
||||
from common.models import InvenTreeSetting, InvenTreeUserSetting
|
||||
|
||||
# general settings
|
||||
@@ -23,7 +23,7 @@ class Command(BaseCommand):
|
||||
for setting in db_settings:
|
||||
if setting.key not in model_settings:
|
||||
setting.delete()
|
||||
logger.info("deleted setting '%s'", setting.key)
|
||||
logger.info(f"deleted setting '{setting.key}'")
|
||||
|
||||
# user settings
|
||||
db_settings = InvenTreeUserSetting.objects.all()
|
||||
@@ -33,6 +33,6 @@ class Command(BaseCommand):
|
||||
for setting in db_settings:
|
||||
if setting.key not in model_settings:
|
||||
setting.delete()
|
||||
logger.info("deleted user setting '%s'", setting.key)
|
||||
logger.info(f"deleted user setting '{setting.key}'")
|
||||
|
||||
logger.info('checked all settings')
|
||||
logger.info("checked all settings")
|
||||
58
InvenTree/InvenTree/management/commands/prerender.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""Custom management command to prerender files."""
|
||||
|
||||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.http.request import HttpRequest
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.module_loading import import_string
|
||||
from django.utils.translation import override as lang_over
|
||||
|
||||
|
||||
def render_file(file_name, source, target, locales, ctx):
|
||||
"""Renders a file into all provided locales."""
|
||||
for locale in locales:
|
||||
target_file = os.path.join(target, locale + '.' + file_name)
|
||||
with open(target_file, 'w') as localised_file:
|
||||
with lang_over(locale):
|
||||
rendered = render_to_string(os.path.join(source, file_name), ctx)
|
||||
localised_file.write(rendered)
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Django command to prerender files."""
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Django command to prerender files."""
|
||||
# static directories
|
||||
LC_DIR = settings.LOCALE_PATHS[0]
|
||||
SOURCE_DIR = settings.STATICFILES_I18_SRC
|
||||
TARGET_DIR = settings.STATICFILES_I18_TRG
|
||||
|
||||
# ensure static directory exists
|
||||
if not os.path.exists(TARGET_DIR):
|
||||
os.makedirs(TARGET_DIR, exist_ok=True)
|
||||
|
||||
# collect locales
|
||||
locales = {}
|
||||
for locale in os.listdir(LC_DIR):
|
||||
path = os.path.join(LC_DIR, locale)
|
||||
if os.path.exists(path) and os.path.isdir(path):
|
||||
locales[locale] = locale
|
||||
|
||||
# render!
|
||||
request = HttpRequest()
|
||||
ctx = {}
|
||||
processors = tuple(import_string(path) for path in settings.STATFILES_I18_PROCESSORS)
|
||||
for processor in processors:
|
||||
ctx.update(processor(request))
|
||||
|
||||
for file in os.listdir(SOURCE_DIR, ):
|
||||
path = os.path.join(SOURCE_DIR, file)
|
||||
if os.path.exists(path) and os.path.isfile(path):
|
||||
print(f"render {file}")
|
||||
render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
|
||||
else:
|
||||
raise NotImplementedError('Using multi-level directories is not implemented at this point') # TODO multilevel dir if needed
|
||||
print(f"rendered all files in {SOURCE_DIR}")
|
||||
57
InvenTree/InvenTree/management/commands/rebuild_models.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Custom management command to rebuild all MPTT models.
|
||||
|
||||
- This is crucial after importing any fixtures, etc
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Rebuild all database models which leverage the MPTT structure."""
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Rebuild all database models which leverage the MPTT structure."""
|
||||
# Part model
|
||||
try:
|
||||
print("Rebuilding Part objects")
|
||||
|
||||
from part.models import Part
|
||||
Part.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Part objects")
|
||||
|
||||
# Part category
|
||||
try:
|
||||
print("Rebuilding PartCategory objects")
|
||||
|
||||
from part.models import PartCategory
|
||||
PartCategory.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding PartCategory objects")
|
||||
|
||||
# StockItem model
|
||||
try:
|
||||
print("Rebuilding StockItem objects")
|
||||
|
||||
from stock.models import StockItem
|
||||
StockItem.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockItem objects")
|
||||
|
||||
# StockLocation model
|
||||
try:
|
||||
print("Rebuilding StockLocation objects")
|
||||
|
||||
from stock.models import StockLocation
|
||||
StockLocation.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding StockLocation objects")
|
||||
|
||||
# Build model
|
||||
try:
|
||||
print("Rebuilding Build objects")
|
||||
|
||||
from build.models import Build
|
||||
Build.objects.rebuild()
|
||||
except Exception:
|
||||
print("Error rebuilding Build objects")
|
||||
@@ -0,0 +1,56 @@
|
||||
"""Custom management command to rebuild thumbnail images.
|
||||
|
||||
- May be required after importing a new dataset, for example
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.utils import OperationalError, ProgrammingError
|
||||
|
||||
from PIL import UnidentifiedImageError
|
||||
|
||||
from company.models import Company
|
||||
from part.models import Part
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Rebuild all thumbnail images."""
|
||||
|
||||
def rebuild_thumbnail(self, model):
|
||||
"""Rebuild the thumbnail specified by the "image" field of the provided model."""
|
||||
if not model.image:
|
||||
return
|
||||
|
||||
img = model.image
|
||||
|
||||
logger.info(f"Generating thumbnail image for '{img}'")
|
||||
|
||||
try:
|
||||
model.image.render_variations(replace=False)
|
||||
except FileNotFoundError:
|
||||
logger.warning(f"Warning: Image file '{img}' is missing")
|
||||
except UnidentifiedImageError:
|
||||
logger.warning(f"Warning: Image file '{img}' is not a valid image")
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Rebuild all thumbnail images."""
|
||||
logger.info("Rebuilding Part thumbnails")
|
||||
|
||||
for part in Part.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(part)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.error("ERROR: Database read error.")
|
||||
break
|
||||
|
||||
logger.info("Rebuilding Company thumbnails")
|
||||
|
||||
for company in Company.objects.exclude(image=None):
|
||||
try:
|
||||
self.rebuild_thumbnail(company)
|
||||
except (OperationalError, ProgrammingError):
|
||||
logger.error("ERROR: abase read error.")
|
||||
break
|
||||
33
InvenTree/InvenTree/management/commands/remove_mfa.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""Custom management command to remove MFA for a user."""
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Remove MFA for a user."""
|
||||
|
||||
def add_arguments(self, parser):
|
||||
"""Add the arguments."""
|
||||
parser.add_argument('mail', type=str)
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Remove MFA for the supplied user (by mail)."""
|
||||
# general settings
|
||||
mail = kwargs.get('mail')
|
||||
if not mail:
|
||||
raise KeyError('A mail is required')
|
||||
user = get_user_model()
|
||||
mfa_user = [*set(user.objects.filter(email=mail) | user.objects.filter(emailaddress__email=mail))]
|
||||
|
||||
if len(mfa_user) == 0:
|
||||
print('No user with this mail associated')
|
||||
elif len(mfa_user) > 1:
|
||||
print('More than one user found with this mail')
|
||||
else:
|
||||
# and clean out all MFA methods
|
||||
# backup codes
|
||||
mfa_user[0].staticdevice_set.all().delete()
|
||||
# TOTP tokens
|
||||
mfa_user[0].totpdevice_set.all().delete()
|
||||
print(f'Removed all MFA methods for user {str(mfa_user[0])}')
|
||||
37
InvenTree/InvenTree/management/commands/wait_for_db.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Custom management command, wait for the database to be ready!"""
|
||||
|
||||
import time
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import connection
|
||||
from django.db.utils import ImproperlyConfigured, OperationalError
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""Django command to pause execution until the database is ready."""
|
||||
|
||||
def handle(self, *args, **kwargs):
|
||||
"""Wait till the database is ready."""
|
||||
self.stdout.write("Waiting for database...")
|
||||
|
||||
connected = False
|
||||
|
||||
while not connected:
|
||||
|
||||
time.sleep(5)
|
||||
|
||||
try:
|
||||
connection.ensure_connection()
|
||||
|
||||
connected = True
|
||||
|
||||
except OperationalError as e:
|
||||
self.stdout.write(f"Could not connect to database: {e}")
|
||||
except ImproperlyConfigured as e:
|
||||
self.stdout.write(f"Improperly configured: {e}")
|
||||
else:
|
||||
if not connection.is_usable():
|
||||
self.stdout.write("Database configuration is not usable")
|
||||
|
||||
if connected:
|
||||
self.stdout.write("Database connection sucessful!")
|
||||
273
InvenTree/InvenTree/metadata.py
Normal file
@@ -0,0 +1,273 @@
|
||||
"""Custom metadata for DRF."""
|
||||
|
||||
import logging
|
||||
|
||||
from rest_framework import serializers
|
||||
from rest_framework.fields import empty
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework.utils import model_meta
|
||||
|
||||
import users.models
|
||||
from InvenTree.helpers import str2bool
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
class InvenTreeMetadata(SimpleMetadata):
|
||||
"""Custom metadata class for the DRF API.
|
||||
|
||||
This custom metadata class limits the available "actions",
|
||||
based on the user's role permissions.
|
||||
|
||||
Thus, when a client sends an OPTIONS request to an API endpoint,
|
||||
it will only receive a list of actions which it is allowed to perform!
|
||||
|
||||
Additionally, we include some extra information about database models,
|
||||
so we can perform lookup for ForeignKey related fields.
|
||||
"""
|
||||
|
||||
def determine_metadata(self, request, view):
|
||||
"""Overwrite the metadata to adapt to hte request user."""
|
||||
self.request = request
|
||||
self.view = view
|
||||
|
||||
metadata = super().determine_metadata(request, view)
|
||||
|
||||
"""
|
||||
Custom context information to pass through to the OPTIONS endpoint,
|
||||
if the "context=True" is supplied to the OPTIONS requst
|
||||
|
||||
Serializer class can supply context data by defining a get_context_data() method (no arguments)
|
||||
"""
|
||||
|
||||
context = {}
|
||||
|
||||
if str2bool(request.query_params.get('context', False)):
|
||||
|
||||
if hasattr(self, 'serializer') and hasattr(self.serializer, 'get_context_data'):
|
||||
context = self.serializer.get_context_data()
|
||||
|
||||
metadata['context'] = context
|
||||
|
||||
user = request.user
|
||||
|
||||
if user is None:
|
||||
# No actions for you!
|
||||
metadata['actions'] = {}
|
||||
return metadata
|
||||
|
||||
try:
|
||||
# Extract the model name associated with the view
|
||||
self.model = view.serializer_class.Meta.model
|
||||
|
||||
# Construct the 'table name' from the model
|
||||
app_label = self.model._meta.app_label
|
||||
tbl_label = self.model._meta.model_name
|
||||
|
||||
metadata['model'] = tbl_label
|
||||
|
||||
table = f"{app_label}_{tbl_label}"
|
||||
|
||||
actions = metadata.get('actions', None)
|
||||
|
||||
if actions is None:
|
||||
actions = {}
|
||||
|
||||
check = users.models.RuleSet.check_table_permission
|
||||
|
||||
# Map the request method to a permission type
|
||||
rolemap = {
|
||||
'POST': 'add',
|
||||
'PUT': 'change',
|
||||
'PATCH': 'change',
|
||||
'DELETE': 'delete',
|
||||
}
|
||||
|
||||
# Remove any HTTP methods that the user does not have permission for
|
||||
for method, permission in rolemap.items():
|
||||
|
||||
result = check(user, table, permission)
|
||||
|
||||
if method in actions and not result:
|
||||
del actions[method]
|
||||
|
||||
# Add a 'DELETE' action if we are allowed to delete
|
||||
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
|
||||
actions['DELETE'] = True
|
||||
|
||||
# Add a 'VIEW' action if we are allowed to view
|
||||
if 'GET' in view.allowed_methods and check(user, table, 'view'):
|
||||
actions['GET'] = True
|
||||
|
||||
metadata['actions'] = actions
|
||||
|
||||
except AttributeError:
|
||||
# We will assume that if the serializer class does *not* have a Meta
|
||||
# then we don't need a permission
|
||||
pass
|
||||
|
||||
return metadata
|
||||
|
||||
def get_serializer_info(self, serializer):
|
||||
"""Override get_serializer_info so that we can add 'default' values to any fields whose Meta.model specifies a default value."""
|
||||
self.serializer = serializer
|
||||
|
||||
serializer_info = super().get_serializer_info(serializer)
|
||||
|
||||
model_class = None
|
||||
|
||||
# Attributes to copy extra attributes from the model to the field (if they don't exist)
|
||||
extra_attributes = [
|
||||
'help_text',
|
||||
'max_length',
|
||||
]
|
||||
|
||||
try:
|
||||
model_class = serializer.Meta.model
|
||||
|
||||
model_fields = model_meta.get_field_info(model_class)
|
||||
|
||||
model_default_func = getattr(model_class, 'api_defaults', None)
|
||||
|
||||
if model_default_func:
|
||||
model_default_values = model_class.api_defaults(self.request)
|
||||
else:
|
||||
model_default_values = {}
|
||||
|
||||
# Iterate through simple fields
|
||||
for name, field in model_fields.fields.items():
|
||||
|
||||
if name in serializer_info.keys():
|
||||
|
||||
if field.has_default():
|
||||
|
||||
default = field.default
|
||||
|
||||
if callable(default):
|
||||
try:
|
||||
default = default()
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
serializer_info[name]['default'] = default
|
||||
|
||||
elif name in model_default_values:
|
||||
serializer_info[name]['default'] = model_default_values[name]
|
||||
|
||||
for attr in extra_attributes:
|
||||
if attr not in serializer_info[name]:
|
||||
|
||||
if hasattr(field, attr):
|
||||
serializer_info[name][attr] = getattr(field, attr)
|
||||
|
||||
# Iterate through relations
|
||||
for name, relation in model_fields.relations.items():
|
||||
|
||||
if name not in serializer_info.keys():
|
||||
# Skip relation not defined in serializer
|
||||
continue
|
||||
|
||||
if relation.reverse:
|
||||
# Ignore reverse relations
|
||||
continue
|
||||
|
||||
# Extract and provide the "limit_choices_to" filters
|
||||
# This is used to automatically filter AJAX requests
|
||||
serializer_info[name]['filters'] = relation.model_field.get_limit_choices_to()
|
||||
|
||||
for attr in extra_attributes:
|
||||
if attr not in serializer_info[name] and hasattr(relation.model_field, attr):
|
||||
serializer_info[name][attr] = getattr(relation.model_field, attr)
|
||||
|
||||
if name in model_default_values:
|
||||
serializer_info[name]['default'] = model_default_values[name]
|
||||
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
# Try to extract 'instance' information
|
||||
instance = None
|
||||
|
||||
# Extract extra information if an instance is available
|
||||
if hasattr(serializer, 'instance'):
|
||||
instance = serializer.instance
|
||||
|
||||
if instance is None and model_class is not None:
|
||||
# Attempt to find the instance based on kwargs lookup
|
||||
kwargs = getattr(self.view, 'kwargs', None)
|
||||
|
||||
if kwargs:
|
||||
pk = None
|
||||
|
||||
for field in ['pk', 'id', 'PK', 'ID']:
|
||||
if field in kwargs:
|
||||
pk = kwargs[field]
|
||||
break
|
||||
|
||||
if pk is not None:
|
||||
try:
|
||||
instance = model_class.objects.get(pk=pk)
|
||||
except (ValueError, model_class.DoesNotExist):
|
||||
pass
|
||||
|
||||
if instance is not None:
|
||||
"""If there is an instance associated with this API View, introspect that instance to find any specific API info."""
|
||||
|
||||
if hasattr(instance, 'api_instance_filters'):
|
||||
|
||||
instance_filters = instance.api_instance_filters()
|
||||
|
||||
for field_name, field_filters in instance_filters.items():
|
||||
|
||||
if field_name not in serializer_info.keys():
|
||||
# The field might be missing, but is added later on
|
||||
# This function seems to get called multiple times?
|
||||
continue
|
||||
|
||||
if 'instance_filters' not in serializer_info[field_name].keys():
|
||||
serializer_info[field_name]['instance_filters'] = {}
|
||||
|
||||
for key, value in field_filters.items():
|
||||
serializer_info[field_name]['instance_filters'][key] = value
|
||||
|
||||
return serializer_info
|
||||
|
||||
def get_field_info(self, field):
|
||||
"""Given an instance of a serializer field, return a dictionary of metadata about it.
|
||||
|
||||
We take the regular DRF metadata and add our own unique flavor
|
||||
"""
|
||||
# Run super method first
|
||||
field_info = super().get_field_info(field)
|
||||
|
||||
# If a default value is specified for the serializer field, add it!
|
||||
if 'default' not in field_info and field.default != empty:
|
||||
field_info['default'] = field.get_default()
|
||||
|
||||
# Force non-nullable fields to read as "required"
|
||||
# (even if there is a default value!)
|
||||
if not field.allow_null and not (hasattr(field, 'allow_blank') and field.allow_blank):
|
||||
field_info['required'] = True
|
||||
|
||||
# Introspect writable related fields
|
||||
if field_info['type'] == 'field' and not field_info['read_only']:
|
||||
|
||||
# If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
|
||||
if isinstance(field, serializers.PrimaryKeyRelatedField):
|
||||
model = field.queryset.model
|
||||
else:
|
||||
logger.debug("Could not extract model for:", field_info['label'], '->', field)
|
||||
model = None
|
||||
|
||||
if model:
|
||||
# Mark this field as "related", and point to the URL where we can get the data!
|
||||
field_info['type'] = 'related field'
|
||||
field_info['model'] = model._meta.model_name
|
||||
|
||||
# Special case for 'user' model
|
||||
if field_info['model'] == 'user':
|
||||
field_info['api_url'] = '/api/user/'
|
||||
else:
|
||||
field_info['api_url'] = model.get_api_url()
|
||||
|
||||
return field_info
|
||||
163
InvenTree/InvenTree/middleware.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""Middleware for InvenTree."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
|
||||
from django.http import HttpResponse
|
||||
from django.shortcuts import redirect
|
||||
from django.urls import Resolver404, include, re_path, reverse_lazy
|
||||
|
||||
from allauth_2fa.middleware import (AllauthTwoFactorMiddleware,
|
||||
BaseRequire2FAMiddleware)
|
||||
from error_report.middleware import ExceptionProcessor
|
||||
from rest_framework.authtoken.models import Token
|
||||
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.urls import frontendpatterns
|
||||
|
||||
logger = logging.getLogger("inventree")
|
||||
|
||||
|
||||
class AuthRequiredMiddleware(object):
|
||||
"""Check for user to be authenticated."""
|
||||
|
||||
def __init__(self, get_response):
|
||||
"""Save response object."""
|
||||
self.get_response = get_response
|
||||
|
||||
def __call__(self, request):
|
||||
"""Check if user needs to be authenticated and is.
|
||||
|
||||
Redirects to login if not authenticated.
|
||||
"""
|
||||
# Code to be executed for each request before
|
||||
# the view (and later middleware) are called.
|
||||
|
||||
assert hasattr(request, 'user')
|
||||
|
||||
# API requests are handled by the DRF library
|
||||
if request.path_info.startswith('/api/'):
|
||||
return self.get_response(request)
|
||||
|
||||
if not request.user.is_authenticated:
|
||||
"""
|
||||
Normally, a web-based session would use csrftoken based authentication.
|
||||
|
||||
However when running an external application (e.g. the InvenTree app or Python library),
|
||||
we must validate the user token manually.
|
||||
"""
|
||||
|
||||
authorized = False
|
||||
|
||||
# Allow static files to be accessed without auth
|
||||
# Important for e.g. login page
|
||||
if request.path_info.startswith('/static/'):
|
||||
authorized = True
|
||||
|
||||
# Unauthorized users can access the login page
|
||||
elif request.path_info.startswith('/accounts/'):
|
||||
authorized = True
|
||||
|
||||
elif 'Authorization' in request.headers.keys() or 'authorization' in request.headers.keys():
|
||||
auth = request.headers.get('Authorization', request.headers.get('authorization')).strip()
|
||||
|
||||
if auth.lower().startswith('token') and len(auth.split()) == 2:
|
||||
token_key = auth.split()[1]
|
||||
|
||||
# Does the provided token match a valid user?
|
||||
try:
|
||||
token = Token.objects.get(key=token_key)
|
||||
|
||||
# Provide the user information to the request
|
||||
request.user = token.user
|
||||
authorized = True
|
||||
|
||||
except Token.DoesNotExist:
|
||||
logger.warning(f"Access denied for unknown token {token_key}")
|
||||
|
||||
# No authorization was found for the request
|
||||
if not authorized:
|
||||
path = request.path_info
|
||||
|
||||
# List of URL endpoints we *do not* want to redirect to
|
||||
urls = [
|
||||
reverse_lazy('account_login'),
|
||||
reverse_lazy('account_logout'),
|
||||
reverse_lazy('admin:login'),
|
||||
reverse_lazy('admin:logout'),
|
||||
]
|
||||
|
||||
# Do not redirect requests to any of these paths
|
||||
paths_ignore = [
|
||||
'/api/',
|
||||
'/js/',
|
||||
'/media/',
|
||||
'/static/',
|
||||
]
|
||||
|
||||
if path not in urls and not any([path.startswith(p) for p in paths_ignore]):
|
||||
# Save the 'next' parameter to pass through to the login view
|
||||
|
||||
return redirect(f'{reverse_lazy("account_login")}?next={request.path}')
|
||||
|
||||
else:
|
||||
# Return a 401 (Unauthorized) response code for this request
|
||||
return HttpResponse('Unauthorized', status=401)
|
||||
|
||||
response = self.get_response(request)
|
||||
|
||||
return response
|
||||
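# --- Illustrative sketch (not in the original source): the header format which the
# middleware above accepts for token authentication. The host, path, token value and the
# use of the 'requests' package are all placeholders/assumptions. Safe to remove.
def _demo_token_request():
    """Hypothetical client-side call showing the expected Authorization header."""
    import requests
    return requests.get(
        "http://localhost:8000/index/",
        headers={"Authorization": "Token 0123456789abcdef"},
    )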
|
||||
|
||||
url_matcher = re_path('', include(frontendpatterns))
|
||||
|
||||
|
||||
class Check2FAMiddleware(BaseRequire2FAMiddleware):
|
||||
"""Check if user is required to have MFA enabled."""
|
||||
def require_2fa(self, request):
|
||||
"""Use setting to check if MFA should be enforced for frontend page."""
|
||||
try:
|
||||
if url_matcher.resolve(request.path[1:]):
|
||||
return InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA')
|
||||
except Resolver404:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
|
||||
"""This function ensures only frontend code triggers the MFA auth cycle."""
|
||||
def process_request(self, request):
|
||||
"""Check if requested url is forntend and enforce MFA check."""
|
||||
try:
|
||||
if not url_matcher.resolve(request.path[1:]):
|
||||
super().process_request(request)
|
||||
except Resolver404:
|
||||
pass
|
||||
|
||||
|
||||
class InvenTreeRemoteUserMiddleware(PersistentRemoteUserMiddleware):
|
||||
"""Middleware to check if HTTP-header based auth is enabled and to set it up."""
|
||||
header = settings.REMOTE_LOGIN_HEADER
|
||||
|
||||
def process_request(self, request):
|
||||
"""Check if proxy login is enabled."""
|
||||
if not settings.REMOTE_LOGIN:
|
||||
return
|
||||
|
||||
return super().process_request(request)
|
||||
|
||||
|
||||
class InvenTreeExceptionProcessor(ExceptionProcessor):
|
||||
"""Custom exception processor that respects blocked errors."""
|
||||
|
||||
def process_exception(self, request, exception):
|
||||
"""Check if kind is ignored before procesing."""
|
||||
kind, info, data = sys.exc_info()
|
||||
|
||||
# Check if the error is on the ignore list
|
||||
if kind in settings.IGNORED_ERRORS:
|
||||
return
|
||||
|
||||
return super().process_exception(request, exception)
|
||||
90
InvenTree/InvenTree/mixins.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Mixins for (API) views in the whole project."""
|
||||
|
||||
from bleach import clean
|
||||
from rest_framework import generics, status
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
||||
class CleanMixin():
|
||||
"""Model mixin class which cleans inputs."""
|
||||
|
||||
# Define a map of fields available for import
|
||||
SAFE_FIELDS = {}
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
"""Override to clean data before processing it."""
|
||||
serializer = self.get_serializer(data=self.clean_data(request.data))
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_create(serializer)
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
"""Override to clean data before processing it."""
|
||||
partial = kwargs.pop('partial', False)
|
||||
instance = self.get_object()
|
||||
serializer = self.get_serializer(instance, data=self.clean_data(request.data), partial=partial)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_update(serializer)
|
||||
|
||||
if getattr(instance, '_prefetched_objects_cache', None):
|
||||
# If 'prefetch_related' has been applied to a queryset, we need to
|
||||
# forcibly invalidate the prefetch cache on the instance.
|
||||
instance._prefetched_objects_cache = {}
|
||||
|
||||
return Response(serializer.data)
|
||||
|
||||
def clean_data(self, data: dict) -> dict:
|
||||
"""Clean / sanitize data.
|
||||
|
||||
This uses Mozilla's bleach under the hood to neutralise certain HTML tags by
|
||||
encoding them, so that e.g. script tags are rendered inert.
|
||||
The result can be longer than the input, and may make some character combinations
|
||||
look `ugly`. This prevents XSS at the server level.
|
||||
|
||||
Args:
|
||||
data (dict): Data that should be sanitized.
|
||||
|
||||
Returns:
|
||||
dict: Provided data, sanitized; still in the same order.
|
||||
"""
|
||||
clean_data = {}
|
||||
for k, v in data.items():
|
||||
if isinstance(v, str):
|
||||
ret = clean(v)
|
||||
elif isinstance(v, dict):
|
||||
ret = self.clean_data(v)
|
||||
else:
|
||||
ret = v
|
||||
clean_data[k] = ret
|
||||
return clean_data
|
||||
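# --- Illustrative sketch (not in the original source): the effect of the bleach-based
# sanitisation used by clean_data() - disallowed tags are escaped rather than executed.
# Exact output depends on the installed bleach version. Safe to remove.
def _demo_clean_markup():
    """Hypothetical helper showing the default bleach.clean() behaviour."""
    assert clean('<script>alert(1)</script>') == '&lt;script&gt;alert(1)&lt;/script&gt;'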
|
||||
|
||||
class ListAPI(generics.ListAPIView):
|
||||
"""View for list API."""
|
||||
|
||||
|
||||
class ListCreateAPI(CleanMixin, generics.ListCreateAPIView):
|
||||
"""View for list and create API."""
|
||||
|
||||
|
||||
class CreateAPI(CleanMixin, generics.CreateAPIView):
|
||||
"""View for create API."""
|
||||
|
||||
|
||||
class RetrieveAPI(generics.RetrieveAPIView):
|
||||
"""View for retreive API."""
|
||||
pass
|
||||
|
||||
|
||||
class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
|
||||
"""View for retrieve and update API."""
|
||||
pass
|
||||
|
||||
|
||||
class RetrieveUpdateDestroyAPI(CleanMixin, generics.RetrieveUpdateDestroyAPIView):
|
||||
"""View for retrieve, update and destroy API."""
|
||||
|
||||
|
||||
class UpdateAPI(CleanMixin, generics.UpdateAPIView):
|
||||
"""View for update API."""
|
||||
685
InvenTree/InvenTree/models.py
Normal file
@@ -0,0 +1,685 @@
|
||||
"""Generic models which provide extra functionality over base Django model types."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from datetime import datetime
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import models
|
||||
from django.db.models.signals import post_save, pre_delete
|
||||
from django.dispatch import receiver
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from error_report.models import Error
|
||||
from mptt.exceptions import InvalidMove
|
||||
from mptt.models import MPTTModel, TreeForeignKey
|
||||
|
||||
import InvenTree.format
|
||||
import InvenTree.helpers
|
||||
from common.models import InvenTreeSetting
|
||||
from InvenTree.fields import InvenTreeURLField
|
||||
|
||||
logger = logging.getLogger('inventree')
|
||||
|
||||
|
||||
def rename_attachment(instance, filename):
|
||||
"""Function for renaming an attachment file. The subdirectory for the uploaded file is determined by the implementing class.
|
||||
|
||||
Args:
|
||||
instance: Instance of a PartAttachment object
|
||||
filename: name of uploaded file
|
||||
|
||||
Returns:
|
||||
path to store file, format: '<subdir>/<id>/filename'
|
||||
"""
|
||||
# Construct a path to store a file attachment for a given model type
|
||||
return os.path.join(instance.getSubdir(), filename)
|
||||
|
||||
|
||||
class DataImportMixin(object):
|
||||
"""Model mixin class which provides support for 'data import' functionality.
|
||||
|
||||
Models which implement this mixin should provide information on the fields available for import
|
||||
"""
|
||||
|
||||
# Define a map of fields available for import
|
||||
IMPORT_FIELDS = {}
|
||||
|
||||
@classmethod
|
||||
def get_import_fields(cls):
|
||||
"""Return all available import fields.
|
||||
|
||||
Where information on a particular field is not explicitly provided,
|
||||
introspect the base model to (attempt to) find that information.
|
||||
"""
|
||||
fields = cls.IMPORT_FIELDS
|
||||
|
||||
for name, field in fields.items():
|
||||
|
||||
# Attempt to extract base field information from the model
|
||||
base_field = None
|
||||
|
||||
for f in cls._meta.fields:
|
||||
if f.name == name:
|
||||
base_field = f
|
||||
break
|
||||
|
||||
if base_field:
|
||||
if 'label' not in field:
|
||||
field['label'] = base_field.verbose_name
|
||||
|
||||
if 'help_text' not in field:
|
||||
field['help_text'] = base_field.help_text
|
||||
|
||||
fields[name] = field
|
||||
|
||||
return fields
|
||||
|
||||
@classmethod
|
||||
def get_required_import_fields(cls):
|
||||
"""Return all *required* import fields."""
|
||||
fields = {}
|
||||
|
||||
for name, field in cls.get_import_fields().items():
|
||||
required = field.get('required', False)
|
||||
|
||||
if required:
|
||||
fields[name] = field
|
||||
|
||||
return fields
|
||||
|
||||
|
||||
class ReferenceIndexingMixin(models.Model):
|
||||
"""A mixin for keeping track of numerical copies of the "reference" field.
|
||||
|
||||
Here, we attempt to convert a "reference" field value (char) to an integer,
|
||||
for performing fast natural sorting.
|
||||
|
||||
This requires extra database space (due to the extra table column),
|
||||
but is required as not all supported database backends provide equivalent casting.
|
||||
|
||||
This mixin adds a field named 'reference_int'.
|
||||
|
||||
- If the 'reference' field can be cast to an integer, it is stored here
|
||||
- If the 'reference' field *starts* with an integer, it is stored here
|
||||
- Otherwise, we store zero
|
||||
"""
|
||||
|
||||
# Name of the global setting which defines the required reference pattern for this model
|
||||
REFERENCE_PATTERN_SETTING = None
|
||||
|
||||
@classmethod
|
||||
def get_reference_pattern(cls):
|
||||
"""Returns the reference pattern associated with this model.
|
||||
|
||||
This is defined by a global setting object, specified by the REFERENCE_PATTERN_SETTING attribute
|
||||
"""
|
||||
|
||||
# By default, we return an empty string
|
||||
if cls.REFERENCE_PATTERN_SETTING is None:
|
||||
return ''
|
||||
|
||||
return InvenTreeSetting.get_setting(cls.REFERENCE_PATTERN_SETTING, create=False).strip()
|
||||
|
||||
@classmethod
|
||||
def get_reference_context(cls):
|
||||
"""Generate context data for generating the 'reference' field for this class.
|
||||
|
||||
- Returns a python dict object which contains the context data for formatting the reference string.
|
||||
- The default implementation provides some default context information
|
||||
"""
|
||||
|
||||
return {
|
||||
'ref': cls.get_next_reference(),
|
||||
'date': datetime.now(),
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_most_recent_item(cls):
|
||||
"""Return the item which is 'most recent'
|
||||
|
||||
In practice, this means the item with the highest reference value
|
||||
"""
|
||||
|
||||
query = cls.objects.all().order_by('-reference_int', '-pk')
|
||||
|
||||
if query.exists():
|
||||
return query.first()
|
||||
else:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def get_next_reference(cls):
|
||||
"""Return the next available reference value for this particular class."""
|
||||
|
||||
# Find the "most recent" item
|
||||
latest = cls.get_most_recent_item()
|
||||
|
||||
if not latest:
|
||||
# No existing items
|
||||
return 1
|
||||
|
||||
reference = latest.reference.strip()
|
||||
|
||||
try:
|
||||
reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
|
||||
except Exception:
|
||||
# If reference cannot be extracted using the pattern, try just the integer value
|
||||
reference = str(latest.reference_int)
|
||||
|
||||
# Attempt to perform 'intelligent' incrementing of the reference field
|
||||
incremented = InvenTree.helpers.increment(reference)
|
||||
|
||||
try:
|
||||
incremented = int(incremented)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
return incremented
|
||||
|
||||
@classmethod
|
||||
def generate_reference(cls):
|
||||
"""Generate the next 'reference' field based on specified pattern"""
|
||||
|
||||
fmt = cls.get_reference_pattern()
|
||||
ctx = cls.get_reference_context()
|
||||
|
||||
reference = None
|
||||
|
||||
attempts = set()
|
||||
|
||||
while reference is None:
|
||||
try:
|
||||
ref = fmt.format(**ctx)
|
||||
|
||||
if ref in attempts:
|
||||
# We are stuck in a loop!
|
||||
reference = ref
|
||||
break
|
||||
else:
|
||||
attempts.add(ref)
|
||||
|
||||
if cls.objects.filter(reference=ref).exists():
|
||||
# Handle case where we have duplicated an existing reference
|
||||
ctx['ref'] = InvenTree.helpers.increment(ctx['ref'])
|
||||
else:
|
||||
# We have found an 'unused' reference
|
||||
reference = ref
|
||||
break
|
||||
|
||||
except Exception:
|
||||
# If anything goes wrong, return the most recent reference
|
||||
recent = cls.get_most_recent_item()
|
||||
if recent:
|
||||
reference = recent.reference
|
||||
else:
|
||||
reference = ""
|
||||
|
||||
return reference
|
||||
|
||||
    @classmethod
    def validate_reference_pattern(cls, pattern):
        """Ensure that the provided pattern is valid"""

        ctx = cls.get_reference_context()

        try:
            info = InvenTree.format.parse_format_string(pattern)
        except Exception:
            raise ValidationError({
                "value": _("Improperly formatted pattern"),
            })

        # Check that only 'allowed' keys are provided
        for key in info.keys():
            if key not in ctx.keys():
                raise ValidationError({
                    "value": _("Unknown format key specified") + f": '{key}'"
                })

        # Check that the 'ref' variable is specified
        if 'ref' not in info.keys():
            raise ValidationError({
                'value': _("Missing required format key") + ": 'ref'"
            })

    @classmethod
    def validate_reference_field(cls, value):
        """Check that the provided 'reference' value matches the requisite pattern"""

        pattern = cls.get_reference_pattern()

        value = str(value).strip()

        if len(value) == 0:
            raise ValidationError(_("Reference field cannot be empty"))

        # An 'empty' pattern means no further validation is required
        if not pattern:
            return

        if not InvenTree.format.validate_string(value, pattern):
            raise ValidationError(_("Reference must match required pattern") + ": " + pattern)

        # Check that the reference field can be rebuilt
        cls.rebuild_reference_field(value, validate=True)

    class Meta:
        """Metaclass options. Abstract ensures no database table is created."""

        abstract = True

    @classmethod
    def rebuild_reference_field(cls, reference, validate=False):
        """Extract integer out of reference for sorting.

        If the 'integer' portion is buried somewhere 'within' the reference,
        we can first try to extract it using the pattern.

        Example:
        reference - BO-123-ABC
        pattern - BO-{ref}-???
        extracted - 123

        If we cannot extract using the pattern for some reason, fall back to the entire reference
        """

        try:
            # Extract named group based on provided pattern
            reference = InvenTree.format.extract_named_group('ref', reference, cls.get_reference_pattern())
        except Exception:
            pass

        reference_int = extract_int(reference)

        if validate:
            if reference_int > models.BigIntegerField.MAX_BIGINT:
                raise ValidationError({
                    "reference": _("Reference number is too large")
                })

        return reference_int

    reference_int = models.BigIntegerField(default=0)


def extract_int(reference, clip=0x7fffffff, allow_negative=False):
    """Extract an integer out of reference."""

    # Default value if we cannot convert to an integer
    ref_int = 0

    reference = str(reference).strip()

    # Ignore empty string
    if len(reference) == 0:
        return 0

    # Look at the start of the string - can it be "integerized"?
    result = re.match(r"^(\d+)", reference)

    if result and len(result.groups()) == 1:
        ref = result.groups()[0]
        try:
            ref_int = int(ref)
        except Exception:
            ref_int = 0
    else:
        # Look at the "end" of the string
        result = re.search(r'(\d+)$', reference)

        if result and len(result.groups()) == 1:
            ref = result.groups()[0]
            try:
                ref_int = int(ref)
            except Exception:
                ref_int = 0

    # Ensure that the returned values are within the range that can be stored in an IntegerField
    # Note: This will result in large values being "clipped"
    if clip is not None:
        if ref_int > clip:
            ref_int = clip
        elif ref_int < -clip:
            ref_int = -clip

    if not allow_negative and ref_int < 0:
        ref_int = abs(ref_int)

    return ref_int
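
# Illustrative behaviour of extract_int() above (sketch only, not part of the
# original file); the values follow directly from the regex and clipping logic:
#
#   extract_int("123-ABC")         -> 123          (leading digits)
#   extract_int("BO-0042")         -> 42           (trailing digits)
#   extract_int("no digits here")  -> 0            (default value)
#   extract_int("99999999999999")  -> 0x7fffffff   (clipped to the default limit)
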
class InvenTreeAttachment(models.Model):
    """Provides an abstracted class for managing file attachments.

    An attachment can be either an uploaded file, or an external URL

    Attributes:
        attachment: File
        comment: String descriptor for the attachment
        user: User associated with file upload
        upload_date: Date the file was uploaded
    """

    def getSubdir(self):
        """Return the subdirectory under which attachments should be stored.

        Note: Re-implement this for each subclass of InvenTreeAttachment
        """
        return "attachments"

    def save(self, *args, **kwargs):
        """Provide better validation error."""
        # Either 'attachment' or 'link' must be specified!
        if not self.attachment and not self.link:
            raise ValidationError({
                'attachment': _('Missing file'),
                'link': _('Missing external link'),
            })

        super().save(*args, **kwargs)

    def __str__(self):
        """Human name for attachment."""
        if self.attachment is not None:
            return os.path.basename(self.attachment.name)
        else:
            return str(self.link)

    attachment = models.FileField(upload_to=rename_attachment, verbose_name=_('Attachment'),
                                  help_text=_('Select file to attach'),
                                  blank=True, null=True
                                  )

    link = InvenTreeURLField(
        blank=True, null=True,
        verbose_name=_('Link'),
        help_text=_('Link to external URL')
    )

    comment = models.CharField(blank=True, max_length=100, verbose_name=_('Comment'), help_text=_('File comment'))

    user = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        blank=True, null=True,
        verbose_name=_('User'),
        help_text=_('User'),
    )

    upload_date = models.DateField(auto_now_add=True, null=True, blank=True, verbose_name=_('upload date'))

    @property
    def basename(self):
        """Base name/path for attachment."""
        if self.attachment:
            return os.path.basename(self.attachment.name)
        else:
            return None

    @basename.setter
    def basename(self, fn):
        """Function to rename the attachment file.

        - Filename cannot be empty
        - Filename cannot contain illegal characters
        - Filename must specify an extension
        - Filename cannot match an existing file
        """
        fn = fn.strip()

        if len(fn) == 0:
            raise ValidationError(_('Filename must not be empty'))

        attachment_dir = settings.MEDIA_ROOT.joinpath(self.getSubdir())
        old_file = settings.MEDIA_ROOT.joinpath(self.attachment.name)
        new_file = settings.MEDIA_ROOT.joinpath(self.getSubdir(), fn).resolve()

        # Check that there are no directory tricks going on...
        if new_file.parent != attachment_dir:
            logger.error(f"Attempted to rename attachment outside valid directory: '{new_file}'")
            raise ValidationError(_("Invalid attachment directory"))

        # Ignore further checks if the filename is not actually being renamed
        if new_file == old_file:
            return

        forbidden = ["'", '"', "#", "@", "!", "&", "^", "<", ">", ":", ";", "/", "\\", "|", "?", "*", "%", "~", "`"]

        for c in forbidden:
            if c in fn:
                raise ValidationError(_(f"Filename contains illegal character '{c}'"))

        if len(fn.split('.')) < 2:
            raise ValidationError(_("Filename missing extension"))

        if not old_file.exists():
            logger.error(f"Trying to rename attachment '{old_file}' which does not exist")
            return

        if new_file.exists():
            raise ValidationError(_("Attachment with this filename already exists"))

        try:
            os.rename(old_file, new_file)
            self.attachment.name = os.path.join(self.getSubdir(), fn)
            self.save()
        except Exception:
            raise ValidationError(_("Error renaming file"))

    class Meta:
        """Metaclass options. Abstract ensures no database table is created."""

        abstract = True
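
# Sketch of how a concrete attachment model is expected to subclass
# InvenTreeAttachment (illustrative only; "WidgetAttachment" and the
# "widget_files" directory are hypothetical names, not part of this file):
#
#   class WidgetAttachment(InvenTreeAttachment):
#       def getSubdir(self):
#           # Uploaded files then land under MEDIA_ROOT/widget_files/
#           return "widget_files"
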
class InvenTreeTree(MPTTModel):
    """Provides an abstracted self-referencing tree model for data categories.

    - Each Category has one parent Category, which can be blank (for a top-level Category).
    - Each Category can have zero-or-more child Categor(y/ies)

    Attributes:
        name: brief name
        description: longer form description
        parent: The item immediately above this one. An item with a null parent is a top-level item
    """

    def api_instance_filters(self):
        """Instance filters for InvenTreeTree models."""
        return {
            'parent': {
                'exclude_tree': self.pk,
            }
        }

    def save(self, *args, **kwargs):
        """Custom save method for InvenTreeTree abstract model"""

        try:
            super().save(*args, **kwargs)
        except InvalidMove:
            # Provide better error for parent selection
            raise ValidationError({
                'parent': _("Invalid choice"),
            })

        # Re-calculate the 'pathstring' field
        pathstring = InvenTree.helpers.constructPathString(
            [item.name for item in self.path]
        )

        if pathstring != self.pathstring:
            self.pathstring = pathstring
            super().save(force_update=True)

    class Meta:
        """Metaclass defines extra model properties."""

        abstract = True

        # Names must be unique at any given level in the tree
        unique_together = ('name', 'parent')

    class MPTTMeta:
        """Set insert order."""
        order_insertion_by = ['name']

    name = models.CharField(
        blank=False,
        max_length=100,
        verbose_name=_("Name"),
        help_text=_("Name"),
    )

    description = models.CharField(
        blank=True,
        max_length=250,
        verbose_name=_("Description"),
        help_text=_("Description (optional)")
    )

    # When a category is deleted, graft the children onto its parent
    parent = TreeForeignKey('self',
                            on_delete=models.DO_NOTHING,
                            blank=True,
                            null=True,
                            verbose_name=_("parent"),
                            related_name='children')

    # The 'pathstring' field is calculated each time the model is saved
    pathstring = models.CharField(
        blank=True,
        max_length=250,
        verbose_name=_('Path'),
        help_text=_('Path')
    )

    @property
    def item_count(self):
        """Return the number of items which exist *under* this node in the tree.

        Here an 'item' is considered to be the 'leaf' at the end of each branch,
        and the exact nature here will depend on the class implementation.

        The default implementation returns zero
        """
        return 0

    def getUniqueParents(self):
        """Return a flat set of all parent items that exist above this node.

        If any parents are repeated (which would be very bad!), the process is halted
        """
        return self.get_ancestors()

    def getUniqueChildren(self, include_self=True):
        """Return a flat set of all child items that exist under this node.

        If any child items are repeated, the repetitions are omitted.
        """
        return self.get_descendants(include_self=include_self)

    @property
    def has_children(self):
        """True if there are any children under this item."""
        return self.getUniqueChildren(include_self=False).count() > 0

    def getAcceptableParents(self):
        """Returns a list of acceptable parent items within this model. Acceptable parents are ones which are not underneath this item.

        Setting the parent of an item to its own child results in recursion.
        """
        contents = ContentType.objects.get_for_model(type(self))

        available = contents.get_all_objects_for_this_type()

        # List of child IDs
        childs = self.getUniqueChildren()

        acceptable = [None]

        for a in available:
            if a.id not in childs:
                acceptable.append(a)

        return acceptable

    @property
    def parentpath(self):
        """Get the parent path of this category.

        Returns:
            List of category names from the top level to the parent of this category
        """
        return [a for a in self.get_ancestors()]

    @property
    def path(self):
        """Get the complete path of this category.

        e.g. ["Top", "Second", "Third", "This"]

        Returns:
            List of category names from the top level to this category
        """
        return self.parentpath + [self]

    def __str__(self):
        """String representation of a category is the full path to that category."""
        return "{path} - {desc}".format(path=self.pathstring, desc=self.description)


@receiver(pre_delete, sender=InvenTreeTree, dispatch_uid='tree_pre_delete_log')
def before_delete_tree_item(sender, instance, using, **kwargs):
    """Receives pre_delete signal from InvenTreeTree object.

    Before an item is deleted, update each child object to point to the parent of the object being deleted.
    """
    # Update each tree item below this one
    for child in instance.children.all():
        child.parent = instance.parent
        child.save()


@receiver(post_save, sender=Error, dispatch_uid='error_post_save_notification')
def after_error_logged(sender, instance: Error, created: bool, **kwargs):
    """Callback when a server error is logged.

    - Send a UI notification to all users with staff status
    """

    if created:
        try:
            import common.notifications

            users = get_user_model().objects.filter(is_staff=True)

            link = InvenTree.helpers.construct_absolute_url(
                reverse('admin:error_report_error_change', kwargs={'object_id': instance.pk})
            )

            context = {
                'error': instance,
                'name': _('Server Error'),
                'message': _('An error has been logged by the server.'),
                'link': link
            }

            common.notifications.trigger_notification(
                instance,
                'inventree.error_log',
                context=context,
                targets=users,
                delivery_methods=set([common.notifications.UIMessageNotification]),
            )

        except Exception as exc:
            """We do not want to throw an exception while reporting an exception"""
            logger.error(exc)


InvenTree/InvenTree/permissions.py (new file, 65 lines)
@@ -0,0 +1,65 @@
"""Permission set for InvenTree."""

from rest_framework import permissions

import users.models


class RolePermission(permissions.BasePermission):
    """Role mixin for API endpoints, allowing us to specify the user "role" which is required for certain operations.

    Each endpoint can have one or more of the following actions:
    - GET
    - POST
    - PUT
    - PATCH
    - DELETE

    Specify the required "role" using the role_required attribute.

    e.g.

    role_required = "part"

    The RolePermission class will then determine if the user has the required permission
    to perform the specified action.

    For example, a DELETE action will be rejected unless the user has the "part.delete" permission
    """

    def has_permission(self, request, view):
        """Determine if the current user has the specified permissions."""
        user = request.user

        # Superuser can do it all
        if user.is_superuser:
            return True

        # Map the request method to a permission type
        rolemap = {
            'GET': 'view',
            'OPTIONS': 'view',
            'POST': 'add',
            'PUT': 'change',
            'PATCH': 'change',
            'DELETE': 'delete',
        }

        permission = rolemap[request.method]

        try:
            # Extract the model name associated with this request
            model = view.serializer_class.Meta.model

            app_label = model._meta.app_label
            model_name = model._meta.model_name

            table = f"{app_label}_{model_name}"
        except AttributeError:
            # We will assume that if the serializer class does *not* have a Meta,
            # then we don't need a permission
            return True

        result = users.models.RuleSet.check_table_permission(user, table, permission)

        return result
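
# Usage sketch (illustrative only; "PartList" and "PartSerializer" are
# hypothetical names used to show the wiring, they are not defined here):
#
#   class PartList(generics.ListCreateAPIView):
#       serializer_class = PartSerializer
#       permission_classes = [RolePermission]
#
# With this in place, a GET maps to the 'view' permission and a POST maps to
# the 'add' permission for the "part_part" table, per the rolemap above.
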


InvenTree/InvenTree/ready.py (new file, 50 lines)
@@ -0,0 +1,50 @@
"""Functions to check if certain parts of InvenTree are ready."""

import sys


def isInTestMode():
    """Returns True if the database is in testing mode."""
    return 'test' in sys.argv


def isImportingData():
    """Returns True if the database is currently importing data, e.g. the 'loaddata' command is being performed."""
    return 'loaddata' in sys.argv


def canAppAccessDatabase(allow_test=False):
    """Returns True if the apps.py file can access database records.

    There are some circumstances where we don't want the ready function in apps.py
    to touch the database
    """
    # If any of the following management commands are being executed,
    # prevent custom "on load" code from running!
    excluded_commands = [
        'flush',
        'loaddata',
        'dumpdata',
        'makemigrations',
        'migrate',
        'check',
        'shell',
        'createsuperuser',
        'wait_for_db',
        'prerender',
        'rebuild_models',
        'rebuild_thumbnails',
        'collectstatic',
        'makemessages',
        'compilemessages',
    ]

    if not allow_test:
        # Override for testing mode?
        excluded_commands.append('test')

    for cmd in excluded_commands:
        if cmd in sys.argv:
            return False

    return True
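
# Typical usage sketch for the helpers above (illustrative only; the AppConfig
# shown here is hypothetical and do_startup_tasks() is a made-up helper):
#
#   class ExampleAppConfig(AppConfig):
#       def ready(self):
#           if canAppAccessDatabase(allow_test=True):
#               do_startup_tasks()   # safe to touch database records here
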


InvenTree/InvenTree/serializers.py (new file, 625 lines)
@@ -0,0 +1,625 @@
"""Serializers used in various InvenTree apps."""

import os
from collections import OrderedDict
from decimal import Decimal

from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError as DjangoValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _

import tablib
from djmoney.contrib.django_rest_framework.fields import MoneyField
from djmoney.money import Money
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.fields import empty
from rest_framework.serializers import DecimalField
from rest_framework.utils import model_meta

from common.models import InvenTreeSetting
from InvenTree.fields import InvenTreeRestURLField
from InvenTree.helpers import download_image_from_url


class InvenTreeMoneySerializer(MoneyField):
    """Custom serializer for 'MoneyField', which ensures that passed values are numerically valid.

    Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
    """

    def __init__(self, *args, **kwargs):
        """Override default values."""
        kwargs["max_digits"] = kwargs.get("max_digits", 19)
        kwargs["decimal_places"] = kwargs.get("decimal_places", 4)
        kwargs["required"] = kwargs.get("required", False)

        super().__init__(*args, **kwargs)

    def get_value(self, data):
        """Test that the returned amount is a valid Decimal."""
        amount = super(DecimalField, self).get_value(data)

        # Convert an empty string to None
        if len(str(amount).strip()) == 0:
            amount = None

        try:
            if amount is not None and amount is not empty:
                amount = Decimal(amount)
        except Exception:
            raise ValidationError({
                self.field_name: [_("Must be a valid number")],
            })

        currency = data.get(get_currency_field_name(self.field_name), self.default_currency)

        if currency and amount is not None and not isinstance(amount, MONEY_CLASSES) and amount is not empty:
            return Money(amount, currency)

        return amount


class InvenTreeModelSerializer(serializers.ModelSerializer):
    """Inherits the standard Django ModelSerializer class, but also ensures that the underlying model class data are checked on validation."""

    # Switch out URLField mapping
    serializer_field_mapping = {
        **serializers.ModelSerializer.serializer_field_mapping,
        models.URLField: InvenTreeRestURLField,
    }

    def __init__(self, instance=None, data=empty, **kwargs):
        """Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
        # If instance is None, we are creating a new instance
        if instance is None and data is not empty:

            if data is None:
                data = OrderedDict()
            else:
                new_data = OrderedDict()
                new_data.update(data)

                data = new_data

            # Add missing fields which have default values
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():

                """
                Update the field IF (and ONLY IF):

                - The field has a specified default value
                - The field does not already have a value set
                """
                if field.has_default() and field_name not in data:

                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            continue

                    data[field_name] = value

        super().__init__(instance, data, **kwargs)

    def get_initial(self):
        """Construct initial data for the serializer.

        Use the 'default' values specified by the django model definition
        """
        initials = super().get_initial().copy()

        # Are we creating a new instance?
        if self.instance is None:
            ModelClass = self.Meta.model

            fields = model_meta.get_field_info(ModelClass)

            for field_name, field in fields.fields.items():

                if field.has_default() and field_name not in initials:

                    value = field.default

                    # Account for callable functions
                    if callable(value):
                        try:
                            value = value()
                        except Exception:
                            continue

                    initials[field_name] = value

        return initials

    def save(self, **kwargs):
        """Catch any django ValidationError thrown at the moment `save` is called, and re-throw as a DRF ValidationError."""
        try:
            super().save(**kwargs)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return self.instance

    def update(self, instance, validated_data):
        """Catch any django ValidationError, and re-throw as a DRF ValidationError."""
        try:
            instance = super().update(instance, validated_data)
        except (ValidationError, DjangoValidationError) as exc:
            raise ValidationError(detail=serializers.as_serializer_error(exc))

        return instance

    def run_validation(self, data=empty):
        """Perform serializer validation.

        In addition to running validators on the serializer fields,
        this class ensures that the underlying model is also validated.
        """
        # Run any native validation checks first (may raise a ValidationError)
        data = super().run_validation(data)

        # Now ensure the underlying model is correct

        if not hasattr(self, 'instance') or self.instance is None:
            # No instance exists (we are creating a new one)
            instance = self.Meta.model(**data)
        else:
            # Instance already exists (we are updating!)
            instance = self.instance

            # Update instance fields
            for attr, value in data.items():
                try:
                    setattr(instance, attr, value)
                except (ValidationError, DjangoValidationError) as exc:
                    raise ValidationError(detail=serializers.as_serializer_error(exc))

        # Run a 'full_clean' on the model.
        # Note that by default, DRF does *not* perform full model validation!
        try:
            instance.full_clean()
        except (ValidationError, DjangoValidationError) as exc:

            data = exc.message_dict

            # Change '__all__' key (django style) to 'non_field_errors' (DRF style)
            if '__all__' in data:
                data['non_field_errors'] = data['__all__']
                del data['__all__']

            raise ValidationError(data)

        return data


class UserSerializer(InvenTreeModelSerializer):
    """Serializer for a User."""

    class Meta:
        """Metaclass defines serializer fields."""
        model = User
        fields = [
            'pk',
            'username',
            'first_name',
            'last_name',
            'email'
        ]


class InvenTreeAttachmentSerializerField(serializers.FileField):
    """Override the DRF native FileField serializer, to remove the leading server path.

    For example, the FileField might supply something like:

    http://127.0.0.1:8000/media/foo/bar.jpg

    Whereas we wish to return:

    /media/foo/bar.jpg

    If the server process is serving the data at 127.0.0.1,
    but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
    then an attachment which prefixes the "address" of the internal server
    will not be accessible from the outside world.
    """

    def to_representation(self, value):
        """To json-serializable type."""
        if not value:
            return None

        return os.path.join(str(settings.MEDIA_URL), str(value))


class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
    """Special case of an InvenTreeModelSerializer, which handles an "attachment" model.

    The only real addition here is that we support "renaming" of the attachment file.
    """

    user_detail = UserSerializer(source='user', read_only=True, many=False)

    attachment = InvenTreeAttachmentSerializerField(
        required=False,
        allow_null=False,
    )

    # The 'filename' field must be present in the serializer
    filename = serializers.CharField(
        label=_('Filename'),
        required=False,
        source='basename',
        allow_blank=False,
    )


class InvenTreeImageSerializerField(serializers.ImageField):
    """Custom image serializer.

    On upload, validate that the file is a valid image file
    """

    def to_representation(self, value):
        """To json-serializable type."""
        if not value:
            return None

        return os.path.join(str(settings.MEDIA_URL), str(value))


class InvenTreeDecimalField(serializers.FloatField):
    """Custom serializer for decimal fields.

    Solves the following issues:
    - The normal DRF DecimalField renders values with trailing zeros
    - Using a FloatField can result in rounding issues: https://code.djangoproject.com/ticket/30290
    """

    def to_internal_value(self, data):
        """Convert to python type."""
        # Convert the value to a string, and then a decimal
        try:
            return Decimal(str(data))
        except Exception:
            raise serializers.ValidationError(_("Invalid value"))


class DataFileUploadSerializer(serializers.Serializer):
    """Generic serializer for uploading a data file, and extracting a dataset.

    - Validates uploaded file
    - Extracts column names
    - Extracts data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = [
            'data_file',
        ]

    data_file = serializers.FileField(
        label=_("Data File"),
        help_text=_("Select data file for upload"),
        required=True,
        allow_empty_file=False,
    )

    def validate_data_file(self, data_file):
        """Perform validation checks on the uploaded data file."""
        self.filename = data_file.name

        name, ext = os.path.splitext(data_file.name)

        # Remove the leading . from the extension
        ext = ext[1:]

        accepted_file_types = [
            'xls', 'xlsx',
            'csv', 'tsv',
            'xml',
        ]

        if ext not in accepted_file_types:
            raise serializers.ValidationError(_("Unsupported file type"))

        # Impose a 50MB limit on uploaded BOM files
        max_upload_file_size = 50 * 1024 * 1024

        if data_file.size > max_upload_file_size:
            raise serializers.ValidationError(_("File is too large"))

        # Read file data into memory (bytes object)
        try:
            data = data_file.read()
        except Exception as e:
            raise serializers.ValidationError(str(e))

        if ext in ['csv', 'tsv', 'xml']:
            try:
                data = data.decode()
            except Exception as e:
                raise serializers.ValidationError(str(e))

        # Convert to a tablib dataset (we expect headers)
        try:
            self.dataset = tablib.Dataset().load(data, ext, headers=True)
        except Exception as e:
            raise serializers.ValidationError(str(e))

        if len(self.dataset.headers) == 0:
            raise serializers.ValidationError(_("No columns found in file"))

        if len(self.dataset) == 0:
            raise serializers.ValidationError(_("No data rows found in file"))

        return data_file

    def match_column(self, column_name, field_names, exact=False):
        """Attempt to match a column name (from the file) to a field (defined in the model).

        Order of matching is:
        - Direct match
        - Case insensitive match
        - Fuzzy match
        """
        if not column_name:
            return None

        column_name = str(column_name).strip()

        column_name_lower = column_name.lower()

        if column_name in field_names:
            return column_name

        for field_name in field_names:
            if field_name.lower() == column_name_lower:
                return field_name

        if exact:
            # Finished available 'exact' matches
            return None

        # TODO: Fuzzy pattern matching for column names

        # No matches found
        return None
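
    # Matching behaviour of match_column() above, illustrated (sketch only,
    # not part of the original file):
    #
    #   match_column("IPN", ["IPN", "name"])                      -> "IPN"   (direct match)
    #   match_column("ipn", ["IPN", "name"])                      -> "IPN"   (case-insensitive match)
    #   match_column("Part Number", ["IPN", "name"], exact=True)  -> None    (fuzzy matching not yet implemented)
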
    def extract_data(self):
        """Returns dataset extracted from the file."""
        # Provide a dict of available import fields for the model
        model_fields = {}

        # Keep track of columns we have already extracted
        matched_columns = set()

        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                pass

        # Extract a list of valid model field names
        model_field_names = [key for key in model_fields.keys()]

        # Provide a dict of available columns from the dataset
        file_columns = {}

        for header in self.dataset.headers:
            column = {}

            # Attempt to "match" file columns to model fields
            match = self.match_column(header, model_field_names, exact=True)

            if match is not None and match not in matched_columns:
                matched_columns.add(match)
                column['value'] = match
            else:
                column['value'] = None

            file_columns[header] = column

        return {
            'file_fields': file_columns,
            'model_fields': model_fields,
            'rows': [row.values() for row in self.dataset.dict],
            'filename': self.filename,
        }

    def save(self):
        """Empty override for save."""
        ...


class DataFileExtractSerializer(serializers.Serializer):
    """Generic serializer for extracting data from an imported dataset.

    - User provides an array of matched headers
    - User provides an array of raw data rows
    """

    # Implementing class should register a target model (database model) to be used for import
    TARGET_MODEL = None

    class Meta:
        """Metaclass options."""

        fields = [
            'columns',
            'rows',
        ]

    # Mapping of columns
    columns = serializers.ListField(
        child=serializers.CharField(
            allow_blank=True,
        ),
    )

    rows = serializers.ListField(
        child=serializers.ListField(
            child=serializers.CharField(
                allow_blank=True,
                allow_null=True,
            ),
        )
    )

    def validate(self, data):
        """Clean data."""
        data = super().validate(data)

        self.columns = data.get('columns', [])
        self.rows = data.get('rows', [])

        if len(self.rows) == 0:
            raise serializers.ValidationError(_("No data rows provided"))

        if len(self.columns) == 0:
            raise serializers.ValidationError(_("No data columns supplied"))

        self.validate_extracted_columns()

        return data

    @property
    def data(self):
        """Returns current data."""
        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                model_fields = {}

        rows = []

        for row in self.rows:
            """Optionally pre-process each row, before sending back to the client."""

            processed_row = self.process_row(self.row_to_dict(row))

            if processed_row:
                rows.append({
                    "original": row,
                    "data": processed_row,
                })

        return {
            'fields': model_fields,
            'columns': self.columns,
            'rows': rows,
        }

    def process_row(self, row):
        """Process a 'row' of data, which is a mapped column:value dict.

        Returns either a mapped column:value dict, or None.

        If the function returns None, the row is ignored!
        """
        # Default implementation simply returns the original row data
        return row

    def row_to_dict(self, row):
        """Convert a "row" to a named data dict."""
        row_dict = {
            'errors': {},
        }

        for idx, value in enumerate(row):

            if idx < len(self.columns):
                col = self.columns[idx]

                if col:
                    row_dict[col] = value

        return row_dict

    def validate_extracted_columns(self):
        """Perform custom validation of header mapping."""
        if self.TARGET_MODEL:
            try:
                model_fields = self.TARGET_MODEL.get_import_fields()
            except Exception:
                model_fields = {}

        cols_seen = set()

        for name, field in model_fields.items():

            required = field.get('required', False)

            # Check for missing required columns
            if required:
                if name not in self.columns:
                    raise serializers.ValidationError(_(f"Missing required column: '{name}'"))

        for col in self.columns:

            if not col:
                continue

            # Check for duplicated columns
            if col in cols_seen:
                raise serializers.ValidationError(_(f"Duplicate column: '{col}'"))

            cols_seen.add(col)

    def save(self):
        """No "save" action for this serializer."""
        pass


class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
    """Mixin class which allows downloading an 'image' from a remote URL.

    Adds the optional, write-only `remote_image` field to the serializer
    """

    remote_image = serializers.URLField(
        required=False,
        allow_blank=False,
        write_only=True,
        label=_("URL"),
        help_text=_("URL of remote image file"),
    )

    def validate_remote_image(self, url):
        """Perform custom validation for the remote image URL.

        - Attempt to download the image and store it against this object instance
        - Catches and re-throws any errors
        """

        if not url:
            return

        if not InvenTreeSetting.get_setting('INVENTREE_DOWNLOAD_FROM_URL'):
            raise ValidationError(_("Downloading images from remote URL is not enabled"))

        try:
            self.remote_image_file = download_image_from_url(url)
        except Exception as exc:
            self.remote_image_file = None
            raise ValidationError(str(exc))

        return url
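
# Sketch of how RemoteImageMixin is intended to be combined with a model
# serializer (illustrative only; "CompanyImageSerializer" is a hypothetical
# name, not part of this file):
#
#   class CompanyImageSerializer(RemoteImageMixin, InvenTreeModelSerializer):
#       class Meta:
#           model = Company
#           fields = ['image', 'remote_image']
#
# When 'remote_image' is supplied, validate_remote_image() downloads the file
# (if INVENTREE_DOWNLOAD_FROM_URL is enabled) and stores it on the serializer
# as self.remote_image_file for the view to save.
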


InvenTree/InvenTree/settings.py (new file, 846 lines)
@@ -0,0 +1,846 @@
"""Django settings for InvenTree project.

In practice the settings in this file should not be adjusted,
instead settings can be configured in the config.yaml file
located in the top level project directory.

This allows implementation configuration to be hidden from source control,
as well as separate configuration parameters from the more complex
database setup in this file.
"""

import logging
import os
import socket
import sys
from pathlib import Path

import django.conf.locale
from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.files.storage import default_storage
from django.http import Http404
from django.utils.translation import gettext_lazy as _

import moneyed
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

from . import config
from .config import get_boolean_setting, get_setting

# Determine if we are running in "test" mode e.g. "manage.py test"
TESTING = 'test' in sys.argv

# Are environment variables manipulated by tests? Needs to be set by testing code
TESTING_ENV = False

# New requirement for django 3.2+
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'

# Build paths inside the project like this: BASE_DIR.joinpath(...)
BASE_DIR = config.get_base_dir()

# Load configuration data
CONFIG = config.load_config_data()

# Default action is to run the system in Debug mode
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = get_boolean_setting('INVENTREE_DEBUG', 'debug', True)

# Configure logging settings
log_level = get_setting('INVENTREE_LOG_LEVEL', 'log_level', 'WARNING')

logging.basicConfig(
    level=log_level,
    format="%(asctime)s %(levelname)s %(message)s",
)

if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
    log_level = 'WARNING'  # pragma: no cover

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
    },
    'root': {
        'handlers': ['console'],
        'level': log_level,
    },
    'filters': {
        'require_not_maintenance_mode_503': {
            '()': 'maintenance_mode.logging.RequireNotMaintenanceMode503',
        },
    },
}

# Get a logger instance for this setup file
logger = logging.getLogger("inventree")

# Load SECRET_KEY
SECRET_KEY = config.get_secret_key()

# The filesystem location for served static files
STATIC_ROOT = config.get_static_dir()

# The filesystem location for uploaded media files
MEDIA_ROOT = config.get_media_dir()

# List of allowed hosts (default = allow all)
ALLOWED_HOSTS = get_setting(
    config_key='allowed_hosts',
    default_value=['*']
)

# Cross Origin Resource Sharing (CORS) options

# Only allow CORS access to API
CORS_URLS_REGEX = r'^/api/.*$'

# Extract CORS options from configuration file
CORS_ORIGIN_ALLOW_ALL = get_boolean_setting(
    config_key='cors.allow_all',
    default_value=False,
)

CORS_ORIGIN_WHITELIST = get_setting(
    config_key='cors.whitelist',
    default_value=[]
)

# Web URL endpoint for served static files
STATIC_URL = '/static/'

STATICFILES_DIRS = []

# Translated Template settings
STATICFILES_I18_PREFIX = 'i18n'
STATICFILES_I18_SRC = BASE_DIR.joinpath('templates', 'js', 'translated')
STATICFILES_I18_TRG = BASE_DIR.joinpath('InvenTree', 'static_i18n')
STATICFILES_DIRS.append(STATICFILES_I18_TRG)
STATICFILES_I18_TRG = STATICFILES_I18_TRG.joinpath(STATICFILES_I18_PREFIX)

STATFILES_I18_PROCESSORS = [
    'InvenTree.context.status_codes',
]

# Color Themes Directory
STATIC_COLOR_THEMES_DIR = STATIC_ROOT.joinpath('css', 'color-themes')

# Web URL endpoint for served media files
MEDIA_URL = '/media/'

# Application definition

INSTALLED_APPS = [
    # Admin site integration
    'django.contrib.admin',

    # InvenTree apps
    'build.apps.BuildConfig',
    'common.apps.CommonConfig',
    'company.apps.CompanyConfig',
    'label.apps.LabelConfig',
    'order.apps.OrderConfig',
    'part.apps.PartConfig',
    'report.apps.ReportConfig',
    'stock.apps.StockConfig',
    'users.apps.UsersConfig',
    'plugin.apps.PluginAppConfig',
    'InvenTree.apps.InvenTreeConfig',  # InvenTree app runs last

    # Core django modules
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'user_sessions',  # db user sessions
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites',

    # Maintenance
    'maintenance_mode',

    # Third party add-ons
    'django_filters',  # Extended filter functionality
    'rest_framework',  # DRF (Django Rest Framework)
    'rest_framework.authtoken',  # Token authentication for API
    'corsheaders',  # Cross-origin Resource Sharing for DRF
    'crispy_forms',  # Improved form rendering
    'import_export',  # Import / export tables to file
    'django_cleanup.apps.CleanupConfig',  # Automatically delete orphaned MEDIA files
    'mptt',  # Modified Preorder Tree Traversal
    'markdownify',  # Markdown template rendering
    'djmoney',  # django-money integration
    'djmoney.contrib.exchange',  # django-money exchange rates
    'error_report',  # Error reporting in the admin interface
    'django_q',
    'formtools',  # Form wizard tools

    'allauth',  # Base app for SSO
    'allauth.account',  # Extend user with accounts
    'allauth.socialaccount',  # Use 'social' providers

    'django_otp',  # OTP is needed for MFA - base package
    'django_otp.plugins.otp_totp',  # Time based OTP
    'django_otp.plugins.otp_static',  # Backup codes

    'allauth_2fa',  # MFA flow for allauth
]

MIDDLEWARE = CONFIG.get('middleware', [
    'django.middleware.security.SecurityMiddleware',
    'x_forwarded_for.middleware.XForwardedForMiddleware',
    'user_sessions.middleware.SessionMiddleware',  # db user sessions
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'InvenTree.middleware.InvenTreeRemoteUserMiddleware',  # Remote / proxy auth
    'django_otp.middleware.OTPMiddleware',  # MFA support
    'InvenTree.middleware.CustomAllauthTwoFactorMiddleware',  # Flow control for allauth
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'InvenTree.middleware.AuthRequiredMiddleware',
    'InvenTree.middleware.Check2FAMiddleware',  # Check if the user should be forced to use MFA
    'maintenance_mode.middleware.MaintenanceModeMiddleware',
    'InvenTree.middleware.InvenTreeExceptionProcessor',  # Error reporting
])

AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [
    'django.contrib.auth.backends.RemoteUserBackend',  # proxy login
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',  # SSO login via external providers
])

DEBUG_TOOLBAR_ENABLED = DEBUG and CONFIG.get('debug_toolbar', False)

# If the debug toolbar is enabled, add the modules
if DEBUG_TOOLBAR_ENABLED:  # pragma: no cover
    logger.info("Running with DEBUG_TOOLBAR enabled")
    INSTALLED_APPS.append('debug_toolbar')
    MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')

    DEBUG_TOOLBAR_CONFIG = {
        'RESULTS_CACHE_SIZE': 100,
        'OBSERVE_REQUEST_CALLBACK': lambda x: False,
    }

# Internal IP addresses allowed to see the debug toolbar
INTERNAL_IPS = [
    '127.0.0.1',
]

# Internal flag to determine if we are running in docker mode
DOCKER = get_boolean_setting('INVENTREE_DOCKER', default_value=False)

if DOCKER:  # pragma: no cover
    # Internal IP addresses are different when running under docker
    hostname, ___, ips = socket.gethostbyname_ex(socket.gethostname())
    INTERNAL_IPS = [ip[: ip.rfind(".")] + ".1" for ip in ips] + ["127.0.0.1", "10.0.2.2"]

# Allow secure http developer server in debug mode
if DEBUG:
    INSTALLED_APPS.append('sslserver')

# InvenTree URL configuration

# Base URL for admin pages (default="admin")
INVENTREE_ADMIN_URL = get_setting(
    'INVENTREE_ADMIN_URL',
    config_key='admin_url',
    default_value='admin'
)

ROOT_URLCONF = 'InvenTree.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            BASE_DIR.joinpath('templates'),
            # Allow templates in the reporting directory to be accessed
            MEDIA_ROOT.joinpath('report'),
            MEDIA_ROOT.joinpath('label'),
        ],
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.template.context_processors.i18n',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                # Custom InvenTree context processors
                'InvenTree.context.health_status',
                'InvenTree.context.status_codes',
                'InvenTree.context.user_roles',
            ],
            'loaders': [(
                'django.template.loaders.cached.Loader', [
                    'plugin.template.PluginTemplateLoader',
                    'django.template.loaders.filesystem.Loader',
                    'django.template.loaders.app_directories.Loader',
                ])
            ],
        },
    },
]

if DEBUG_TOOLBAR_ENABLED:  # pragma: no cover
    # Note that the APP_DIRS value must be set when using debug_toolbar
    # But this will kill template loading for plugins
    TEMPLATES[0]['APP_DIRS'] = True
    del TEMPLATES[0]['OPTIONS']['loaders']

REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'InvenTree.exceptions.exception_handler',
    'DATETIME_FORMAT': '%Y-%m-%d %H:%M',
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework.authentication.SessionAuthentication',
        'rest_framework.authentication.TokenAuthentication',
    ),
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'DEFAULT_PERMISSION_CLASSES': (
        'rest_framework.permissions.IsAuthenticated',
        'rest_framework.permissions.DjangoModelPermissions',
        'InvenTree.permissions.RolePermission',
    ),
    'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
    'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata',
    'DEFAULT_RENDERER_CLASSES': [
        'rest_framework.renderers.JSONRenderer',
    ]
}

if DEBUG:
    # Enable browsable API if in DEBUG mode
    REST_FRAMEWORK['DEFAULT_RENDERER_CLASSES'].append('rest_framework.renderers.BrowsableAPIRenderer')

WSGI_APPLICATION = 'InvenTree.wsgi.application'

"""
Configure the database backend based on the user-specified values.

- Primarily this configuration happens in the config.yaml file
- However there may be reason to configure the DB via environmental variables
- The following code lets the user "mix and match" database configuration
"""

logger.debug("Configuring database backend:")

# Extract database configuration from the config.yaml file
db_config = CONFIG.get('database', {})

if not db_config:
    db_config = {}

# Environment variables take preference over config file!

db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']

for key in db_keys:
    # First, check the environment variables
    env_key = f"INVENTREE_DB_{key}"
    env_var = os.environ.get(env_key, None)

    if env_var:
        # Override configuration value
        db_config[key] = env_var

# Check that required database configuration options are specified
required_keys = ['ENGINE', 'NAME']

for key in required_keys:
    if key not in db_config:  # pragma: no cover
        error_msg = f'Missing required database configuration value {key}'
        logger.error(error_msg)

        print('Error: ' + error_msg)
        sys.exit(-1)

"""
Special considerations for the database 'ENGINE' setting.
It can be specified in config.yaml (or envvar) as either (for example):
- sqlite3
- django.db.backends.sqlite3
- django.db.backends.postgresql
"""

db_engine = db_config['ENGINE'].lower()

# Correct common misspelling
if db_engine == 'sqlite':
    db_engine = 'sqlite3'  # pragma: no cover

if db_engine in ['sqlite3', 'postgresql', 'mysql']:
    # Prepend the required python module string
    db_engine = f'django.db.backends.{db_engine}'
    db_config['ENGINE'] = db_engine
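
# For illustration (sketch only, hypothetical input values): the normalisation
# above maps short engine names onto the full Django module path, e.g.
#
#   INVENTREE_DB_ENGINE=sqlite      -> django.db.backends.sqlite3
#   INVENTREE_DB_ENGINE=postgresql  -> django.db.backends.postgresql
#   django.db.backends.mysql        -> left unchanged
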
db_name = db_config['NAME']
|
||||
db_host = db_config.get('HOST', "''")
|
||||
|
||||
if 'sqlite' in db_engine:
|
||||
db_name = str(Path(db_name).resolve())
|
||||
db_config['NAME'] = db_name
|
||||
|
||||
logger.info(f"DB_ENGINE: {db_engine}")
|
||||
logger.info(f"DB_NAME: {db_name}")
|
||||
logger.info(f"DB_HOST: {db_host}")
|
||||
|
||||
"""
|
||||
In addition to base-level database configuration, we may wish to specify specific options to the database backend
|
||||
Ref: https://docs.djangoproject.com/en/3.2/ref/settings/#std:setting-OPTIONS
|
||||
"""
|
||||
|
||||
# 'OPTIONS' or 'options' can be specified in config.yaml
|
||||
# Set useful sensible timeouts for a transactional webserver to communicate
|
||||
# with its database server, that is, if the webserver is having issues
|
||||
# connecting to the database server (such as a replica failover) don't sit and
|
||||
# wait for possibly an hour or more, just tell the client something went wrong
|
||||
# and let the client retry when they want to.
|
||||
db_options = db_config.get("OPTIONS", db_config.get("options", {}))
|
||||
|
||||
# Specific options for postgres backend
|
||||
if "postgres" in db_engine: # pragma: no cover
|
||||
from psycopg2.extensions import (ISOLATION_LEVEL_READ_COMMITTED,
|
||||
ISOLATION_LEVEL_SERIALIZABLE)
|
||||
|
||||
# Connection timeout
|
||||
if "connect_timeout" not in db_options:
|
||||
# The DB server is in the same data center, it should not take very
|
||||
# long to connect to the database server
|
||||
# # seconds, 2 is minium allowed by libpq
|
||||
db_options["connect_timeout"] = int(
|
||||
get_setting('INVENTREE_DB_TIMEOUT', 'database.timeout', 2)
|
||||
)
|
||||
|
||||
# Setup TCP keepalive
|
||||
# DB server is in the same DC, it should not become unresponsive for
|
||||
# very long. With the defaults below we wait 5 seconds for the network
|
||||
# issue to resolve itself. It it that doesn't happen whatever happened
|
||||
# is probably fatal and no amount of waiting is going to fix it.
|
||||
# # 0 - TCP Keepalives disabled; 1 - enabled
|
||||
if "keepalives" not in db_options:
|
||||
db_options["keepalives"] = int(
|
||||
get_setting('INVENTREE_DB_TCP_KEEPALIVES', 'database.tcp_keepalives', 1)
|
||||
)
|
||||
|
||||
# Seconds after connection is idle to send keep alive
|
||||
if "keepalives_idle" not in db_options:
|
||||
db_options["keepalives_idle"] = int(
|
||||
get_setting('INVENTREE_DB_TCP_KEEPALIVES_IDLE', 'database.tcp_keepalives_idle', 1)
|
||||
)
|
||||
|
||||
# Seconds after missing ACK to send another keep alive
|
||||
if "keepalives_interval" not in db_options:
|
||||
db_options["keepalives_interval"] = int(
|
||||
get_setting("INVENTREE_DB_TCP_KEEPALIVES_INTERVAL", "database.tcp_keepalives_internal", "1")
|
||||
)
|
||||
|
||||
# Number of missing ACKs before we close the connection
|
||||
if "keepalives_count" not in db_options:
|
||||
db_options["keepalives_count"] = int(
|
||||
get_setting("INVENTREE_DB_TCP_KEEPALIVES_COUNT", "database.tcp_keepalives_count", "5")
|
||||
)
|
||||
|
||||
# # Milliseconds for how long pending data should remain unacked
|
||||
# by the remote server
|
||||
# TODO: Supported starting in PSQL 11
|
||||
# "tcp_user_timeout": int(os.getenv("PGTCP_USER_TIMEOUT", "1000"),
|
||||
|
||||
# Postgres's default isolation level is Read Committed which is
|
||||
# normally fine, but most developers think the database server is
|
||||
# actually going to do Serializable type checks on the queries to
|
||||
# protect against simultaneous changes.
|
||||
# https://www.postgresql.org/docs/devel/transaction-iso.html
|
||||
# https://docs.djangoproject.com/en/3.2/ref/databases/#isolation-level
|
||||
if "isolation_level" not in db_options:
|
||||
serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False)
|
||||
db_options["isolation_level"] = ISOLATION_LEVEL_SERIALIZABLE if serializable else ISOLATION_LEVEL_READ_COMMITTED
|
||||
|
||||
# Specific options for MySql / MariaDB backend
|
||||
elif "mysql" in db_engine: # pragma: no cover
|
||||
# TODO TCP time outs and keepalives
|
||||
|
||||
# MariaDB's default isolation level is Repeatable Read which is
|
||||
# normally fine, but most developers think the database server is
|
||||
# actually going to Serializable type checks on the queries to
|
||||
# protect against siumltaneous changes.
|
||||
# https://mariadb.com/kb/en/mariadb-transactions-and-isolation-levels-for-sql-server-users/#changing-the-isolation-level
|
||||
# https://docs.djangoproject.com/en/3.2/ref/databases/#mysql-isolation-level
|
||||
if "isolation_level" not in db_options:
|
||||
serializable = get_boolean_setting('INVENTREE_DB_ISOLATION_SERIALIZABLE', 'database.serializable', False)
|
||||
db_options["isolation_level"] = "serializable" if serializable else "read committed"
|
||||
|
||||
# Specific options for sqlite backend
|
||||
elif "sqlite" in db_engine:
|
||||
# TODO: Verify timeouts are not an issue because no network is involved for SQLite
|
||||
|
||||
# SQLite's default isolation level is Serializable due to SQLite's
|
||||
# single writer implementation. Presumably as a result of this, it is
|
||||
# not possible to implement any lower isolation levels in SQLite.
|
||||
# https://www.sqlite.org/isolation.html
|
||||
pass
|
||||
|
||||
# Provide OPTIONS dict back to the database configuration dict
|
||||
db_config['OPTIONS'] = db_options
|
||||
|
||||
# Set testing options for the database
|
||||
db_config['TEST'] = {
|
||||
'CHARSET': 'utf8',
|
||||
}
|
||||
|
||||
# Set collation option for mysql test database
|
||||
if 'mysql' in db_engine:
|
||||
db_config['TEST']['COLLATION'] = 'utf8_general_ci' # pragma: no cover
|
||||
|
||||
DATABASES = {
|
||||
'default': db_config
|
||||
}
|
||||
|
||||
# Cache configuration
|
||||
cache_host = get_setting('INVENTREE_CACHE_HOST', 'cache.host', None)
|
||||
cache_port = get_setting('INVENTREE_CACHE_PORT', 'cache.port', '6379')
|
||||
|
||||
if cache_host: # pragma: no cover
|
||||
# We are going to rely upon a possibly non-localhost for our cache,
|
||||
# so don't wait too long for the cache as nothing in the cache should be
|
||||
# irreplacable.
|
||||
_cache_options = {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
"SOCKET_CONNECT_TIMEOUT": int(os.getenv("CACHE_CONNECT_TIMEOUT", "2")),
|
||||
"SOCKET_TIMEOUT": int(os.getenv("CACHE_SOCKET_TIMEOUT", "2")),
|
||||
"CONNECTION_POOL_KWARGS": {
|
||||
"socket_keepalive": config.is_true(
|
||||
os.getenv("CACHE_TCP_KEEPALIVE", "1")
|
||||
),
|
||||
"socket_keepalive_options": {
|
||||
socket.TCP_KEEPCNT: int(
|
||||
os.getenv("CACHE_KEEPALIVES_COUNT", "5")
|
||||
),
|
||||
socket.TCP_KEEPIDLE: int(
|
||||
os.getenv("CACHE_KEEPALIVES_IDLE", "1")
|
||||
),
|
||||
socket.TCP_KEEPINTVL: int(
|
||||
os.getenv("CACHE_KEEPALIVES_INTERVAL", "1")
|
||||
),
|
||||
socket.TCP_USER_TIMEOUT: int(
|
||||
os.getenv("CACHE_TCP_USER_TIMEOUT", "1000")
|
||||
),
|
||||
},
|
||||
},
|
||||
}
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": f"redis://{cache_host}:{cache_port}/0",
|
||||
"OPTIONS": _cache_options,
|
||||
},
|
||||
}
|
||||
else:
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||
},
|
||||
}
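A minimal usage sketch (not from this commit): the Redis/LocMem choice above is driven entirely by environment or config values read before this settings module is imported. The host name below is a placeholder; the variable names are the ones read in this file.

# Hedged example of exercising the cache selection above.
import os

os.environ.setdefault("INVENTREE_CACHE_HOST", "redis.example.com")  # placeholder host
os.environ.setdefault("INVENTREE_CACHE_PORT", "6379")
os.environ.setdefault("CACHE_CONNECT_TIMEOUT", "5")  # seconds to establish a connection
os.environ.setdefault("CACHE_SOCKET_TIMEOUT", "5")   # seconds to wait for a reply
# With these set, the block above builds a django_redis cache at
# redis://redis.example.com:6379/0; with no host it falls back to LocMemCache.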
|
||||
|
||||
# django-q background worker configuration
|
||||
Q_CLUSTER = {
|
||||
'name': 'InvenTree',
|
||||
'workers': int(get_setting('INVENTREE_BACKGROUND_WORKERS', 'background.workers', 4)),
|
||||
'timeout': int(get_setting('INVENTREE_BACKGROUND_TIMEOUT', 'background.timeout', 90)),
|
||||
'retry': 120,
|
||||
'queue_limit': 50,
|
||||
'bulk': 10,
|
||||
'orm': 'default',
|
||||
'sync': False,
|
||||
}
|
||||
|
||||
if cache_host: # pragma: no cover
|
||||
# If using an external Redis cache, use it as the broker for Django Q as well
|
||||
Q_CLUSTER["django_redis"] = "worker"
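Hedged sketch of tuning the background worker settings read above (these must be set before this file is loaded); the numbers are placeholders, not recommended defaults.

# Illustrative values only - they feed Q_CLUSTER['workers'] and Q_CLUSTER['timeout'].
import os

os.environ["INVENTREE_BACKGROUND_WORKERS"] = "8"
os.environ["INVENTREE_BACKGROUND_TIMEOUT"] = "300"
# General django-q guidance (hedged): 'retry' should exceed 'timeout', so a
# timeout this large would also warrant raising the hard-coded retry of 120.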
|
||||
|
||||
# database user sessions
|
||||
SESSION_ENGINE = 'user_sessions.backends.db'
|
||||
LOGOUT_REDIRECT_URL = 'index'
|
||||
SILENCED_SYSTEM_CHECKS = [
|
||||
'admin.E410',
|
||||
]
|
||||
|
||||
# Password validation
|
||||
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
|
||||
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
|
||||
},
|
||||
{
|
||||
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
|
||||
},
|
||||
]
|
||||
|
||||
# Extra (optional) URL validators
|
||||
# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator
|
||||
|
||||
EXTRA_URL_SCHEMES = CONFIG.get('extra_url_schemes', [])
|
||||
|
||||
if type(EXTRA_URL_SCHEMES) not in [list]: # pragma: no cover
|
||||
logger.warning("extra_url_schemes not correctly formatted")
|
||||
EXTRA_URL_SCHEMES = []
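Illustrative only: extra schemes are the kind of input Django's URLValidator accepts (per the docs link above); how InvenTree wires EXTRA_URL_SCHEMES into a validator is not shown in this hunk, so treat the snippet as an assumption.

# Hedged sketch; 'ssh' stands in for a hypothetical extra scheme.
from django.core.validators import URLValidator

validator = URLValidator(schemes=['http', 'https', 'ssh'])
validator('ssh://git.example.com/repo.git')  # passes; invalid URLs raise ValidationError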
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/dev/topics/i18n/
|
||||
LANGUAGE_CODE = get_setting('INVENTREE_LANGUAGE', 'language', 'en-us')
|
||||
|
||||
# If a new language translation is supported, it must be added here
|
||||
LANGUAGES = [
|
||||
('cs', _('Czech')),
|
||||
('de', _('German')),
|
||||
('el', _('Greek')),
|
||||
('en', _('English')),
|
||||
('es', _('Spanish')),
|
||||
('es-mx', _('Spanish (Mexican)')),
|
||||
('fa', _('Farsi / Persian')),
|
||||
('fr', _('French')),
|
||||
('he', _('Hebrew')),
|
||||
('hu', _('Hungarian')),
|
||||
('it', _('Italian')),
|
||||
('ja', _('Japanese')),
|
||||
('ko', _('Korean')),
|
||||
('nl', _('Dutch')),
|
||||
('no', _('Norwegian')),
|
||||
('pl', _('Polish')),
|
||||
('pt', _('Portuguese')),
|
||||
('pt-BR', _('Portuguese (Brazilian)')),
|
||||
('ru', _('Russian')),
|
||||
('sv', _('Swedish')),
|
||||
('th', _('Thai')),
|
||||
('tr', _('Turkish')),
|
||||
('vi', _('Vietnamese')),
|
||||
('zh-cn', _('Chinese')),
|
||||
]
|
||||
|
||||
# Testing interface translations
|
||||
if get_boolean_setting('TEST_TRANSLATIONS', default_value=False): # pragma: no cover
|
||||
# Set default language
|
||||
LANGUAGE_CODE = 'xx'
|
||||
|
||||
# Add to language catalog
|
||||
LANGUAGES.append(('xx', 'Test'))
|
||||
|
||||
# Add custom languages not provided by Django
|
||||
EXTRA_LANG_INFO = {
|
||||
'xx': {
|
||||
'code': 'xx',
|
||||
'name': 'Test',
|
||||
'name_local': 'Test'
|
||||
},
|
||||
}
|
||||
LANG_INFO = dict(django.conf.locale.LANG_INFO, **EXTRA_LANG_INFO)
|
||||
django.conf.locale.LANG_INFO = LANG_INFO
|
||||
|
||||
# Currencies available for use
|
||||
CURRENCIES = CONFIG.get(
|
||||
'currencies',
|
||||
[
|
||||
'AUD', 'CAD', 'CNY', 'EUR', 'GBP', 'JPY', 'NZD', 'USD',
|
||||
],
|
||||
)
|
||||
|
||||
# Check that each provided currency is supported
|
||||
for currency in CURRENCIES:
|
||||
if currency not in moneyed.CURRENCIES: # pragma: no cover
|
||||
print(f"Currency code '{currency}' is not supported")
|
||||
sys.exit(1)
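A small check mirroring the loop above: moneyed.CURRENCIES is keyed by currency code, so a plain membership test is all the validation needed. 'NOTACODE' is a deliberately invalid placeholder.

import moneyed

print('USD' in moneyed.CURRENCIES)       # True  - accepted
print('NOTACODE' in moneyed.CURRENCIES)  # False - the loop above would sys.exit(1)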
|
||||
|
||||
# Custom currency exchange backend
|
||||
EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange'
|
||||
|
||||
# Email configuration options
|
||||
EMAIL_BACKEND = get_setting('INVENTREE_EMAIL_BACKEND', 'email.backend', 'django.core.mail.backends.smtp.EmailBackend')
|
||||
EMAIL_HOST = get_setting('INVENTREE_EMAIL_HOST', 'email.host', '')
|
||||
EMAIL_PORT = int(get_setting('INVENTREE_EMAIL_PORT', 'email.port', 25))
|
||||
EMAIL_HOST_USER = get_setting('INVENTREE_EMAIL_USERNAME', 'email.username', '')
|
||||
EMAIL_HOST_PASSWORD = get_setting('INVENTREE_EMAIL_PASSWORD', 'email.password', '')
|
||||
EMAIL_SUBJECT_PREFIX = get_setting('INVENTREE_EMAIL_PREFIX', 'email.prefix', '[InvenTree] ')
|
||||
EMAIL_USE_TLS = get_boolean_setting('INVENTREE_EMAIL_TLS', 'email.tls', False)
|
||||
EMAIL_USE_SSL = get_boolean_setting('INVENTREE_EMAIL_SSL', 'email.ssl', False)
|
||||
|
||||
DEFAULT_FROM_EMAIL = get_setting('INVENTREE_EMAIL_SENDER', 'email.sender', '')
|
||||
|
||||
EMAIL_USE_LOCALTIME = False
|
||||
EMAIL_TIMEOUT = 60
|
||||
|
||||
LOCALE_PATHS = (
|
||||
BASE_DIR.joinpath('locale/'),
|
||||
)
|
||||
|
||||
TIME_ZONE = get_setting('INVENTREE_TIMEZONE', 'timezone', 'UTC')
|
||||
|
||||
USE_I18N = True
|
||||
|
||||
USE_L10N = True
|
||||
|
||||
# Do not use native timezone support in "test" mode
|
||||
# It generates a *lot* of cruft in the logs
|
||||
if not TESTING:
|
||||
USE_TZ = True # pragma: no cover
|
||||
|
||||
DATE_INPUT_FORMATS = [
|
||||
"%Y-%m-%d",
|
||||
]
|
||||
|
||||
# crispy forms use the bootstrap templates
|
||||
CRISPY_TEMPLATE_PACK = 'bootstrap4'
|
||||
|
||||
# Use database transactions when importing / exporting data
|
||||
IMPORT_EXPORT_USE_TRANSACTIONS = True
|
||||
|
||||
SITE_ID = 1
|
||||
|
||||
# Load the allauth social backends
|
||||
SOCIAL_BACKENDS = CONFIG.get('social_backends', [])
|
||||
for app in SOCIAL_BACKENDS:
|
||||
INSTALLED_APPS.append(app) # pragma: no cover
|
||||
|
||||
SOCIALACCOUNT_PROVIDERS = CONFIG.get('social_providers', [])
|
||||
|
||||
SOCIALACCOUNT_STORE_TOKENS = True
|
||||
|
||||
# settings for allauth
|
||||
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = get_setting('INVENTREE_LOGIN_CONFIRM_DAYS', 'login_confirm_days', 3)
|
||||
ACCOUNT_LOGIN_ATTEMPTS_LIMIT = get_setting('INVENTREE_LOGIN_ATTEMPTS', 'login_attempts', 5)
|
||||
ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True
|
||||
ACCOUNT_PREVENT_ENUMERATION = True
|
||||
|
||||
# override forms / adapters
|
||||
ACCOUNT_FORMS = {
|
||||
'login': 'allauth.account.forms.LoginForm',
|
||||
'signup': 'InvenTree.forms.CustomSignupForm',
|
||||
'add_email': 'allauth.account.forms.AddEmailForm',
|
||||
'change_password': 'allauth.account.forms.ChangePasswordForm',
|
||||
'set_password': 'allauth.account.forms.SetPasswordForm',
|
||||
'reset_password': 'allauth.account.forms.ResetPasswordForm',
|
||||
'reset_password_from_key': 'allauth.account.forms.ResetPasswordKeyForm',
|
||||
'disconnect': 'allauth.socialaccount.forms.DisconnectForm',
|
||||
}
|
||||
|
||||
SOCIALACCOUNT_ADAPTER = 'InvenTree.forms.CustomSocialAccountAdapter'
|
||||
ACCOUNT_ADAPTER = 'InvenTree.forms.CustomAccountAdapter'
|
||||
|
||||
# login settings
|
||||
REMOTE_LOGIN = get_boolean_setting('INVENTREE_REMOTE_LOGIN', 'remote_login_enabled', False)
|
||||
REMOTE_LOGIN_HEADER = get_setting('INVENTREE_REMOTE_LOGIN_HEADER', 'remote_login_header', 'REMOTE_USER')
|
||||
|
||||
# Markdownify configuration
|
||||
# Ref: https://django-markdownify.readthedocs.io/en/latest/settings.html
|
||||
|
||||
MARKDOWNIFY = {
|
||||
'default': {
|
||||
'BLEACH': True,
|
||||
'WHITELIST_ATTRS': [
|
||||
'href',
|
||||
'src',
|
||||
'alt',
|
||||
],
|
||||
'WHITELIST_TAGS': [
|
||||
'a',
|
||||
'abbr',
|
||||
'b',
|
||||
'blockquote',
|
||||
'em',
|
||||
'h1', 'h2', 'h3',
|
||||
'i',
|
||||
'img',
|
||||
'li',
|
||||
'ol',
|
||||
'p',
|
||||
'strong',
|
||||
'ul'
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
# sentry.io integration for error reporting
|
||||
SENTRY_ENABLED = get_boolean_setting('INVENTREE_SENTRY_ENABLED', 'sentry_enabled', False)
|
||||
# Default Sentry DSN (can be overridden if the user wants custom Sentry integration)
|
||||
INVENTREE_DSN = 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600'
|
||||
SENTRY_DSN = get_setting('INVENTREE_SENTRY_DSN', 'sentry_dsn', INVENTREE_DSN)
|
||||
SENTRY_SAMPLE_RATE = float(get_setting('INVENTREE_SENTRY_SAMPLE_RATE', 'sentry_sample_rate', 0.1))
|
||||
|
||||
if SENTRY_ENABLED and SENTRY_DSN: # pragma: no cover
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
integrations=[DjangoIntegration(), ],
|
||||
traces_sample_rate=1.0 if DEBUG else SENTRY_SAMPLE_RATE,
|
||||
send_default_pii=True
|
||||
)
|
||||
inventree_tags = {
|
||||
'testing': TESTING,
|
||||
'docker': DOCKER,
|
||||
'debug': DEBUG,
|
||||
'remote': REMOTE_LOGIN,
|
||||
}
|
||||
for key, val in inventree_tags.items():
|
||||
sentry_sdk.set_tag(f'inventree_{key}', val)
|
||||
|
||||
# In-database error logging
|
||||
IGNORED_ERRORS = [
|
||||
Http404
|
||||
]
|
||||
|
||||
# Maintenance mode
|
||||
MAINTENANCE_MODE_RETRY_AFTER = 60
|
||||
MAINTENANCE_MODE_STATE_BACKEND = 'maintenance_mode.backends.DefaultStorageBackend'
|
||||
|
||||
# Are plugins enabled?
|
||||
PLUGINS_ENABLED = get_boolean_setting('INVENTREE_PLUGINS_ENABLED', 'plugins_enabled', False)
|
||||
|
||||
PLUGIN_FILE = config.get_plugin_file()
|
||||
|
||||
# Plugin test settings
|
||||
PLUGIN_TESTING = CONFIG.get('PLUGIN_TESTING', TESTING) # are plugins being tested?
PLUGIN_TESTING_SETUP = CONFIG.get('PLUGIN_TESTING_SETUP', False) # load plugins from setup hooks during testing?
PLUGIN_TESTING_EVENTS = False # flag indicating whether events are currently under test
PLUGIN_RETRY = CONFIG.get('PLUGIN_RETRY', 5) # how many times should plugin loading be retried?
PLUGIN_FILE_CHECKED = False # has the plugin file been checked?
|
||||
|
||||
# User interface customization values
|
||||
CUSTOMIZE = get_setting('INVENTREE_CUSTOMIZE', 'customize', {})
|
||||
|
||||
CUSTOM_LOGO = get_setting('INVENTREE_CUSTOM_LOGO', 'customize.logo', None)
|
||||
|
||||
"""
|
||||
Check for the existence of a 'custom logo' file:
|
||||
- Check the 'static' directory
|
||||
- Check the 'media' directory (legacy)
|
||||
"""
|
||||
|
||||
if CUSTOM_LOGO:
|
||||
static_storage = StaticFilesStorage()
|
||||
|
||||
if static_storage.exists(CUSTOM_LOGO):
|
||||
logger.info(f"Loading custom logo from static directory: {CUSTOM_LOGO}")
|
||||
elif default_storage.exists(CUSTOM_LOGO):
|
||||
logger.info(f"Loading custom logo from media directory: {CUSTOM_LOGO}")
|
||||
else:
|
||||
logger.warning(f"The custom logo file '{CUSTOM_LOGO}' could not be found in the static or media directories")
|
||||
CUSTOM_LOGO = False
|
||||
|
||||
if DEBUG:
|
||||
logger.info("InvenTree running with DEBUG enabled")
|
||||
|
||||
logger.debug(f"MEDIA_ROOT: '{MEDIA_ROOT}'")
|
||||
logger.debug(f"STATIC_ROOT: '{STATIC_ROOT}'")
|
||||
371
InvenTree/InvenTree/static/bootstrap-table/bootstrap-table.css
vendored
Normal file
@@ -0,0 +1,371 @@
|
||||
/**
|
||||
* @author zhixin wen <wenzhixin2010@gmail.com>
|
||||
* version: 1.18.3
|
||||
* https://github.com/wenzhixin/bootstrap-table/
|
||||
*/
|
||||
.bootstrap-table .fixed-table-toolbar::after {
|
||||
content: "";
|
||||
display: block;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .bs-bars,
|
||||
.bootstrap-table .fixed-table-toolbar .search,
|
||||
.bootstrap-table .fixed-table-toolbar .columns {
|
||||
position: relative;
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group {
|
||||
display: inline-block;
|
||||
margin-left: -1px !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group > .btn {
|
||||
border-radius: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group:first-child > .btn {
|
||||
border-top-left-radius: 4px;
|
||||
border-bottom-left-radius: 4px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .btn-group > .btn-group:last-child > .btn {
|
||||
border-top-right-radius: 4px;
|
||||
border-bottom-right-radius: 4px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns .dropdown-menu {
|
||||
text-align: left;
|
||||
max-height: 300px;
|
||||
overflow: auto;
|
||||
-ms-overflow-style: scrollbar;
|
||||
z-index: 1001;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns label {
|
||||
display: block;
|
||||
padding: 3px 20px;
|
||||
clear: both;
|
||||
font-weight: normal;
|
||||
line-height: 1.428571429;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns-left {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .columns-right {
|
||||
margin-left: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-toolbar .pull-right .dropdown-menu {
|
||||
right: 0;
|
||||
left: auto;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container {
|
||||
position: relative;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table {
|
||||
width: 100%;
|
||||
margin-bottom: 0 !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table th,
|
||||
.bootstrap-table .fixed-table-container .table td {
|
||||
vertical-align: middle;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th {
|
||||
vertical-align: bottom;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th:focus {
|
||||
outline: 0 solid transparent;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th.detail {
|
||||
width: 30px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .th-inner {
|
||||
padding: 0.75rem;
|
||||
vertical-align: bottom;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .sortable {
|
||||
cursor: pointer;
|
||||
background-position: right;
|
||||
background-repeat: no-repeat;
|
||||
padding-right: 30px !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .both {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAQAAADYWf5HAAAAkElEQVQoz7X QMQ5AQBCF4dWQSJxC5wwax1Cq1e7BAdxD5SL+Tq/QCM1oNiJidwox0355mXnG/DrEtIQ6azioNZQxI0ykPhTQIwhCR+BmBYtlK7kLJYwWCcJA9M4qdrZrd8pPjZWPtOqdRQy320YSV17OatFC4euts6z39GYMKRPCTKY9UnPQ6P+GtMRfGtPnBCiqhAeJPmkqAAAAAElFTkSuQmCC");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .asc {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAZ0lEQVQ4y2NgGLKgquEuFxBPAGI2ahhWCsS/gDibUoO0gPgxEP8H4ttArEyuQYxAPBdqEAxPBImTY5gjEL9DM+wTENuQahAvEO9DMwiGdwAxOymGJQLxTyD+jgWDxCMZRsEoGAVoAADeemwtPcZI2wAAAABJRU5ErkJggg==");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table thead th .desc {
|
||||
background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAATCAYAAAByUDbMAAAAZUlEQVQ4y2NgGAWjYBSggaqGu5FA/BOIv2PBIPFEUgxjB+IdQPwfC94HxLykus4GiD+hGfQOiB3J8SojEE9EM2wuSJzcsFMG4ttQgx4DsRalkZENxL+AuJQaMcsGxBOAmGvopk8AVz1sLZgg0bsAAAAASUVORK5CYII= ");
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr.selected td {
|
||||
background-color: rgba(0, 0, 0, 0.075);
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr.no-records-found td {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view {
|
||||
display: flex;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view .card-view-title {
|
||||
font-weight: bold;
|
||||
display: inline-block;
|
||||
min-width: 30%;
|
||||
width: auto !important;
|
||||
text-align: left !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table tbody tr .card-view .card-view-value {
|
||||
width: 100% !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox {
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label input[type="radio"],
|
||||
.bootstrap-table .fixed-table-container .table .bs-checkbox label input[type="checkbox"] {
|
||||
margin: 0 auto !important;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .table.table-sm .th-inner {
|
||||
padding: 0.3rem;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height:not(.has-footer) {
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height.has-card-view {
|
||||
border-top: 1px solid #dee2e6;
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .fixed-table-border {
|
||||
border-left: 1px solid #dee2e6;
|
||||
border-right: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .table thead th {
|
||||
border-bottom: 1px solid #dee2e6;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container.fixed-height .table-dark thead th {
|
||||
border-bottom: 1px solid #32383e;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-header {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body {
|
||||
overflow-x: auto;
|
||||
overflow-y: auto;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading {
|
||||
align-items: center;
|
||||
background: #fff;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
position: absolute;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
z-index: 1000;
|
||||
transition: visibility 0s, opacity 0.15s ease-in-out;
|
||||
opacity: 0;
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.open {
|
||||
visibility: visible;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap {
|
||||
align-items: baseline;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .loading-text {
|
||||
margin-right: 6px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap {
|
||||
align-items: center;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-dot,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::after,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::before {
|
||||
content: "";
|
||||
animation-duration: 1.5s;
|
||||
animation-iteration-count: infinite;
|
||||
animation-name: LOADING;
|
||||
background: #212529;
|
||||
border-radius: 50%;
|
||||
display: block;
|
||||
height: 5px;
|
||||
margin: 0 4px;
|
||||
opacity: 0;
|
||||
width: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-dot {
|
||||
animation-delay: 0.3s;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading .loading-wrap .animation-wrap::after {
|
||||
animation-delay: 0.6s;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark {
|
||||
background: #212529;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-dot,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-wrap::after,
|
||||
.bootstrap-table .fixed-table-container .fixed-table-body .fixed-table-loading.table-dark .animation-wrap::before {
|
||||
background: #fff;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-container .fixed-table-footer {
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination::after {
|
||||
content: "";
|
||||
display: block;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail,
|
||||
.bootstrap-table .fixed-table-pagination > .pagination {
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .pagination-info {
|
||||
line-height: 34px;
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list {
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list .btn-group {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination-detail .page-list .btn-group .dropdown-menu {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a {
|
||||
color: #c8c8c8;
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a::before {
|
||||
content: '\2B05';
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.page-intermediate a::after {
|
||||
content: '\27A1';
|
||||
}
|
||||
|
||||
.bootstrap-table .fixed-table-pagination > .pagination ul.pagination li.disabled a {
|
||||
pointer-events: none;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.bootstrap-table.fullscreen {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
left: 0;
|
||||
z-index: 1050;
|
||||
width: 100% !important;
|
||||
background: #fff;
|
||||
height: calc(100vh);
|
||||
overflow-y: scroll;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap4 .pagination-lg .page-link, .bootstrap-table.bootstrap5 .pagination-lg .page-link {
|
||||
padding: .5rem 1rem;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap5 .float-left {
|
||||
float: left;
|
||||
}
|
||||
|
||||
.bootstrap-table.bootstrap5 .float-right {
|
||||
float: right;
|
||||
}
|
||||
|
||||
/* calculate scrollbar width */
|
||||
div.fixed-table-scroll-inner {
|
||||
width: 100%;
|
||||
height: 200px;
|
||||
}
|
||||
|
||||
div.fixed-table-scroll-outer {
|
||||
top: 0;
|
||||
left: 0;
|
||||
visibility: hidden;
|
||||
width: 200px;
|
||||
height: 150px;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
@keyframes LOADING {
|
||||
0% {
|
||||
opacity: 0;
|
||||
}
|
||||
50% {
|
||||
opacity: 1;
|
||||
}
|
||||
to {
|
||||
opacity: 0;
|
||||
}
|
||||
}
|
||||
7409
InvenTree/InvenTree/static/bootstrap-table/bootstrap-table.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/bootstrap-table.min.css
vendored
Normal file
File diff suppressed because one or more lines are too long
10
InvenTree/InvenTree/static/bootstrap-table/bootstrap-table.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1779
InvenTree/InvenTree/static/bootstrap-table/extensions/addrbar/bootstrap-table-addrbar.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/extensions/addrbar/bootstrap-table-addrbar.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1211
InvenTree/InvenTree/static/bootstrap-table/extensions/auto-refresh/bootstrap-table-auto-refresh.js
vendored
Normal file
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
2546
InvenTree/InvenTree/static/bootstrap-table/extensions/cookie/bootstrap-table-cookie.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/extensions/cookie/bootstrap-table-cookie.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1237
InvenTree/InvenTree/static/bootstrap-table/extensions/copy-rows/bootstrap-table-copy-rows.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/extensions/copy-rows/bootstrap-table-copy-rows.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1256
InvenTree/InvenTree/static/bootstrap-table/extensions/custom-view/bootstrap-table-custom-view.js
vendored
Normal file
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
869
InvenTree/InvenTree/static/bootstrap-table/extensions/defer-url/bootstrap-table-defer-url.js
vendored
Normal file
@@ -0,0 +1,869 @@
|
||||
(function (global, factory) {
|
||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
|
||||
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
|
||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
|
||||
}(this, (function ($) { 'use strict';
|
||||
|
||||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
|
||||
|
||||
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
|
||||
|
||||
function _classCallCheck(instance, Constructor) {
|
||||
if (!(instance instanceof Constructor)) {
|
||||
throw new TypeError("Cannot call a class as a function");
|
||||
}
|
||||
}
|
||||
|
||||
function _defineProperties(target, props) {
|
||||
for (var i = 0; i < props.length; i++) {
|
||||
var descriptor = props[i];
|
||||
descriptor.enumerable = descriptor.enumerable || false;
|
||||
descriptor.configurable = true;
|
||||
if ("value" in descriptor) descriptor.writable = true;
|
||||
Object.defineProperty(target, descriptor.key, descriptor);
|
||||
}
|
||||
}
|
||||
|
||||
function _createClass(Constructor, protoProps, staticProps) {
|
||||
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
|
||||
if (staticProps) _defineProperties(Constructor, staticProps);
|
||||
return Constructor;
|
||||
}
|
||||
|
||||
function _inherits(subClass, superClass) {
|
||||
if (typeof superClass !== "function" && superClass !== null) {
|
||||
throw new TypeError("Super expression must either be null or a function");
|
||||
}
|
||||
|
||||
subClass.prototype = Object.create(superClass && superClass.prototype, {
|
||||
constructor: {
|
||||
value: subClass,
|
||||
writable: true,
|
||||
configurable: true
|
||||
}
|
||||
});
|
||||
if (superClass) _setPrototypeOf(subClass, superClass);
|
||||
}
|
||||
|
||||
function _getPrototypeOf(o) {
|
||||
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
|
||||
return o.__proto__ || Object.getPrototypeOf(o);
|
||||
};
|
||||
return _getPrototypeOf(o);
|
||||
}
|
||||
|
||||
function _setPrototypeOf(o, p) {
|
||||
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
|
||||
o.__proto__ = p;
|
||||
return o;
|
||||
};
|
||||
|
||||
return _setPrototypeOf(o, p);
|
||||
}
|
||||
|
||||
function _isNativeReflectConstruct() {
|
||||
if (typeof Reflect === "undefined" || !Reflect.construct) return false;
|
||||
if (Reflect.construct.sham) return false;
|
||||
if (typeof Proxy === "function") return true;
|
||||
|
||||
try {
|
||||
Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {}));
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function _assertThisInitialized(self) {
|
||||
if (self === void 0) {
|
||||
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
|
||||
}
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
function _possibleConstructorReturn(self, call) {
|
||||
if (call && (typeof call === "object" || typeof call === "function")) {
|
||||
return call;
|
||||
}
|
||||
|
||||
return _assertThisInitialized(self);
|
||||
}
|
||||
|
||||
function _createSuper(Derived) {
|
||||
var hasNativeReflectConstruct = _isNativeReflectConstruct();
|
||||
|
||||
return function _createSuperInternal() {
|
||||
var Super = _getPrototypeOf(Derived),
|
||||
result;
|
||||
|
||||
if (hasNativeReflectConstruct) {
|
||||
var NewTarget = _getPrototypeOf(this).constructor;
|
||||
|
||||
result = Reflect.construct(Super, arguments, NewTarget);
|
||||
} else {
|
||||
result = Super.apply(this, arguments);
|
||||
}
|
||||
|
||||
return _possibleConstructorReturn(this, result);
|
||||
};
|
||||
}
|
||||
|
||||
function _superPropBase(object, property) {
|
||||
while (!Object.prototype.hasOwnProperty.call(object, property)) {
|
||||
object = _getPrototypeOf(object);
|
||||
if (object === null) break;
|
||||
}
|
||||
|
||||
return object;
|
||||
}
|
||||
|
||||
function _get(target, property, receiver) {
|
||||
if (typeof Reflect !== "undefined" && Reflect.get) {
|
||||
_get = Reflect.get;
|
||||
} else {
|
||||
_get = function _get(target, property, receiver) {
|
||||
var base = _superPropBase(target, property);
|
||||
|
||||
if (!base) return;
|
||||
var desc = Object.getOwnPropertyDescriptor(base, property);
|
||||
|
||||
if (desc.get) {
|
||||
return desc.get.call(receiver);
|
||||
}
|
||||
|
||||
return desc.value;
|
||||
};
|
||||
}
|
||||
|
||||
return _get(target, property, receiver || target);
|
||||
}
|
||||
|
||||
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
|
||||
|
||||
function createCommonjsModule(fn, module) {
|
||||
return module = { exports: {} }, fn(module, module.exports), module.exports;
|
||||
}
|
||||
|
||||
var check = function (it) {
|
||||
return it && it.Math == Math && it;
|
||||
};
|
||||
|
||||
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
|
||||
var global_1 =
|
||||
/* global globalThis -- safe */
|
||||
check(typeof globalThis == 'object' && globalThis) ||
|
||||
check(typeof window == 'object' && window) ||
|
||||
check(typeof self == 'object' && self) ||
|
||||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
|
||||
// eslint-disable-next-line no-new-func -- fallback
|
||||
(function () { return this; })() || Function('return this')();
|
||||
|
||||
var fails = function (exec) {
|
||||
try {
|
||||
return !!exec();
|
||||
} catch (error) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
// Detect IE8's incomplete defineProperty implementation
|
||||
var descriptors = !fails(function () {
|
||||
return Object.defineProperty({}, 1, { get: function () { return 7; } })[1] != 7;
|
||||
});
|
||||
|
||||
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
|
||||
var getOwnPropertyDescriptor$1 = Object.getOwnPropertyDescriptor;
|
||||
|
||||
// Nashorn ~ JDK8 bug
|
||||
var NASHORN_BUG = getOwnPropertyDescriptor$1 && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
|
||||
|
||||
// `Object.prototype.propertyIsEnumerable` method implementation
|
||||
// https://tc39.es/ecma262/#sec-object.prototype.propertyisenumerable
|
||||
var f$4 = NASHORN_BUG ? function propertyIsEnumerable(V) {
|
||||
var descriptor = getOwnPropertyDescriptor$1(this, V);
|
||||
return !!descriptor && descriptor.enumerable;
|
||||
} : nativePropertyIsEnumerable;
|
||||
|
||||
var objectPropertyIsEnumerable = {
|
||||
f: f$4
|
||||
};
|
||||
|
||||
var createPropertyDescriptor = function (bitmap, value) {
|
||||
return {
|
||||
enumerable: !(bitmap & 1),
|
||||
configurable: !(bitmap & 2),
|
||||
writable: !(bitmap & 4),
|
||||
value: value
|
||||
};
|
||||
};
|
||||
|
||||
var toString = {}.toString;
|
||||
|
||||
var classofRaw = function (it) {
|
||||
return toString.call(it).slice(8, -1);
|
||||
};
|
||||
|
||||
var split = ''.split;
|
||||
|
||||
// fallback for non-array-like ES3 and non-enumerable old V8 strings
|
||||
var indexedObject = fails(function () {
|
||||
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
|
||||
// eslint-disable-next-line no-prototype-builtins -- safe
|
||||
return !Object('z').propertyIsEnumerable(0);
|
||||
}) ? function (it) {
|
||||
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
|
||||
} : Object;
|
||||
|
||||
// `RequireObjectCoercible` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-requireobjectcoercible
|
||||
var requireObjectCoercible = function (it) {
|
||||
if (it == undefined) throw TypeError("Can't call method on " + it);
|
||||
return it;
|
||||
};
|
||||
|
||||
// toObject with fallback for non-array-like ES3 strings
|
||||
|
||||
|
||||
|
||||
var toIndexedObject = function (it) {
|
||||
return indexedObject(requireObjectCoercible(it));
|
||||
};
|
||||
|
||||
var isObject = function (it) {
|
||||
return typeof it === 'object' ? it !== null : typeof it === 'function';
|
||||
};
|
||||
|
||||
// `ToPrimitive` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-toprimitive
|
||||
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
|
||||
// and the second argument - flag - preferred type is a string
|
||||
var toPrimitive = function (input, PREFERRED_STRING) {
|
||||
if (!isObject(input)) return input;
|
||||
var fn, val;
|
||||
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
|
||||
throw TypeError("Can't convert object to primitive value");
|
||||
};
|
||||
|
||||
var hasOwnProperty = {}.hasOwnProperty;
|
||||
|
||||
var has$1 = function (it, key) {
|
||||
return hasOwnProperty.call(it, key);
|
||||
};
|
||||
|
||||
var document = global_1.document;
|
||||
// typeof document.createElement is 'object' in old IE
|
||||
var EXISTS = isObject(document) && isObject(document.createElement);
|
||||
|
||||
var documentCreateElement = function (it) {
|
||||
return EXISTS ? document.createElement(it) : {};
|
||||
};
|
||||
|
||||
// Thank's IE8 for his funny defineProperty
|
||||
var ie8DomDefine = !descriptors && !fails(function () {
|
||||
return Object.defineProperty(documentCreateElement('div'), 'a', {
|
||||
get: function () { return 7; }
|
||||
}).a != 7;
|
||||
});
|
||||
|
||||
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
|
||||
|
||||
// `Object.getOwnPropertyDescriptor` method
|
||||
// https://tc39.es/ecma262/#sec-object.getownpropertydescriptor
|
||||
var f$3 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
|
||||
O = toIndexedObject(O);
|
||||
P = toPrimitive(P, true);
|
||||
if (ie8DomDefine) try {
|
||||
return nativeGetOwnPropertyDescriptor(O, P);
|
||||
} catch (error) { /* empty */ }
|
||||
if (has$1(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
|
||||
};
|
||||
|
||||
var objectGetOwnPropertyDescriptor = {
|
||||
f: f$3
|
||||
};
|
||||
|
||||
var anObject = function (it) {
|
||||
if (!isObject(it)) {
|
||||
throw TypeError(String(it) + ' is not an object');
|
||||
} return it;
|
||||
};
|
||||
|
||||
var nativeDefineProperty = Object.defineProperty;
|
||||
|
||||
// `Object.defineProperty` method
|
||||
// https://tc39.es/ecma262/#sec-object.defineproperty
|
||||
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
|
||||
anObject(O);
|
||||
P = toPrimitive(P, true);
|
||||
anObject(Attributes);
|
||||
if (ie8DomDefine) try {
|
||||
return nativeDefineProperty(O, P, Attributes);
|
||||
} catch (error) { /* empty */ }
|
||||
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
|
||||
if ('value' in Attributes) O[P] = Attributes.value;
|
||||
return O;
|
||||
};
|
||||
|
||||
var objectDefineProperty = {
|
||||
f: f$2
|
||||
};
|
||||
|
||||
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
|
||||
return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
|
||||
} : function (object, key, value) {
|
||||
object[key] = value;
|
||||
return object;
|
||||
};
|
||||
|
||||
var setGlobal = function (key, value) {
|
||||
try {
|
||||
createNonEnumerableProperty(global_1, key, value);
|
||||
} catch (error) {
|
||||
global_1[key] = value;
|
||||
} return value;
|
||||
};
|
||||
|
||||
var SHARED = '__core-js_shared__';
|
||||
var store$1 = global_1[SHARED] || setGlobal(SHARED, {});
|
||||
|
||||
var sharedStore = store$1;
|
||||
|
||||
var functionToString = Function.toString;
|
||||
|
||||
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
|
||||
if (typeof sharedStore.inspectSource != 'function') {
|
||||
sharedStore.inspectSource = function (it) {
|
||||
return functionToString.call(it);
|
||||
};
|
||||
}
|
||||
|
||||
var inspectSource = sharedStore.inspectSource;
|
||||
|
||||
var WeakMap$1 = global_1.WeakMap;
|
||||
|
||||
var nativeWeakMap = typeof WeakMap$1 === 'function' && /native code/.test(inspectSource(WeakMap$1));
|
||||
|
||||
var shared = createCommonjsModule(function (module) {
|
||||
(module.exports = function (key, value) {
|
||||
return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
|
||||
})('versions', []).push({
|
||||
version: '3.9.1',
|
||||
mode: 'global',
|
||||
copyright: '© 2021 Denis Pushkarev (zloirock.ru)'
|
||||
});
|
||||
});
|
||||
|
||||
var id = 0;
|
||||
var postfix = Math.random();
|
||||
|
||||
var uid = function (key) {
|
||||
return 'Symbol(' + String(key === undefined ? '' : key) + ')_' + (++id + postfix).toString(36);
|
||||
};
|
||||
|
||||
var keys = shared('keys');
|
||||
|
||||
var sharedKey = function (key) {
|
||||
return keys[key] || (keys[key] = uid(key));
|
||||
};
|
||||
|
||||
var hiddenKeys$1 = {};
|
||||
|
||||
var WeakMap = global_1.WeakMap;
|
||||
var set, get, has;
|
||||
|
||||
var enforce = function (it) {
|
||||
return has(it) ? get(it) : set(it, {});
|
||||
};
|
||||
|
||||
var getterFor = function (TYPE) {
|
||||
return function (it) {
|
||||
var state;
|
||||
if (!isObject(it) || (state = get(it)).type !== TYPE) {
|
||||
throw TypeError('Incompatible receiver, ' + TYPE + ' required');
|
||||
} return state;
|
||||
};
|
||||
};
|
||||
|
||||
if (nativeWeakMap) {
|
||||
var store = sharedStore.state || (sharedStore.state = new WeakMap());
|
||||
var wmget = store.get;
|
||||
var wmhas = store.has;
|
||||
var wmset = store.set;
|
||||
set = function (it, metadata) {
|
||||
metadata.facade = it;
|
||||
wmset.call(store, it, metadata);
|
||||
return metadata;
|
||||
};
|
||||
get = function (it) {
|
||||
return wmget.call(store, it) || {};
|
||||
};
|
||||
has = function (it) {
|
||||
return wmhas.call(store, it);
|
||||
};
|
||||
} else {
|
||||
var STATE = sharedKey('state');
|
||||
hiddenKeys$1[STATE] = true;
|
||||
set = function (it, metadata) {
|
||||
metadata.facade = it;
|
||||
createNonEnumerableProperty(it, STATE, metadata);
|
||||
return metadata;
|
||||
};
|
||||
get = function (it) {
|
||||
return has$1(it, STATE) ? it[STATE] : {};
|
||||
};
|
||||
has = function (it) {
|
||||
return has$1(it, STATE);
|
||||
};
|
||||
}
|
||||
|
||||
var internalState = {
|
||||
set: set,
|
||||
get: get,
|
||||
has: has,
|
||||
enforce: enforce,
|
||||
getterFor: getterFor
|
||||
};
|
||||
|
||||
var redefine = createCommonjsModule(function (module) {
|
||||
var getInternalState = internalState.get;
|
||||
var enforceInternalState = internalState.enforce;
|
||||
var TEMPLATE = String(String).split('String');
|
||||
|
||||
(module.exports = function (O, key, value, options) {
|
||||
var unsafe = options ? !!options.unsafe : false;
|
||||
var simple = options ? !!options.enumerable : false;
|
||||
var noTargetGet = options ? !!options.noTargetGet : false;
|
||||
var state;
|
||||
if (typeof value == 'function') {
|
||||
if (typeof key == 'string' && !has$1(value, 'name')) {
|
||||
createNonEnumerableProperty(value, 'name', key);
|
||||
}
|
||||
state = enforceInternalState(value);
|
||||
if (!state.source) {
|
||||
state.source = TEMPLATE.join(typeof key == 'string' ? key : '');
|
||||
}
|
||||
}
|
||||
if (O === global_1) {
|
||||
if (simple) O[key] = value;
|
||||
else setGlobal(key, value);
|
||||
return;
|
||||
} else if (!unsafe) {
|
||||
delete O[key];
|
||||
} else if (!noTargetGet && O[key]) {
|
||||
simple = true;
|
||||
}
|
||||
if (simple) O[key] = value;
|
||||
else createNonEnumerableProperty(O, key, value);
|
||||
// add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
|
||||
})(Function.prototype, 'toString', function toString() {
|
||||
return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
|
||||
});
|
||||
});
|
||||
|
||||
var path = global_1;
|
||||
|
||||
var aFunction = function (variable) {
|
||||
return typeof variable == 'function' ? variable : undefined;
|
||||
};
|
||||
|
||||
var getBuiltIn = function (namespace, method) {
|
||||
return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
|
||||
: path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
|
||||
};
|
||||
|
||||
var ceil = Math.ceil;
|
||||
var floor = Math.floor;
|
||||
|
||||
// `ToInteger` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-tointeger
|
||||
var toInteger = function (argument) {
|
||||
return isNaN(argument = +argument) ? 0 : (argument > 0 ? floor : ceil)(argument);
|
||||
};
|
||||
|
||||
var min$1 = Math.min;
|
||||
|
||||
// `ToLength` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-tolength
|
||||
var toLength = function (argument) {
|
||||
return argument > 0 ? min$1(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
|
||||
};
|
||||
|
||||
var max = Math.max;
|
||||
var min = Math.min;
|
||||
|
||||
// Helper for a popular repeating case of the spec:
|
||||
// Let integer be ? ToInteger(index).
|
||||
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
|
||||
var toAbsoluteIndex = function (index, length) {
|
||||
var integer = toInteger(index);
|
||||
return integer < 0 ? max(integer + length, 0) : min(integer, length);
|
||||
};
|
||||
|
||||
// `Array.prototype.{ indexOf, includes }` methods implementation
|
||||
var createMethod = function (IS_INCLUDES) {
|
||||
return function ($this, el, fromIndex) {
|
||||
var O = toIndexedObject($this);
|
||||
var length = toLength(O.length);
|
||||
var index = toAbsoluteIndex(fromIndex, length);
|
||||
var value;
|
||||
// Array#includes uses SameValueZero equality algorithm
|
||||
// eslint-disable-next-line no-self-compare -- NaN check
|
||||
if (IS_INCLUDES && el != el) while (length > index) {
|
||||
value = O[index++];
|
||||
// eslint-disable-next-line no-self-compare -- NaN check
|
||||
if (value != value) return true;
|
||||
// Array#indexOf ignores holes, Array#includes - not
|
||||
} else for (;length > index; index++) {
|
||||
if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
|
||||
} return !IS_INCLUDES && -1;
|
||||
};
|
||||
};
|
||||
|
||||
var arrayIncludes = {
|
||||
// `Array.prototype.includes` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.includes
|
||||
includes: createMethod(true),
|
||||
// `Array.prototype.indexOf` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.indexof
|
||||
indexOf: createMethod(false)
|
||||
};
|
||||
|
||||
var indexOf = arrayIncludes.indexOf;
|
||||
|
||||
|
||||
var objectKeysInternal = function (object, names) {
|
||||
var O = toIndexedObject(object);
|
||||
var i = 0;
|
||||
var result = [];
|
||||
var key;
|
||||
for (key in O) !has$1(hiddenKeys$1, key) && has$1(O, key) && result.push(key);
|
||||
// Don't enum bug & hidden keys
|
||||
while (names.length > i) if (has$1(O, key = names[i++])) {
|
||||
~indexOf(result, key) || result.push(key);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
||||
// IE8- don't enum bug keys
|
||||
var enumBugKeys = [
|
||||
'constructor',
|
||||
'hasOwnProperty',
|
||||
'isPrototypeOf',
|
||||
'propertyIsEnumerable',
|
||||
'toLocaleString',
|
||||
'toString',
|
||||
'valueOf'
|
||||
];
|
||||
|
||||
var hiddenKeys = enumBugKeys.concat('length', 'prototype');
|
||||
|
||||
// `Object.getOwnPropertyNames` method
|
||||
// https://tc39.es/ecma262/#sec-object.getownpropertynames
|
||||
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
|
||||
return objectKeysInternal(O, hiddenKeys);
|
||||
};
|
||||
|
||||
var objectGetOwnPropertyNames = {
|
||||
f: f$1
|
||||
};
|
||||
|
||||
var f = Object.getOwnPropertySymbols;
|
||||
|
||||
var objectGetOwnPropertySymbols = {
|
||||
f: f
|
||||
};
|
||||
|
||||
// all object keys, includes non-enumerable and symbols
|
||||
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
|
||||
var keys = objectGetOwnPropertyNames.f(anObject(it));
|
||||
var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
|
||||
return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
|
||||
};
|
||||
|
||||
var copyConstructorProperties = function (target, source) {
|
||||
var keys = ownKeys(source);
|
||||
var defineProperty = objectDefineProperty.f;
|
||||
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
|
||||
for (var i = 0; i < keys.length; i++) {
|
||||
var key = keys[i];
|
||||
if (!has$1(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
|
||||
}
|
||||
};
|
||||
|
||||
var replacement = /#|\.prototype\./;
|
||||
|
||||
var isForced = function (feature, detection) {
|
||||
var value = data[normalize(feature)];
|
||||
return value == POLYFILL ? true
|
||||
: value == NATIVE ? false
|
||||
: typeof detection == 'function' ? fails(detection)
|
||||
: !!detection;
|
||||
};
|
||||
|
||||
var normalize = isForced.normalize = function (string) {
|
||||
return String(string).replace(replacement, '.').toLowerCase();
|
||||
};
|
||||
|
||||
var data = isForced.data = {};
|
||||
var NATIVE = isForced.NATIVE = 'N';
|
||||
var POLYFILL = isForced.POLYFILL = 'P';
|
||||
|
||||
var isForced_1 = isForced;
|
||||
|
||||
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/*
|
||||
options.target - name of the target object
|
||||
options.global - target is the global object
|
||||
options.stat - export as static methods of target
|
||||
options.proto - export as prototype methods of target
|
||||
options.real - real prototype method for the `pure` version
|
||||
options.forced - export even if the native feature is available
|
||||
options.bind - bind methods to the target, required for the `pure` version
|
||||
options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
|
||||
options.unsafe - use the simple assignment of property instead of delete + defineProperty
|
||||
options.sham - add a flag to not completely full polyfills
|
||||
options.enumerable - export as enumerable property
|
||||
options.noTargetGet - prevent calling a getter on target
|
||||
*/
|
||||
var _export = function (options, source) {
|
||||
var TARGET = options.target;
|
||||
var GLOBAL = options.global;
|
||||
var STATIC = options.stat;
|
||||
var FORCED, target, key, targetProperty, sourceProperty, descriptor;
|
||||
if (GLOBAL) {
|
||||
target = global_1;
|
||||
} else if (STATIC) {
|
||||
target = global_1[TARGET] || setGlobal(TARGET, {});
|
||||
} else {
|
||||
target = (global_1[TARGET] || {}).prototype;
|
||||
}
|
||||
if (target) for (key in source) {
|
||||
sourceProperty = source[key];
|
||||
if (options.noTargetGet) {
|
||||
descriptor = getOwnPropertyDescriptor(target, key);
|
||||
targetProperty = descriptor && descriptor.value;
|
||||
} else targetProperty = target[key];
|
||||
FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
|
||||
// contained in target
|
||||
if (!FORCED && targetProperty !== undefined) {
|
||||
if (typeof sourceProperty === typeof targetProperty) continue;
|
||||
copyConstructorProperties(sourceProperty, targetProperty);
|
||||
}
|
||||
// add a flag to not completely full polyfills
|
||||
if (options.sham || (targetProperty && targetProperty.sham)) {
|
||||
createNonEnumerableProperty(sourceProperty, 'sham', true);
|
||||
}
|
||||
// extend global
|
||||
redefine(target, key, sourceProperty, options);
|
||||
}
|
||||
};
|
||||
|
||||
// `IsArray` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-isarray
|
||||
var isArray = Array.isArray || function isArray(arg) {
|
||||
return classofRaw(arg) == 'Array';
|
||||
};
|
||||
|
||||
// `ToObject` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-toobject
|
||||
var toObject = function (argument) {
|
||||
return Object(requireObjectCoercible(argument));
|
||||
};
|
||||
|
||||
var createProperty = function (object, key, value) {
|
||||
var propertyKey = toPrimitive(key);
|
||||
if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
|
||||
else object[propertyKey] = value;
|
||||
};
|
||||
|
||||
var engineIsNode = classofRaw(global_1.process) == 'process';
|
||||
|
||||
var engineUserAgent = getBuiltIn('navigator', 'userAgent') || '';
|
||||
|
||||
var process = global_1.process;
|
||||
var versions = process && process.versions;
|
||||
var v8 = versions && versions.v8;
|
||||
var match, version;
|
||||
|
||||
if (v8) {
|
||||
match = v8.split('.');
|
||||
version = match[0] + match[1];
|
||||
} else if (engineUserAgent) {
|
||||
match = engineUserAgent.match(/Edge\/(\d+)/);
|
||||
if (!match || match[1] >= 74) {
|
||||
match = engineUserAgent.match(/Chrome\/(\d+)/);
|
||||
if (match) version = match[1];
|
||||
}
|
||||
}
|
||||
|
||||
var engineV8Version = version && +version;
|
||||
|
||||
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
|
||||
/* global Symbol -- required for testing */
|
||||
return !Symbol.sham &&
|
||||
// Chrome 38 Symbol has incorrect toString conversion
|
||||
// Chrome 38-40 symbols are not inherited from DOM collections prototypes to instances
|
||||
(engineIsNode ? engineV8Version === 38 : engineV8Version > 37 && engineV8Version < 41);
|
||||
});
|
||||
|
||||
var useSymbolAsUid = nativeSymbol
|
||||
/* global Symbol -- safe */
|
||||
&& !Symbol.sham
|
||||
&& typeof Symbol.iterator == 'symbol';
|
||||
|
||||
var WellKnownSymbolsStore = shared('wks');
|
||||
var Symbol$1 = global_1.Symbol;
|
||||
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : Symbol$1 && Symbol$1.withoutSetter || uid;
|
||||
|
||||
var wellKnownSymbol = function (name) {
|
||||
if (!has$1(WellKnownSymbolsStore, name) || !(nativeSymbol || typeof WellKnownSymbolsStore[name] == 'string')) {
|
||||
if (nativeSymbol && has$1(Symbol$1, name)) {
|
||||
WellKnownSymbolsStore[name] = Symbol$1[name];
|
||||
} else {
|
||||
WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
|
||||
}
|
||||
} return WellKnownSymbolsStore[name];
|
||||
};
|
||||
|
||||
var SPECIES$1 = wellKnownSymbol('species');
|
||||
|
||||
// `ArraySpeciesCreate` abstract operation
|
||||
// https://tc39.es/ecma262/#sec-arrayspeciescreate
|
||||
var arraySpeciesCreate = function (originalArray, length) {
|
||||
var C;
|
||||
if (isArray(originalArray)) {
|
||||
C = originalArray.constructor;
|
||||
// cross-realm fallback
|
||||
if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
|
||||
else if (isObject(C)) {
|
||||
C = C[SPECIES$1];
|
||||
if (C === null) C = undefined;
|
||||
}
|
||||
} return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
|
||||
};
|
||||
|
||||
var SPECIES = wellKnownSymbol('species');
|
||||
|
||||
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
|
||||
// We can't use this feature detection in V8 since it causes
|
||||
// deoptimization and serious performance degradation
|
||||
// https://github.com/zloirock/core-js/issues/677
|
||||
return engineV8Version >= 51 || !fails(function () {
|
||||
var array = [];
|
||||
var constructor = array.constructor = {};
|
||||
constructor[SPECIES] = function () {
|
||||
return { foo: 1 };
|
||||
};
|
||||
return array[METHOD_NAME](Boolean).foo !== 1;
|
||||
});
|
||||
};
|
||||
|
||||
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
|
||||
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
|
||||
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
|
||||
|
||||
// We can't use this feature detection in V8 since it causes
|
||||
// deoptimization and serious performance degradation
|
||||
// https://github.com/zloirock/core-js/issues/679
|
||||
var IS_CONCAT_SPREADABLE_SUPPORT = engineV8Version >= 51 || !fails(function () {
|
||||
var array = [];
|
||||
array[IS_CONCAT_SPREADABLE] = false;
|
||||
return array.concat()[0] !== array;
|
||||
});
|
||||
|
||||
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
|
||||
|
||||
var isConcatSpreadable = function (O) {
|
||||
if (!isObject(O)) return false;
|
||||
var spreadable = O[IS_CONCAT_SPREADABLE];
|
||||
return spreadable !== undefined ? !!spreadable : isArray(O);
|
||||
};
|
||||
|
||||
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
|
||||
|
||||
// `Array.prototype.concat` method
|
||||
// https://tc39.es/ecma262/#sec-array.prototype.concat
|
||||
// with adding support of @@isConcatSpreadable and @@species
|
||||
_export({ target: 'Array', proto: true, forced: FORCED }, {
|
||||
// eslint-disable-next-line no-unused-vars -- required for `.length`
|
||||
concat: function concat(arg) {
|
||||
var O = toObject(this);
|
||||
var A = arraySpeciesCreate(O, 0);
|
||||
var n = 0;
|
||||
var i, k, length, len, E;
|
||||
for (i = -1, length = arguments.length; i < length; i++) {
|
||||
E = i === -1 ? O : arguments[i];
|
||||
if (isConcatSpreadable(E)) {
|
||||
len = toLength(E.length);
|
||||
if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
|
||||
for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
|
||||
} else {
|
||||
if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
|
||||
createProperty(A, n++, E);
|
||||
}
|
||||
}
|
||||
A.length = n;
|
||||
return A;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* When using server-side processing, the default mode of operation for
|
||||
* bootstrap-table is to simply throw away any data that currently exists in the
|
||||
* table and make a request to the server to get the first page of data to
|
||||
* display. This is fine for an empty table, but if you already have the first
|
||||
* page of data displayed in the plain HTML, it is a waste of resources. As
|
||||
* such, you can use data-defer-url instead of data-url to allow you to instruct
|
||||
* bootstrap-table to not make that initial request, rather it will use the data
|
||||
* already on the page.
|
||||
*
|
||||
* @author: Ruben Suarez
|
||||
* @webSite: http://rubensa.eu.org
|
||||
* @update zhixin wen <wenzhixin2010@gmail.com>
|
||||
*/
|
||||
|
||||
$__default['default'].extend($__default['default'].fn.bootstrapTable.defaults, {
|
||||
deferUrl: undefined
|
||||
});
|
||||
|
||||
$__default['default'].BootstrapTable = /*#__PURE__*/function (_$$BootstrapTable) {
|
||||
_inherits(_class, _$$BootstrapTable);
|
||||
|
||||
var _super = _createSuper(_class);
|
||||
|
||||
function _class() {
|
||||
_classCallCheck(this, _class);
|
||||
|
||||
return _super.apply(this, arguments);
|
||||
}
|
||||
|
||||
_createClass(_class, [{
|
||||
key: "init",
|
||||
value: function init() {
|
||||
var _get2;
|
||||
|
||||
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
|
||||
args[_key] = arguments[_key];
|
||||
}
|
||||
|
||||
(_get2 = _get(_getPrototypeOf(_class.prototype), "init", this)).call.apply(_get2, [this].concat(args));
|
||||
|
||||
if (this.options.deferUrl) {
|
||||
this.options.url = this.options.deferUrl;
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
return _class;
|
||||
}($__default['default'].BootstrapTable);
|
||||
|
||||
})));
|
||||
10
InvenTree/InvenTree/static/bootstrap-table/extensions/defer-url/bootstrap-table-defer-url.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
1904
InvenTree/InvenTree/static/bootstrap-table/extensions/editable/bootstrap-table-editable.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/extensions/editable/bootstrap-table-editable.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
2232
InvenTree/InvenTree/static/bootstrap-table/extensions/export/bootstrap-table-export.js
vendored
Normal file
File diff suppressed because it is too large
10
InvenTree/InvenTree/static/bootstrap-table/extensions/export/bootstrap-table-export.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
@@ -0,0 +1,13 @@
|
||||
@charset "UTF-8";
|
||||
/**
|
||||
* @author: Dennis Hernández
|
||||
* @webSite: http://djhvscf.github.io/Blog
|
||||
* @version: v2.1.1
|
||||
*/
|
||||
.no-filter-control {
|
||||
height: 34px;
|
||||
}
|
||||
|
||||
.filter-control {
|
||||
margin: 0 2px 2px 2px;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff