Compare commits

..

17 Commits

Author SHA1 Message Date
Oliver
c17b34a864 Specify upper bound for mariadb python version (#3333)
- Versions 1.1.0 and above cause the build process to break
2022-07-15 14:30:34 +10:00
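
For reference, a pin of this kind normally lives in the Python requirements as an upper-bound version specifier, e.g. a line along the lines of mariadb<1.1.0; the exact specifier used in the PR is not shown here.
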
Oliver
5d44811f98 Bump version number to 0.7.5 (#3324)
* Bump version number to 0.7.5

* Add fix for stock migration

- Ensure the serial number is not too large when performing migration
- Add unit test for data migration

(cherry picked from commit 661fbf0e3d)
(cherry picked from commit 233446c2bb)

* Add similar fixes for PO and SO migrations

(cherry picked from commit bde23c130c)
(cherry picked from commit 4261090e6d)

* And similar fix for BuildOrder reference field

(cherry picked from commit ca0f4e0031)
(cherry picked from commit 9fa4ee48d6)

* Fix for plugin unit testing

* Revert test database name

(cherry picked from commit 53333c29c3)

* Override default URL behaviour for unit test

(cherry picked from commit 2c12a69529)
2022-07-15 11:58:06 +10:00
Oliver
49c61f74b1 Migration bug fix (#3325)
* Add fix for stock migration

- Ensure the serial number is not too large when performing migration
- Add unit test for data migration

(cherry picked from commit 661fbf0e3d)
(cherry picked from commit 233446c2bb)

* Add similar fixes for PO and SO migrations

(cherry picked from commit bde23c130c)
(cherry picked from commit 4261090e6d)

* And similar fix for BuildOrder reference field

(cherry picked from commit ca0f4e0031)
(cherry picked from commit 9fa4ee48d6)

* Fix for plugin unit testing

* Revert test database name

(cherry picked from commit 53333c29c3)

* Override default URL behaviour for unit test

(cherry picked from commit 2c12a69529)
2022-07-15 11:56:02 +10:00
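
The stock, PO, SO and BuildOrder migration fixes above guard against serial or reference values that are too large for an integer database column. A minimal sketch of the general pattern as a Django data migration; the app, model and field names (stock, StockItem, serial, serial_int) are illustrative and the migration dependency is a placeholder, not the exact code from these commits:

from django.db import migrations

# Largest value a signed 32-bit integer column can hold
MAX_SERIAL_INT = 0x7FFFFFFF


def clamp_serial_numbers(apps, schema_editor):
    """Populate the integer serial field, clamping values that would overflow."""
    StockItem = apps.get_model('stock', 'StockItem')

    for item in StockItem.objects.exclude(serial=None).exclude(serial=''):
        try:
            serial_int = int(item.serial)
        except (ValueError, TypeError):
            serial_int = 0

        # Ensure the serial number is not too large for the database column
        item.serial_int = min(serial_int, MAX_SERIAL_INT)
        item.save()


class Migration(migrations.Migration):
    dependencies = [('stock', '0001_initial')]  # placeholder dependency

    operations = [
        migrations.RunPython(clamp_serial_numbers, migrations.RunPython.noop),
    ]
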
Oliver
a19d342800 Fix translation issue with javascript (#3246) (#3252)
* Adds a custom translation node class to strip dirty characters from translated strings

* Update javascript files to use new template tag

* Override behaviour of {% load i18n %}

- No longer requires custom tag loading
- All templates now use escaped translation values
- Requires re-ordering of app loading
- Revert js_i18n to simply i18n

* CI step now lints JS files compiled in each locale

* Checking that the CI step fails

* Revert "Checking that the CI step fails"

This reverts commit ba2be0470d.

(cherry picked from commit 44b42050aa)
2022-06-25 11:28:17 +10:00
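
The JavaScript translation fix above comes down to escaping translated strings before they are rendered into templated .js files, so that stray quotes or control characters cannot break the generated code. A minimal sketch of a custom Django template tag for that purpose; the tag name js_trans is illustrative and this is not the exact implementation cherry-picked here:

from django import template
from django.utils.html import escapejs
from django.utils.translation import gettext

register = template.Library()


@register.simple_tag
def js_trans(text):
    """Translate a string and escape characters that would break a JS string literal."""
    translated = gettext(text)

    # Encode characters that could terminate the surrounding JS string or script tag
    return escapejs(translated)
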
Oliver
32b81aa598 Bump version number to 0.7.4 (#3241) 2022-06-23 14:23:17 +10:00
Oliver
5196fd5546 Prevent newline chars from breaking part detail page (#3245) 2022-06-23 14:23:07 +10:00
Oliver
f9aa5a60fd Override 2FA token removal form (#3240)
- Requires the user to input a valid token to remove 2FA for their account

Co-authored-by: Matthias Mair <code@mjmair.com>
2022-06-23 13:04:53 +10:00
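
The 2FA change above requires the user to prove possession of a valid token before two-factor authentication can be removed from their account. A rough sketch of that idea using django-otp's match_token helper; the form name and field are illustrative, and the real change overrides the existing 2FA removal form rather than defining a new one:

from django import forms
from django_otp import match_token


class DisableTwoFactorForm(forms.Form):
    """Require a valid TOTP token before 2FA can be disabled for an account."""

    token = forms.CharField(label='Current 2FA token')

    def __init__(self, user, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.user = user

    def clean_token(self):
        token = self.cleaned_data['token']

        # match_token returns the device that accepted the token, or None
        if match_token(self.user, token) is None:
            raise forms.ValidationError('Invalid token')

        return token
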
Oliver
b9c6cd70d4 Enable "sanitize" option for EasyMDE editor (#3206)
* Enable "sanitize" option for EasyMDE editor

* Bump version number
2022-06-16 13:31:20 +10:00
Oliver
26bf51c20a Back porting of security patches (#3197)
* Merge pull request from GHSA-fr2w-mp56-g4xp

* Enforce file download for attachments table(s)

* Enforce file download for attachment in 'StockItemTestResult' table

(cherry picked from commit 76aa3a75f2)

* Merge pull request from GHSA-7rq4-qcpw-74gq

* Merge pull request from GHSA-rm89-9g65-4ffr

* Enable HTML escaping for all tables by default

* Enable HTML escaping for all tables by default

* Adds automatic escaping for bootstrap tables where custom formatter function is specified

- Intercept the row data *before* it is provided to the renderer function
- Adds a function for sanitizing nested data structure

* Sanitize form data before processing

(cherry picked from commit cd418d6948)

* Increment version number for release

* Fix sanitization for array case - was missing a return value
2022-06-15 20:43:43 +10:00
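
The security back-ports above revolve around escaping HTML in any data that reaches a table renderer or form, including nested structures, and the final item fixes a missing return value in the array branch of the sanitizer. The actual sanitizer lives in the JavaScript layer; the following Python sketch only illustrates the recursive-escaping idea:

from django.utils.html import escape


def sanitize_nested(data):
    """Recursively HTML-escape every string inside a nested dict/list structure."""
    if isinstance(data, str):
        return escape(data)

    if isinstance(data, list):
        # The array branch must return a value (the bug fixed in the last commit item)
        return [sanitize_nested(item) for item in data]

    if isinstance(data, dict):
        return {key: sanitize_nested(value) for key, value in data.items()}

    # Leave numbers, booleans, None, etc. untouched
    return data
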
Oliver
f9c28eedaf Add error handling for case where user does not have git installed (#3179) (#3198)
(cherry picked from commit 5ecba6b13c)
2022-06-15 18:52:10 +10:00
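
The error-handling fix above amounts to tolerating an environment where git is not installed when collecting version information. A generic sketch of that pattern, not the exact code from the PR:

import subprocess


def get_git_commit_hash():
    """Return the short git commit hash, or None if git is unavailable."""
    try:
        result = subprocess.run(
            ['git', 'rev-parse', '--short', 'HEAD'],
            capture_output=True, text=True, check=True,
        )
        return result.stdout.strip()
    except (FileNotFoundError, subprocess.CalledProcessError):
        # git is not installed, or this is not a git checkout
        return None
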
Oliver
9bdbb0137f Adds release.yml file for auto-generation of release notes (#3194) 2022-06-14 20:30:09 +10:00
Oliver
412b464b09 Prevent auto-creation of SalesOrderShipment if we are importing data (#3170)
- Fixes a bug which prevents importing of datasets
2022-06-10 11:26:16 +10:00
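
The fix above skips the automatic creation of a SalesOrderShipment while fixture data is being imported, since the imported dataset already contains its own shipments. A hedged sketch of the pattern, assuming a helper such as is_importing_data() that detects a loaddata run; the import path, signal wiring and reference value are illustrative:

import sys

from django.db.models.signals import post_save
from django.dispatch import receiver

from order.models import SalesOrder, SalesOrderShipment  # illustrative import path


def is_importing_data():
    """Return True if the server is currently running a 'loaddata' import."""
    return 'loaddata' in sys.argv


@receiver(post_save, sender=SalesOrder)
def create_default_shipment(sender, instance, created, **kwargs):
    """Create a default shipment for a new SalesOrder, but not while importing data."""
    if created and not is_importing_data():
        SalesOrderShipment.objects.create(order=instance, reference='1')
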
Oliver
f48bd62534 Bump version number 2022-06-02 16:35:00 +10:00
Oliver
bd92ff1290 Fix filtering for purchaseorder table on supplierpart page (#3115) 2022-06-02 14:25:28 +10:00
Oliver
3b3238f762 Check user permissions before performing search (#3083)
* Check user permissions before performing search

* JS linting

(cherry picked from commit 6c7a80c141)
2022-05-27 13:27:28 +10:00
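
The search fix above checks the user's permissions before each search query is issued, so that the global search does not hit endpoints the user cannot view. The real change is in the JavaScript search logic; the following Python sketch just illustrates the gating idea, with illustrative endpoint and permission names:

def search_targets_for(user):
    """Return only the search endpoints the user is actually allowed to query."""
    # Mapping of search endpoint -> required "view" permission (illustrative names)
    targets = {
        'part': 'part.view_part',
        'stockitem': 'stock.view_stockitem',
        'purchaseorder': 'order.view_purchaseorder',
        'salesorder': 'order.view_salesorder',
    }

    return [
        endpoint for endpoint, permission in targets.items()
        if user.has_perm(permission)
    ]
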
Oliver
81d29efc12 Improve error management for order price calculation (#3075)
* Improve error management for order price calculation

- If there are missing exchange rates, it throws an error
- Very much an edge case

* Style fixes

* Add warning message if total order price cannot be calculated

* price -> cost

(cherry picked from commit 640a5d0f24)
2022-05-27 13:27:22 +10:00
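
The pricing fix above catches the case where an exchange rate is missing instead of letting the order total calculation raise an unhandled error, and shows a warning instead. A sketch of that approach assuming django-money's exchange helpers; the order/line attribute names are illustrative:

from djmoney.contrib.exchange.exceptions import MissingRate
from djmoney.contrib.exchange.models import convert_money


def total_order_cost(order, currency):
    """Sum line-item costs in a single currency, or return None if a rate is missing."""
    total = None

    for line in order.lines.all():
        try:
            converted = convert_money(line.price, currency)
        except MissingRate:
            # A required exchange rate is missing - the total cost cannot be calculated
            return None

        total = converted if total is None else total + converted

    return total
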
Oliver Walters
044315afbe Bump version number 2022-05-24 20:33:48 +10:00
1812 changed files with 178602 additions and 714918 deletions


@@ -1,33 +0,0 @@
version = 1
exclude_patterns = [
"docs/docs/javascripts/**", # Docs: Helpers
"docs/ci/**", # Docs: CI
"InvenTree/InvenTree/static/**", # Backend: CUI static files
"ci/**", # Backend: CI
"InvenTree/**/migrations/*.py", # Backend: Migration files
"src/frontend/src/locales/**", # Frontend: Translations
]
test_patterns = ["**/test_*.py", "**/test.py", "**/tests.py"]
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"
[[analyzers]]
name = "docker"
[[analyzers]]
name = "test-coverage"
enabled = false


@@ -1,70 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/python-3
{
"name": "InvenTree devcontainer",
"dockerComposeFile": "docker-compose.yml",
"service": "inventree",
"overrideCommand": true,
"workspaceFolder": "/home/inventree/",
// Configure tool-specific properties.
"customizations": {
// Configure properties specific to VS Code.
"vscode": {
// Set *default* container specific settings.json values on container create.
"settings": {
"python.defaultInterpreterPath": "${containerWorkspaceFolder}/dev/venv/bin/python",
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"batisteo.vscode-django",
"eamodio.gitlens"
]
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [5173, 8000, 8080],
"portsAttributes": {
"5173": {
"label": "Vite Server"
},
"8000": {
"label": "InvenTree Server"
},
"8080": {
"label": "mkdocs server"
}
},
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": ".devcontainer/postCreateCommand.sh",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"containerUser": "vscode",
"remoteEnv": {
// Python config
"PIP_USER": "no",
// used to load the venv into the PATH and activate it
// Ref: https://stackoverflow.com/a/56286534
"VIRTUAL_ENV": "${containerWorkspaceFolder}/dev/venv",
"PATH": "${containerWorkspaceFolder}/dev/venv/bin:${containerEnv:PATH}"
}
}


@@ -1,40 +0,0 @@
version: "3"
services:
db:
image: postgres:13
restart: unless-stopped
expose:
- 5432/tcp
volumes:
- inventreedatabase:/var/lib/postgresql/data:z
environment:
POSTGRES_DB: inventree
POSTGRES_USER: inventree_user
POSTGRES_PASSWORD: inventree_password
inventree:
build:
context: ..
target: dev
args:
base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"
data_dir: "dev"
volumes:
- ../:/home/inventree:z
environment:
INVENTREE_DEBUG: True
INVENTREE_DB_ENGINE: postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_HOST: db
INVENTREE_DB_USER: inventree_user
INVENTREE_DB_PASSWORD: inventree_password
INVENTREE_PLUGINS_ENABLED: True
INVENTREE_PY_ENV: /home/inventree/dev/venv
depends_on:
- db
volumes:
inventreedatabase:


@@ -1,22 +0,0 @@
#!/bin/bash
# Avoiding Dubious Ownership in Dev Containers for setup commands that use git
git config --global --add safe.directory /home/inventree
# create venv
python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
. /home/inventree/dev/venv/bin/activate
# Run initial InvenTree server setup
invoke update -s
# Configure dev environment
invoke setup-dev
# Install required frontend packages
invoke frontend-install
# remove existing gitconfig created by "Avoiding Dubious Ownership" step
# so that it gets copied from host to the container to have your global
# git config in container
rm -f /home/vscode/.gitconfig


@@ -1,71 +0,0 @@
# Python Django
# Test a Django project on multiple versions of Python.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
- master
pool:
vmImage: ubuntu-latest
strategy:
matrix:
Python39:
PYTHON_VERSION: '3.9'
maxParallel: 3
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(PYTHON_VERSION)'
architecture: 'x64'
- task: PythonScript@0
displayName: 'Export project path'
inputs:
scriptSource: 'inline'
script: |
"""Search all subdirectories for `manage.py`."""
from glob import iglob
from os import path
# Python >= 3.5
manage_py = next(iglob(path.join('**', 'manage.py'), recursive=True), None)
if not manage_py:
raise SystemExit('Could not find a Django project')
project_location = path.dirname(path.abspath(manage_py))
print('Found Django project in', project_location)
print('##vso[task.setvariable variable=projectRoot]{}'.format(project_location))
- script: |
python -m pip install --upgrade pip setuptools wheel
pip install -r requirements.txt
pip install -r requirements-dev.txt
pip install unittest-xml-reporting coverage invoke
sudo apt-get install poppler-utils
sudo apt-get install libpoppler-dev
displayName: 'Install prerequisites'
- script: |
pushd '$(projectRoot)'
invoke update
coverage run manage.py test --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner --no-input
coverage xml -i
displayName: 'Run tests'
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: inventree
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
INVENTREE_PLUGINS_ENABLED: true
- task: PublishTestResults@2
inputs:
testResultsFiles: "**/TEST-*.xml"
testRunTitle: 'Python $(PYTHON_VERSION)'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'


@@ -4,7 +4,7 @@ env:
es2021: true
jquery: true
extends:
- eslint:recommended
- google
parserOptions:
ecmaVersion: 12
rules:
@@ -19,8 +19,6 @@ rules:
valid-jsdoc: off
no-multiple-empty-lines: off
comma-dangle: off
no-unused-vars: off
no-useless-escape: off
prefer-spread: off
indent:
- error

.github/CODEOWNERS

@@ -1,2 +0,0 @@
# General owner is the maintainers team
* @SchrodingersGat

.github/FUNDING.yml

@@ -1,4 +1,2 @@
github: inventree
ko_fi: inventree
patreon: inventree
custom: [paypal.me/inventree]
ko_fi: inventree

.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,47 @@
---
name: Bug
about: Create a bug report to help us improve InvenTree!
title: "[BUG] Enter bug description"
labels: bug, question
assignees: ''
---
<!---
Everything inside these brackets is hidden - please remove them where you fill out information.
--->
**Describe the bug**
<!---
A clear and concise description of what the bug is.
--->
**Steps to Reproduce**
Steps to reproduce the behavior:
<!---
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
--->
**Expected behavior**
<!---
A clear and concise description of what you expected to happen.
--->
<!---
**Screenshots**
If applicable, add screenshots to help explain your problem.
--->
**Deployment Method**
- [ ] Docker
- [ ] Bare Metal
**Version Information**
<!---
You can get this by going to the "About InvenTree" section in the upper right corner and clicking on the "copy version information" button
--->


@@ -1,69 +0,0 @@
name: "Bug"
description: "Create a bug report to help us improve InvenTree!"
labels: ["bug", "question", "triage:not-checked"]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=) and read the [Frequently Asked Questions](https://docs.inventree.org/en/latest/faq/)!"
options:
- label: "I checked and didn't find a similar issue"
required: true
- type: textarea
id: description
validations:
required: true
attributes:
label: "Describe the bug*"
description: "A clear and concise description of what the bug is."
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "Steps to Reproduce"
description: "Steps to reproduce the behaviour, please make it detailed"
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See the error
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: "Expected behaviour"
description: "A clear and concise description of what you expected to happen."
placeholder: "..."
- type: checkboxes
id: deployment
attributes:
label: "Deployment Method"
options:
- label: "Docker"
- label: "Bare metal"
- type: textarea
id: version-info
validations:
required: true
attributes:
label: "Version Information"
description: "The version info block."
placeholder: "You can get this by going to the `About InvenTree` section in the upper right corner and clicking on the `copy version information` button"
- type: checkboxes
id: can-reproduce
attributes:
label: "Please verify if you can reproduce this bug on the demo site."
description: "You can sign in at [InvenTree Demo](https://demo.inventree.org) with admin:inventree. Note that this instance runs on the latest dev version, so your bug may be fixed there."
options:
- label: "I can reproduce this bug on the demo site."
- type: textarea
id: logs
attributes:
label: "Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: false


@@ -1,15 +0,0 @@
name: "Documentation"
description: "Create an issue to improve the documentation"
labels: ["documentation", "triage:not-checked"]
body:
- type: markdown
attributes:
value: |
Create a new issue regarding the InvenTree documentation
- type: textarea
id: repro
attributes:
label: Body of the issue
description: Please provide one distinct thing to fix or a clearly defined enhancement
validations:
required: true


@@ -0,0 +1,26 @@
---
name: Feature request
about: Suggest an idea for this project
title: "[FR]"
labels: enhancement
assignees: ''
---
**Is your feature request the result of a bug?**
Please link it here.
**Problem**
A clear and concise description of what the problem is. e.g. I'm always frustrated when [...]
**Suggested solution**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Examples of other systems**
Show how other software handles your FR if you have examples.
**Do you want to develop this?**
If so please describe briefly how you would like to implement it (so we can give advice) and if you have experience in the needed technology (you do not need to be a pro - this is just information for us).


@@ -1,53 +0,0 @@
name: Feature Request
description: Suggest an idea for this project
title: "[FR] title"
labels: ["enhancement", "triage:not-checked"]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "Please verify that this feature request has NOT been suggested before."
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=)"
options:
- label: "I checked and didn't find a similar feature request"
required: true
- type: textarea
id: problem
validations:
required: true
attributes:
label: "Problem statement"
description: "A clear and concise description of the problem or missing feature."
placeholder: "I am always struggling with ..."
- type: textarea
id: solution
validations:
required: true
attributes:
label: "Suggested solution"
description: "A clear and concise description of what you want to happen to solve the problem statement."
placeholder: "In my use-case, ..."
- type: textarea
id: alternatives
validations:
required: true
attributes:
label: "Describe alternatives you've considered"
description: "A clear and concise description of any alternative solutions or features you've considered."
placeholder: "This could also be done by doing ..."
- type: textarea
id: examples
validations:
required: false
attributes:
label: "Examples of other systems"
description: "Show how other software handles your FR if you have examples."
placeholder: "I software xxx this is done in the following way..."
- type: checkboxes
id: self-develop
attributes:
label: "Do you want to develop this?"
description: "This is not required, and you do not need to be a pro - this is just as information for us."
options:
- label: "I want to develop this."
required: false


@@ -1,46 +0,0 @@
name: "Install problems"
description: "If you have problems deploying InvenTree"
labels: ["question", "triage:not-checked", "setup"]
body:
- type: checkboxes
id: deployment
validations:
required: true
attributes:
label: "Deployment Method"
options:
- label: "Installer"
- label: "Docker Development"
- label: "Docker Production"
- label: "Bare metal Development"
- label: "Bare metal Production"
- label: "Digital Ocean image"
- label: "Other (please provide a link `Steps to Reproduce`"
- type: textarea
id: description
validations:
required: true
attributes:
label: "Describe the problem*"
description: "A clear and concise description of what is failing."
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "Steps to Reproduce"
description: "Steps to reproduce the behaviour, please make it detailed"
placeholder: |
0. Link to all docs you used
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See the error
- type: textarea
id: logs
attributes:
label: "Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: bash
validations:
required: false


@@ -1,17 +0,0 @@
name: 'Migration test'
description: 'Run migration test sequence'
author: 'InvenTree'
runs:
using: 'composite'
steps:
- name: Data Import Export
shell: bash
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json


@@ -1,93 +0,0 @@
name: 'Setup Environment'
description: 'Setup the environment for general InvenTree tests'
author: 'InvenTree'
inputs:
python:
required: false
description: 'Install python.'
default: 'true'
npm:
required: false
description: 'Install npm.'
default: 'false'
install:
required: false
description: 'Install the InvenTree requirements?'
default: 'false'
dev-install:
required: false
description: 'Install the InvenTree development requirements?'
default: 'false'
update:
required: false
description: 'Should a full update cycle be run?'
default: 'false'
apt-dependency:
required: false
description: 'Extra APT package for install.'
pip-dependency:
required: false
description: 'Extra python package for install.'
runs:
using: 'composite'
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
# Python installs
- name: Set up Python ${{ env.python_version }}
if: ${{ inputs.python == 'true' }}
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
with:
python-version: ${{ env.python_version }}
cache: pip
- name: Install Base Python Dependencies
if: ${{ inputs.python == 'true' }}
shell: bash
run: |
python3 -m pip install -U pip
pip3 install invoke wheel uv
- name: Set the VIRTUAL_ENV variable for uv to work
run: echo "VIRTUAL_ENV=${Python_ROOT_DIR}" >> $GITHUB_ENV
shell: bash
- name: Install Specific Python Dependencies
if: ${{ inputs.pip-dependency }}
shell: bash
run: uv pip install ${{ inputs.pip-dependency }}
# NPM installs
- name: Install node.js ${{ env.node_version }}
if: ${{ inputs.npm == 'true' }}
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # pin to v3.8.2
with:
node-version: ${{ env.node_version }}
cache: 'npm'
- name: Install npm packages
if: ${{ inputs.npm == 'true' }}
shell: bash
run: npm install
# OS installs
- name: Install OS Dependencies
if: ${{ inputs.apt-dependency }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install ${{ inputs.apt-dependency }}
# Invoke commands
- name: Install dev requirements
if: ${{ inputs.dev-install == 'true' ||inputs.install == 'true' }}
shell: bash
run: uv pip install -r requirements-dev.txt
- name: Run invoke install
if: ${{ inputs.install == 'true' }}
shell: bash
run: invoke install --uv
- name: Run invoke update
if: ${{ inputs.update == 'true' }}
shell: bash
run: invoke update --uv

.github/release.yml

@@ -4,7 +4,6 @@ changelog:
exclude:
labels:
- translation
- documentation
categories:
- title: Breaking Changes
labels:
@@ -16,11 +15,7 @@ changelog:
- title: New Features
labels:
- Semver-Minor
- feature
- enhancement
- title: Experimental Features
labels:
- experimental
- title: Bug Fixes
labels:
- Semver-Patch


@@ -1,37 +0,0 @@
# Backport tagged issues to a stable branch.
#
# To enable backporting for a pull request, add the label "backport" to the PR.
# Additionally, add a label with the prefix "backport-to-" and the target branch
name: Backport
on:
pull_request_target:
types: [ "labeled", "closed" ]
jobs:
backport:
name: Backport PR
runs-on: ubuntu-latest
if: |
github.event.pull_request.merged == true
&& contains(github.event.pull_request.labels.*.name, 'backport')
&& (
(github.event.action == 'labeled' && github.event.label.name == 'backport')
|| (github.event.action == 'closed')
)
steps:
- name: Backport Action
uses: sqren/backport-github-action@f54e19901f2a57f8b82360f2490d47ee82ec82c6 # pin@v9.2.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
auto_backport_label_prefix: backport-to-
add_original_reviewers: true
- name: Info log
if: ${{ success() }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() }}
run: cat ~/.backport/backport.debug.log


@@ -8,9 +8,6 @@ on:
branches:
- l10
env:
python_version: 3.9
jobs:
check:
@@ -23,16 +20,17 @@ jobs:
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
apt-dependency: gettext
uses: actions/checkout@v2
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
- name: Test Translations
run: invoke translate
- name: Check Migration Files


@@ -1,160 +0,0 @@
# Build, test and push InvenTree docker image
# This workflow runs under any of the following conditions:
#
# - Push to the master branch
# - Publish release
#
# The following actions are performed:
#
# - Check that the version number matches the current branch or tag
# - Build the InvenTree docker image
# - Run suite of unit tests against the build image
# - Push the compiled, tested image to dockerhub
name: Docker
on:
release:
types: [ published ]
push:
branches:
- 'master'
pull_request:
branches:
- 'master'
jobs:
paths-filter:
name: Filter
runs-on: ubuntu-latest
outputs:
docker: ${{ steps.filter.outputs.docker }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1
id: filter
with:
filters: |
docker:
- .github/workflows/docker.yaml
- docker/**
- docker-compose.yml
- docker.dev.env
- Dockerfile
- requirements.txt
- tasks.py
# Build the docker image
build:
needs: paths-filter
if: needs.paths-filter.outputs.docker == 'true' || github.event_name == 'release' || github.event_name == 'push'
permissions:
contents: read
packages: write
id-token: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
python_version: "3.11"
runs-on: ubuntu-latest # in the future we can try to use alternative runners here
steps:
- name: Check out repo
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Set Up Python ${{ env.python_version }}
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
with:
python-version: ${{ env.python_version }}
- name: Version Check
run: |
pip install requests
pip install pyyaml
python3 ci/version_check.py
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
- name: Build Docker Image
# Build the development docker image (using docker-compose.yml)
run: docker-compose build --no-cache
- name: Update Docker Image
run: |
docker-compose run inventree-dev-server invoke update
docker-compose run inventree-dev-server invoke setup-dev
docker-compose up -d
docker-compose run inventree-dev-server invoke wait
- name: Check Data Directory
# The following file structure should have been created by the docker image
run: |
test -d data
test -d data/env
test -d data/pgdb
test -d data/media
test -d data/static
test -d data/plugins
test -f data/config.yaml
test -f data/plugins.txt
test -f data/secret_key.txt
- name: Run Unit Tests
run: |
echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> docker.dev.env
docker-compose run inventree-dev-server invoke test --disable-pty
docker-compose run inventree-dev-server invoke test --migrations --disable-pty
docker-compose down
- name: Clean up test folder
run: |
rm -rf InvenTree/_testfolder
- name: Set up QEMU
if: github.event_name != 'pull_request'
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # pin@v3.0.0
- name: Set up Docker Buildx
if: github.event_name != 'pull_request'
uses: docker/setup-buildx-action@f95db51fddba0c2d1ec667646a06c2ce06100226 # pin@v3.0.0
- name: Set up cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@11086d25041f77fe8fe7b9ea4e48e3b9192b8f19 # pin@v3.1.2
- name: Login to Dockerhub
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # pin@v3.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log into registry ghcr.io
if: github.event_name != 'pull_request'
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # pin@v3.0.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
if: github.event_name != 'pull_request'
id: meta
uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # pin@v5.0.0
with:
images: |
inventree/inventree
ghcr.io/inventree/inventree
- name: Build and Push
id: build-and-push
if: github.event_name != 'pull_request'
uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # pin@v5.0.0
with:
context: .
platforms: linux/amd64,linux/arm64
push: true
sbom: true
provenance: false
target: production
tags: ${{ env.docker_tags }}
build-args: |
commit_hash=${{ env.git_commit_hash }}
commit_date=${{ env.git_commit_date }}
- name: Sign the published image
if: ${{ false }} # github.event_name != 'pull_request'
env:
COSIGN_EXPERIMENTAL: "true"
run: cosign sign ${{ steps.meta.outputs.tags }}@${{ steps.build-and-push.outputs.digest }}

.github/workflows/docker_latest.yaml

@@ -0,0 +1,51 @@
# Build and push latest docker image on push to master branch
name: Docker Build
on:
push:
branches:
- 'master'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Check version number
run: |
python3 ci/check_version_number.py --dev
- name: Build Docker Image
run: |
cd docker
docker-compose build
docker-compose run inventree-dev-server invoke update
- name: Run unit tests
run: |
cd docker
docker-compose up -d
docker-compose run inventree-dev-server invoke wait
docker-compose run inventree-dev-server invoke test
docker-compose down
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
tags: inventree/inventree:latest
- name: Image Digest
run: echo ${{ steps.docker_build.outputs.digest }}

.github/workflows/docker_stable.yaml

@@ -0,0 +1,42 @@
# Build and push docker image on push to 'stable' branch
# Docker build will be uploaded to dockerhub with the 'inventree:stable' tag
name: Docker Build
on:
push:
branches:
- 'stable'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Check version number
run: |
python3 ci/check_version_number.py --release
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
build-args:
branch=stable
tags: inventree/inventree:stable
- name: Image Digest
run: echo ${{ steps.docker_build.outputs.digest }}

.github/workflows/docker_tag.yaml

@@ -0,0 +1,38 @@
# Publish docker images to dockerhub on a tagged release
# Docker build will be uploaded to dockerhub with the 'inventree:<tag>' tag
name: Docker Publish
on:
release:
types: [published]
jobs:
publish_image:
name: Push InvenTree web server image to dockerhub
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Check Release tag
run: |
python3 ci/check_version_number.py --release --tag ${{ github.event.release.tag_name }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
build-args:
tag=${{ github.event.release.tag_name }}
tags: inventree/inventree:${{ github.event.release.tag_name }}


@@ -1,218 +1,133 @@
# Checks for each PR / push
name: QC
name: QC checks
on:
push:
branches-ignore: [ 'l10*' ]
branches-ignore:
- l10*
pull_request:
branches-ignore: [ 'l10*' ]
branches-ignore:
- l10*
env:
python_version: 3.9
node_version: 16
# The OS version must be set per job
server_start_sleep: 60
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: inventree
INVENTREE_MEDIA_ROOT: ../test_inventree_media
INVENTREE_STATIC_ROOT: ../test_inventree_static
INVENTREE_BACKUP_DIR: ../test_inventree_backup
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
jobs:
paths-filter:
name: Filter
pep_style:
name: PEP style (python)
runs-on: ubuntu-latest
outputs:
server: ${{ steps.filter.outputs.server }}
migrations: ${{ steps.filter.outputs.migrations }}
frontend: ${{ steps.filter.outputs.frontend }}
api: ${{ steps.filter.outputs.api }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50 # pin@v2.11.1
id: filter
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@v2
with:
filters: |
server:
- 'InvenTree/**'
- 'requirements.txt'
- 'requirements-dev.txt'
migrations:
- '**/migrations/**'
- '.github/workflows**'
api:
- 'InvenTree/InvenTree/api_version.py'
frontend:
- 'src/frontend/**'
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install deps
run: |
pip install flake8==3.8.3
pip install pep8-naming==0.11.1
- name: flake8
run: |
flake8 InvenTree
javascript:
name: Style - Classic UI [JS]
runs-on: ubuntu-20.04
needs: [ 'pre-commit' ]
name: javascript template files
needs: pep_style
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
- name: Checkout Code
uses: actions/checkout@v2
- name: Install node.js ${{ env.node_version }}
uses: actions/setup-node@v2
with:
npm: true
install: true
- name: Check Templated JS Files
node-version: ${{ env.node_version }}
cache: 'npm'
- run: npm install
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
invoke static
- name: Check Templated Files
run: |
cd ci
python3 check_js_templates.py
python check_js_templates.py
- name: Lint Javascript Files
run: |
python InvenTree/manage.py prerender
npx eslint InvenTree/InvenTree/static_i18n/i18n/*.js
pre-commit:
name: Style [pre-commit]
runs-on: ubuntu-20.04
needs: paths-filter
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.frontend == 'true'
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
with:
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Run pre-commit Checks
uses: pre-commit/action@646c83fcd040023954eafda54b4db0192ce70507 # pin@v3.0.0
- name: Check Version
run: |
pip install requests
python3 ci/version_check.py
mkdocs:
name: Style [Documentation]
runs-on: ubuntu-20.04
needs: paths-filter
html:
name: html template files
needs: pep_style
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@65d7f2d534ac1bc67fcd62888c5f4f3d2cb2b236 # pin@v4.7.1
uses: actions/checkout@v2
- name: Install node.js ${{ env.node_version }}
uses: actions/setup-node@v2
with:
node-version: ${{ env.node_version }}
cache: 'npm'
- run: npm install
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: ${{ env.python_version }}
- name: Check Config
cache: 'pip'
- name: Install Dependencies
run: |
pip install pyyaml
pip install -r docs/requirements.txt
python docs/ci/check_mkdocs_config.py
- name: Check Links
uses: gaurav-nelson/github-action-markdown-link-check@v1
with:
folder-path: docs
config-file: docs/mlc_config.json
check-modified-files-only: 'yes'
use-quiet-mode: 'yes'
schema:
name: Tests - API Schema Documentation
runs-on: ubuntu-20.04
needs: paths-filter
if: needs.paths-filter.outputs.server == 'true'
env:
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
INVENTREE_PYTHON_TEST_USERNAME: testuser
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Export API Documentation
run: invoke schema --ignore-warnings --filename InvenTree/schema.yml
- name: Upload schema
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
with:
name: schema.yml
path: InvenTree/schema.yml
- name: Download public schema
if: needs.paths-filter.outputs.api == 'false'
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
invoke static
- name: Check HTML Files
run: |
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
echo "Version: $version"
url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
echo "URL: $url"
curl -s -o api.yaml $url
echo "Downloaded api.yaml"
- name: Check for differences in API Schema
if: needs.paths-filter.outputs.api == 'false'
run: |
diff --color -u InvenTree/schema.yml api.yaml
diff -u InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
- name: Check schema - including warnings
run: invoke schema
continue-on-error: true
- name: Extract version for publishing
id: version
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
run: |
pip install requests >/dev/null 2>&1
version="$(python3 ci/version_check.py only_version 2>&1)"
echo "Version: $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
schema-push:
name: Push new schema
runs-on: ubuntu-20.04
needs: [paths-filter, schema]
if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
env:
version: ${{ needs.schema.outputs.version }}
steps:
- uses: actions/checkout@v4
with:
repository: inventree/schema
token: ${{ secrets.SCHEMA_PAT }}
- name: Download schema artifact
uses: actions/download-artifact@v3
with:
name: schema.yml
- name: Move schema to correct location
run: |
echo "Version: $version"
mkdir export/${version}
mv schema.yml export/${version}/api.yaml
- uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "Update API schema for ${version}"
npx markuplint InvenTree/build/templates/build/*.html
npx markuplint InvenTree/company/templates/company/*.html
npx markuplint InvenTree/order/templates/order/*.html
npx markuplint InvenTree/part/templates/part/*.html
npx markuplint InvenTree/stock/templates/stock/*.html
npx markuplint InvenTree/templates/*.html
npx markuplint InvenTree/templates/InvenTree/*.html
npx markuplint InvenTree/templates/InvenTree/settings/*.html
python:
name: Tests - inventree-python
runs-on: ubuntu-20.04
needs: pre-commit
name: python bindings
needs: pep_style
runs-on: ubuntu-latest
env:
wrapper_name: inventree-python
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_MEDIA_ROOT: ../test_inventree_media
INVENTREE_STATIC_ROOT: ../test_inventree_static
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
@@ -221,66 +136,79 @@ jobs:
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
npm: true
- name: Download Python Code For `${{ env.wrapper_name }}`
run: git clone --depth 1 https://github.com/inventree/${{ env.wrapper_name }} ./${{ env.wrapper_name }}
- name: Start InvenTree Server
- name: Checkout Code
uses: actions/checkout@v2
- name: Install InvenTree
run: |
sudo apt-get update
sudo apt-get install python3-dev python3-pip python3-venv
pip3 install invoke
invoke install
invoke migrate
- name: Download Python Code
run: |
git clone --depth 1 https://github.com/inventree/${{ env.wrapper_name }} ./${{ env.wrapper_name }}
- name: Start Server
run: |
invoke delete-data -f
invoke import-fixtures
invoke server -a 127.0.0.1:12345 &
invoke wait
- name: Run Tests For `${{ env.wrapper_name }}`
- name: Run Tests
run: |
cd ${{ env.wrapper_name }}
invoke check-server
coverage run -m unittest discover -s test/
coverage:
name: Tests - DB [SQLite] + Coverage
runs-on: ubuntu-20.04
needs: [ 'pre-commit' ]
continue-on-error: true # continue if a step fails so that coverage gets pushed
name: Sqlite / coverage
needs: ['javascript', 'html']
runs-on: ubuntu-latest
env:
INVENTREE_DB_NAME: ./inventree.sqlite
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_PLUGINS_ENABLED: true
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python ${{ env.python_version }}
uses: actions/setup-python@v2
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Data Export Test
uses: ./.github/actions/migration
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
python -m pip install -U pip
pip3 install invoke
invoke update
- name: Coverage Tests
run: |
invoke coverage
- name: Data Import Export
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
rm inventree.sqlite
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
- name: Coverage Tests
run: invoke test --coverage
- name: Upload Coverage Report
uses: coverallsapp/github-action@3dfc5567390f6fa9267c0ee9c251e4c8c3f18949 # pin@v2.2.3
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
run: coveralls
postgres:
name: Tests - DB [PostgreSQL]
runs-on: ubuntu-20.04
needs: [ 'pre-commit' ]
name: Postgres
needs: ['javascript', 'html']
runs-on: ubuntu-latest
if: github.event_name == 'push'
env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
@@ -294,7 +222,7 @@ jobs:
services:
postgres:
image: postgres:14
image: postgres
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
@@ -307,24 +235,39 @@ jobs:
- 6379:6379
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python ${{ env.python_version }}
uses: actions/setup-python@v2
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg django-redis>=5.0.0
dev-install: true
update: true
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install libpq-dev gettext
python -m pip install -U pip
pip3 install invoke
pip3 install psycopg2
pip3 install django-redis>=5.0.0
invoke update
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration
- name: Data Import Export
run: |
invoke migrate
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json
mysql:
name: Tests - DB [MySQL]
runs-on: ubuntu-20.04
needs: [ 'pre-commit' ]
name: MySql
needs: ['javascript', 'html']
runs-on: ubuntu-latest
if: github.event_name == 'push'
env:
# Database backend configuration
@@ -350,167 +293,29 @@ jobs:
- 3306:3306
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python ${{ env.python_version }}
uses: actions/setup-python@v2
with:
apt-dependency: gettext poppler-utils libmysqlclient-dev
pip-dependency: mysqlclient
dev-install: true
update: true
python-version: ${{ env.python_version }}
cache: 'pip'
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install libmysqlclient-dev gettext
python -m pip install -U pip
pip3 install invoke
pip3 install mysqlclient
invoke update
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration
migration-tests:
name: Tests - Migrations [PostgreSQL]
runs-on: ubuntu-latest
needs: paths-filter
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true'
env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: false
services:
postgres:
image: postgres:14
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg
dev-install: true
update: true
- name: Run Tests
run: invoke test --migrations --report
migrations-checks:
name: Tests - Full Migration [SQLite]
runs-on: ubuntu-latest
needs: paths-filter
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true'
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: false
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
name: Checkout Code
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
- name: Fetch Database
run: git clone --depth 1 https://github.com/inventree/test-db ./test-db
- name: Latest Database
- name: Data Import Export
run: |
cp test-db/latest.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.10.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.10.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.11.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.11.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.12.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.12.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.13.5 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.13.5.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
platform_ui:
name: Tests - Platform UI
runs-on: ubuntu-20.04
timeout-minutes: 60
needs: [ 'pre-commit', 'paths-filter' ]
if: needs.paths-filter.outputs.frontend == 'true'
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
INVENTREE_DEBUG: True
INVENTREE_PLUGINS_ENABLED: false
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
install: true
update: true
- name: Set up test data
run: invoke setup-test -i
- name: Install dependencies
run: inv frontend-compile
- name: Install Playwright Browsers
run: cd src/frontend && npx playwright install --with-deps
- name: Run Playwright tests
run: cd src/frontend && npx playwright test
- uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
if: always()
with:
name: playwright-report
path: src/frontend/playwright-report/
retention-days: 30
platform_ui_build:
name: Build - UI Platform
runs-on: ubuntu-20.04
timeout-minutes: 60
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static
zip -r frontend-build.zip web/
- uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # pin@v3.1.3
with:
name: frontend-build
path: InvenTree/web/static/web
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json


@@ -1,51 +0,0 @@
# Runs on releases
name: Publish release notes
on:
release:
types: [ published ]
jobs:
stable:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Version Check
run: |
pip install requests
python3 ci/version_check.py
- name: Push to Stable Branch
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
if: env.stable_release == 'true'
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: stable
force: true
publish-build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && npm run build
- name: Zip frontend
run: |
cd InvenTree/web/static/web
zip -r ../frontend-build.zip *
- uses: svenstaro/upload-release-action@1beeb572c19a9242f4361f4cee78f8e0d9aec5df # pin@2.7.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: InvenTree/web/static/frontend-build.zip
asset_name: frontend-build.zip
tag: ${{ github.ref }}
overwrite: true


@@ -3,7 +3,7 @@ name: Mark stale issues and pull requests
on:
schedule:
- cron: '24 11 * * *'
- cron: '24 11 * * *'
jobs:
stale:
@@ -14,12 +14,12 @@ jobs:
pull-requests: write
steps:
- uses: actions/stale@1160a2240286f5da8ec72b1c0816ce2481aabf84 # pin@v8.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue seems stale. Please react to show this is still important.'
stale-pr-message: 'This PR seems stale. Please react to show this is still important.'
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
start-date: '2022-01-01'
exempt-all-milestones: true
- uses: actions/stale@v3
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: 'This issue seems stale. Please react to show this is still important.'
stale-pr-message: 'This PR seems stale. Please react to show this is still important.'
stale-issue-label: 'inactive'
stale-pr-label: 'inactive'
start-date: '2022-01-01'
exempt-all-milestones: true


@@ -5,10 +5,6 @@ on:
branches:
- master
env:
python_version: 3.9
node_version: 16
jobs:
build:
@@ -21,19 +17,23 @@ jobs:
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Environment Setup
uses: ./.github/actions/setup
uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v1
with:
install: true
npm: true
apt-dependency: gettext
python-version: 3.9
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
- name: Make Translations
run: invoke translate
run: |
invoke translate
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
@@ -42,7 +42,7 @@ jobs:
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10


@@ -1,22 +0,0 @@
name: Update dependency files regularly
on:
workflow_dispatch: null
schedule:
- cron: "0 0 * * *"
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Setup
run: pip install -r requirements-dev.txt
- name: Update requirements.txt
run: pip-compile --output-file=requirements.txt requirements.in -U
- name: Update requirements-dev.txt
run: pip-compile --generate-hashes --output-file=requirements-dev.txt requirements-dev.in -U
- uses: stefanzweifel/git-auto-commit-action@fd157da78fa13d9383e5580d1fd1184d89554b51 # pin@v4.15.1
with:
commit_message: "[Bot] Updated dependency"
branch: dep-update

.github/workflows/version.yml

@@ -0,0 +1,21 @@
# Checks version number
name: version number
on:
pull_request:
branches-ignore:
- l10*
jobs:
check_version:
name: version number
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Check version number
run: |
python3 ci/check_version_number.py --branch ${{ github.base_ref }}

.github/workflows/welcome.yml

@@ -0,0 +1,25 @@
# welcome new contributors
name: Welcome
on:
pull_request:
types: [opened]
issues:
types: [opened]
jobs:
run:
runs-on: ubuntu-latest
permissions:
pull-requests: write
steps:
- uses: actions/first-interaction@v1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
issue-message: |
Welcome to InvenTree! Please check the [contributing docs](https://inventree.readthedocs.io/en/latest/contribute/) on how to help.
If you experience setup / install issues please read all [install docs]( https://inventree.readthedocs.io/en/latest/start/intro/).
pr-message: |
This is your first PR, welcome!
Please check [Contributing](https://github.com/inventree/InvenTree/blob/master/CONTRIBUTING.md) to make sure your submission fits our general code-style and workflow.
Make sure to document why this PR is needed and to link connected issues so we can review it faster.

.gitignore

@@ -8,7 +8,6 @@ __pycache__/
env/
inventree-env/
./build/
.cache/
develop-eggs/
dist/
bin/
@@ -26,7 +25,7 @@ var/
*.egg-info/
.installed.cfg
*.egg
*.DS_Store
# Django stuff:
*.log
@@ -37,8 +36,11 @@ local_settings.py
*.old
# Files used for testing
inventree-demo-dataset/
inventree-data/
dummy_image.*
_tmp.csv
# Sphinx files
docs/_build
# Local static and media file storage (only when running in development mode)
inventree_media
@@ -60,16 +62,7 @@ secret_key.txt
# IDE / development files
.idea/
*.code-workspace
.bash_history
.DS_Store
# https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
.vscode/*
#!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
#!.vscode/extensions.json
#!.vscode/*.code-snippets
.vscode/
# Coverage reports
.coverage
@@ -80,8 +73,6 @@ js_tmp/
# Development files
dev/
data/
env/
# Locale stats file
locale_stats.json
@@ -93,18 +84,7 @@ node_modules/
maintenance_mode_state.txt
# plugin dev directory
InvenTree/plugins/
plugins/
# Compiled translation files
*.mo
messages.ts
# Generated API schema file
api.yaml
# web frontend (static files)
InvenTree/web/static
# Generated docs files
docs/docs/api/*.yml
docs/docs/api/schema/*.yml

.gitpod.yml

@@ -0,0 +1,47 @@
tasks:
- name: Setup django
before: |
export INVENTREE_DB_ENGINE='sqlite3'
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
export PIP_USER='no'
python3 -m venv venv
source venv/bin/activate
pip install invoke
inv install
mkdir dev
inv update
gp sync-done setup_server
- name: Start server
init: gp sync-await setup_server
command: |
gp sync-await setup_server
export INVENTREE_DB_ENGINE='sqlite3'
export INVENTREE_DB_NAME='/workspace/InvenTree/dev/database.sqlite3'
export INVENTREE_MEDIA_ROOT='/workspace/InvenTree/inventree-data/media'
export INVENTREE_STATIC_ROOT='/workspace/InvenTree/dev/static'
source venv/bin/activate
rm /workspace/InvenTree/inventree-data -r
git clone https://github.com/inventree/demo-dataset /workspace/InvenTree/inventree-data
invoke delete-data -f
invoke import-records -f /workspace/InvenTree/inventree-data/inventree_data.json
inv server
# List the ports to expose. Learn more https://www.gitpod.io/docs/config-ports/
ports:
- port: 8000
onOpen: open-preview
github:
prebuilds:
master: true
pullRequests: false
pullRequestsFromForks: true
addBadge: true
addLabel: gitpod-ready
addCheck: false

View File

@@ -1,38 +0,0 @@
name: inventree
description: Open Source Inventory Management System
homepage: https://inventree.org
notifications: true
buildpack: https://github.com/mjmair/heroku-buildpack-python#v216-mjmair
env:
- STACK=heroku-20
- DISABLE_COLLECTSTATIC=1
- INVENTREE_DB_ENGINE=sqlite3
- INVENTREE_DB_NAME=database.sqlite3
- INVENTREE_PLUGINS_ENABLED
- INVENTREE_MEDIA_ROOT=/opt/inventree/media
- INVENTREE_STATIC_ROOT=/opt/inventree/static
- INVENTREE_BACKUP_DIR=/opt/inventree/backup
- INVENTREE_PLUGIN_FILE=/opt/inventree/plugins.txt
- INVENTREE_CONFIG_FILE=/opt/inventree/config.yaml
after_install: contrib/packager.io/postinstall.sh
before:
- contrib/packager.io/before.sh
dependencies:
- curl
- "python3.9 | python3.10 | python3.11"
- "python3.9-venv | python3.10-venv | python3.11-venv"
- "python3.9-dev | python3.10-dev | python3.11-dev"
- python3-pip
- python3-cffi
- python3-brotli
- python3-wheel
- libpango-1.0-0
- libharfbuzz0b
- libpangoft2-1.0-0
- gettext
- nginx
- jq
- libffi7
targets:
ubuntu-20.04: true
debian-11: true

View File

@@ -1,73 +1,19 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
InvenTree/InvenTree/static/.*|
InvenTree/locale/.*|
src/frontend/src/locales/.*|
.*/migrations/.*
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v4.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: mixed-line-ending
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.2
- repo: https://github.com/pycqa/flake8
rev: '4.0.1'
hooks:
- id: ruff-format
args: [--preview]
- id: ruff
args: [
--fix,
--preview
]
- repo: https://github.com/matmair/ruff-pre-commit
rev: 830893bf46db844d9c99b6c468e285199adf2de6 # uv-018
- id: flake8
- repo: https://github.com/pycqa/isort
rev: '5.10.1'
hooks:
- id: pip-compile
name: pip-compile requirements-dev.in
args: [requirements-dev.in, -o, requirements-dev.txt, --python-version=3.9]
files: ^requirements-dev\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [requirements.in, -o, requirements.txt,--python-version=3.9]
files: ^requirements\.(in|txt)$
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.1
hooks:
- id: djlint-django
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
hooks:
- id: codespell
exclude: >
(?x)^(
docs/docs/stylesheets/.*|
docs/docs/javascripts/.*|
docs/docs/webfonts/.* |
src/frontend/src/locales/.* |
)$
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v4.0.0-alpha.8"
hooks:
- id: prettier
files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
additional_dependencies:
- "prettier@^2.4.1"
- "@trivago/prettier-plugin-sort-imports"
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "v9.0.0-beta.0"
hooks:
- id: eslint
additional_dependencies:
- eslint@^8.41.0
- eslint-config-google@^0.14.0
- eslint-plugin-react@6.10.3
- babel-eslint@6.1.2
- "@typescript-eslint/eslint-plugin@latest"
- "@typescript-eslint/parser"
files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
- id: isort

33
.vscode/launch.json vendored
View File

@@ -1,33 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "InvenTree Server",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/InvenTree/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": true
},
{
"name": "InvenTree Server - 3rd party",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/InvenTree/manage.py",
"args": ["runserver"],
"django": true,
"justMyCode": false
},
{
"name": "InvenTree Frontend - Vite",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5173",
"webRoot": "${workspaceFolder}/src/frontend"
}
]
}

70
.vscode/tasks.json vendored
View File

@@ -1,70 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
// the problemMatchers should prevent vscode from asking how it should check the output
"version": "2.0.0",
"tasks": [
{
"label": "worker",
"type": "shell",
"command": "inv worker",
"problemMatcher": [],
},
{
"label": "clean-settings",
"type": "shell",
"command": "inv clean-settings",
"problemMatcher": [],
},
{
"label": "delete-data",
"type": "shell",
"command": "inv delete-data",
"problemMatcher": [],
},
{
"label": "migrate",
"type": "shell",
"command": "inv migrate",
"problemMatcher": [],
},
{
"label": "server",
"type": "shell",
"command": "inv server",
"problemMatcher": [],
},
{
"label": "setup-dev",
"type": "shell",
"command": "inv setup-dev",
"problemMatcher": [],
},
{
"label": "setup-test",
"type": "shell",
"command": "inv setup-test -i --path dev/inventree-demo-dataset",
"problemMatcher": [],
},
{
"label": "superuser",
"type": "shell",
"command": "inv superuser",
"problemMatcher": [],
},
{
"label": "test",
"type": "shell",
"command": "inv test",
"problemMatcher": [],
},
{
"label": "update",
"type": "shell",
"command": "inv update",
"problemMatcher": [],
},
]
}

View File

@@ -1,6 +1,168 @@
### Contributing to InvenTree
Please read the contribution guidelines below, before submitting your first pull request to the InvenTree codebase.
Hi there, thank you for your interest in contributing!
Please read our contribution guidelines, before submitting your first pull request to the InvenTree codebase.
## Setup
Refer to our [contribution guidelines](https://docs.inventree.org/en/latest/develop/contributing/) for more information!
Please run `invoke setup_dev` in the root directory of your InvenTree code base to set up your development environment before starting to contribute. This will install and configure pre-commit, which runs a set of checks before each commit and helps reduce style errors.
## Branches and Versioning
InvenTree roughly follows the [GitLab flow](https://docs.gitlab.com/ee/topics/gitlab_flow.html) branching style, to allow simple management of multiple tagged releases, short-lived branches, and development on the main branch.
### Version Numbering
InvenTree version numbering follows the [semantic versioning](https://semver.org/) specification.
### Master Branch
The HEAD of the "main" or "master" branch of InvenTree represents the current "latest" state of code development.
- All feature branches are merged into master
- All bug fixes are merged into master
**No pushing to master:** New features must be submitted as a pull request from a separate branch (one branch per feature).
### Feature Branches
Feature branches should be branched *from* the *master* branch.
- One major feature per branch / pull request
- Feature pull requests are merged back *into* the master branch
- Features *may* also be merged into a release candidate branch
### Stable Branch
The HEAD of the "stable" branch represents the latest stable release code.
- Versioned releases are merged into the "stable" branch
- Bug fix branches are made *from* the "stable" branch
#### Release Candidate Branches
- Release candidate branches are made from master, and merged into stable.
- RC branches are targeted at a major/minor version e.g. "0.5"
- When a release candidate branch is merged into *stable*, the release is tagged
#### Bugfix Branches
- If a bug is discovered in a tagged release version of InvenTree, a "bugfix" or "hotfix" branch should be made *from* that tagged release
- When approved, the branch is merged back *into* stable, with an incremented PATCH number (e.g. 0.4.1 -> 0.4.2)
- The bugfix *must* also be cherry picked into the *master* branch.
## Environment
### Target version
We are currently targeting:
| Name | Minimum version |
|---|---|
| Python | 3.9 |
| Django | 3.2 |
### Auto creating updates
The following tools can be used to auto-upgrade syntax that was deprecated in newer versions:
```bash
pip install pyupgrade
pip install django-upgrade
```
To update the codebase, run the following commands:
```bash
pyupgrade `find . -name "*.py"`
django-upgrade --target-version 3.2 `find . -name "*.py"`
```
## Credits
If you add any new dependencies / libraries, they need to be added to [the docs](https://github.com/inventree/inventree-docs/blob/master/docs/credits.md). Please try to do that in a timely manner.
## Migration Files
Any required migration files **must** be included in the commit, or the pull-request will be rejected. If you change the underlying database schema, make sure you run `invoke migrate` and commit the migration files before submitting the PR.
*Note: A github action checks for unstaged migration files and will reject the PR if it finds any!*
## Unit Testing
Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage for any new features is insufficient, or the overall code coverage is decreased.
The InvenTree code base makes use of [GitHub actions](https://github.com/features/actions) to run a suite of automated tests against the code base every time a new pull request is received. These actions include (but are not limited to):
- Checking Python and Javascript code against standard style guides
- Running unit test suite
- Automated building and pushing of docker images
- Generating translation files
The various GitHub actions can be found in the `.github/workflows` directory.
## Code Style
Submitted Python code is automatically checked against PEP style guidelines. Locally you can run `invoke style` to ensure the style checks will pass, before submitting the PR.
Please write docstrings for each function and class - we follow the [google doc-style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for Python. Docstrings for general javascript code are encouraged! Docstyles are checked by `invoke style`.
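For reference, here is a minimal example of a Google-style docstring; the function itself is purely illustrative:
```python
def allocate_stock(item, quantity):
    """Allocate a quantity of stock against a build order line.

    This function is a hypothetical example - only the docstring format matters here.

    Args:
        item: The stock item to allocate from.
        quantity: The number of units to allocate.

    Returns:
        The updated allocation record.

    Raises:
        ValueError: If the requested quantity exceeds the available stock.
    """
    ...
```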
## Documentation
New features or updates to existing features should be accompanied by user documentation. A PR with associated documentation should link to the matching PR at https://github.com/inventree/inventree-docs/
## Translations
Any user-facing strings *must* be passed through the translation engine.
- InvenTree code is written in English
- User translatable strings are provided in English as the primary language
- Secondary language translations are provided [via Crowdin](https://crowdin.com/project/inventree)
*Note: Translation files are updated via GitHub actions - you do not need to compile translation files before submitting a pull request!*
### Python Code
For strings exposed via Python code, use the following format:
```python
from django.utils.translation import ugettext_lazy as _
user_facing_string = _('This string will be exposed to the translation engine!')
```
### Templated Strings
HTML and javascript files are passed through the django templating engine. Translatable strings are implemented as follows:
```html
{% load i18n %}
<span>{% trans "This string will be translated" %} - this string will not!</span>
```
## Github use
### Tags
The tags describe issues and PRs in multiple areas:
| Area | Name | Description |
|---|---|---|
| Type Labels | | |
| | bug | Identifies a bug which needs to be addressed |
| | dependency | Relates to a project dependency |
| | duplicate | Duplicate of another issue or PR |
| | enhancement | This is a suggested enhancement or new feature |
| | help wanted | Assistance required |
| | invalid | This issue or PR is considered invalid |
| | inactive | Indicates lack of activity |
| | question | This is a question |
| | roadmap | This is a roadmap feature with no immediate plans for implementation |
| | security | Relates to a security issue |
| | starter | Good issue for a developer new to the project |
| | wontfix | No work will be done against this issue or PR |
| Feature Labels | | |
| | API | Relates to the API |
| | barcode | Barcode scanning and integration |
| | build | Build orders |
| | importer | Data importing and processing |
| | order | Purchase orders and sales orders |
| | part | Parts |
| | plugin | Plugin ecosystem |
| | pricing | Pricing functionality |
| | report | Report generation |
| | stock | Stock item management |
| | user interface | User interface |
| Ecosystem Labels | | |
| | demo | Relates to the InvenTree demo server or dataset |
| | docker | Docker / docker-compose |
| | CI | CI / unit testing ecosystem |
| | setup | Relates to the InvenTree setup / installation process |

View File

@@ -1,166 +0,0 @@
# The InvenTree dockerfile provides two build targets:
#
# production:
# - Required files are copied into the image
# - Runs InvenTree web server under gunicorn
#
# dev:
# - Expects source directories to be loaded as a run-time volume
# - Runs InvenTree web server under django development server
# - Monitors source files for any changes, and live-reloads server
ARG base_image=python:3.11-alpine3.18
FROM ${base_image} as inventree_base
# Build arguments for this image
ARG commit_tag=""
ARG commit_hash=""
ARG commit_date=""
ARG data_dir="data"
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK 1
ENV INVOKE_RUN_SHELL="/bin/ash"
ENV INVENTREE_LOG_LEVEL="WARNING"
ENV INVENTREE_DOCKER="true"
# InvenTree paths
ENV INVENTREE_HOME="/home/inventree"
ENV INVENTREE_MNG_DIR="${INVENTREE_HOME}/InvenTree"
ENV INVENTREE_DATA_DIR="${INVENTREE_HOME}/${data_dir}"
ENV INVENTREE_STATIC_ROOT="${INVENTREE_DATA_DIR}/static"
ENV INVENTREE_MEDIA_ROOT="${INVENTREE_DATA_DIR}/media"
ENV INVENTREE_BACKUP_DIR="${INVENTREE_DATA_DIR}/backup"
ENV INVENTREE_PLUGIN_DIR="${INVENTREE_DATA_DIR}/plugins"
# InvenTree configuration files
ENV INVENTREE_CONFIG_FILE="${INVENTREE_DATA_DIR}/config.yaml"
ENV INVENTREE_SECRET_KEY_FILE="${INVENTREE_DATA_DIR}/secret_key.txt"
ENV INVENTREE_PLUGIN_FILE="${INVENTREE_DATA_DIR}/plugins.txt"
# Worker configuration (can be altered by user)
ENV INVENTREE_GUNICORN_WORKERS="4"
ENV INVENTREE_BACKGROUND_WORKERS="4"
# Default web server address:port
ENV INVENTREE_WEB_ADDR=0.0.0.0
ENV INVENTREE_WEB_PORT=8000
ENV VIRTUAL_ENV=/usr/local
LABEL org.label-schema.schema-version="1.0" \
org.label-schema.build-date=${DATE} \
org.label-schema.vendor="inventree" \
org.label-schema.name="inventree/inventree" \
org.label-schema.url="https://hub.docker.com/r/inventree/inventree" \
org.label-schema.vcs-url="https://github.com/inventree/InvenTree.git" \
org.label-schema.vcs-ref=${commit_tag}
# Install required system level packages
RUN apk add --no-cache \
git gettext py-cryptography \
# Image format support
libjpeg libwebp zlib \
# Weasyprint requirements : https://doc.courtbouillon.org/weasyprint/stable/first_steps.html#alpine-3-12
py3-pip py3-pillow py3-cffi py3-brotli pango poppler-utils openldap \
# Core database packages
postgresql13-client && \
# fonts
apk --update --upgrade --no-cache add fontconfig ttf-freefont font-noto terminus-font && fc-cache -f
EXPOSE 8000
RUN mkdir -p ${INVENTREE_HOME}
WORKDIR ${INVENTREE_HOME}
COPY ./docker/requirements.txt base_requirements.txt
COPY ./requirements.txt ./
COPY ./docker/install_build_packages.sh .
RUN chmod +x install_build_packages.sh
# For ARMv7 architecture, add the piwheels repo (for cryptography library)
# Otherwise, we have to build from source, which is difficult
# Ref: https://github.com/inventree/InvenTree/pull/4598
RUN if [ `apk --print-arch` = "armv7" ]; then \
printf "[global]\nextra-index-url=https://www.piwheels.org/simple\n" > /etc/pip.conf ; \
fi
COPY tasks.py docker/gunicorn.conf.py docker/init.sh ./
RUN chmod +x init.sh
ENTRYPOINT ["/bin/ash", "./init.sh"]
FROM inventree_base as prebuild
ENV PATH=/root/.local/bin:$PATH
RUN ./install_build_packages.sh --no-cache --virtual .build-deps && \
pip install --user uv --no-cache-dir && pip install -r base_requirements.txt -r requirements.txt --no-cache && \
apk --purge del .build-deps
# Frontend builder image:
FROM prebuild as frontend
RUN apk add --no-cache --update nodejs npm && npm install -g yarn
RUN yarn config set network-timeout 600000 -g
COPY InvenTree ${INVENTREE_HOME}/InvenTree
COPY src ${INVENTREE_HOME}/src
COPY tasks.py ${INVENTREE_HOME}/tasks.py
RUN cd ${INVENTREE_HOME}/InvenTree && inv frontend-compile
# InvenTree production image:
# - Copies required files from local directory
# - Starts a gunicorn webserver
FROM inventree_base as production
ENV INVENTREE_DEBUG=False
# As .git directory is not available in production image, we pass the commit information via ENV
ENV INVENTREE_COMMIT_HASH="${commit_hash}"
ENV INVENTREE_COMMIT_DATE="${commit_date}"
# use dependencies and compiled wheels from the prebuild image
ENV PATH=/root/.local/bin:$PATH
COPY --from=prebuild /root/.local /root/.local
# Copy source code
COPY InvenTree ./InvenTree
COPY --from=frontend ${INVENTREE_HOME}/InvenTree/web/static/web ./InvenTree/web/static/web
# Launch the production server
# TODO: Work out why environment variables cannot be interpolated in this command
# TODO: e.g. -b ${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT} fails here
CMD gunicorn -c ./gunicorn.conf.py InvenTree.wsgi -b 0.0.0.0:8000 --chdir ./InvenTree
FROM inventree_base as dev
# Vite server (for local frontend development)
EXPOSE 5173
# Install packages required for building python packages
RUN ./install_build_packages.sh
RUN pip install uv --no-cache-dir && pip install -r base_requirements.txt --no-cache
# Install nodejs / npm / yarn
RUN apk add --no-cache --update nodejs npm && npm install -g yarn
RUN yarn config set network-timeout 600000 -g
# The development image requires the source code to be mounted to /home/inventree/
# So from here, we don't actually "do" anything, apart from some file management
ENV INVENTREE_DEBUG=True
# Location for python virtual environment
# If the INVENTREE_PY_ENV variable is set, the entrypoint script will use it!
ENV INVENTREE_PY_ENV="${INVENTREE_DATA_DIR}/env"
WORKDIR ${INVENTREE_HOME}
# Entrypoint ensures that we are running in the python virtual environment
ENTRYPOINT ["/bin/ash", "./docker/init.sh"]
# Launch the development server
CMD ["invoke", "server", "-a", "${INVENTREE_WEB_ADDR}:${INVENTREE_WEB_PORT}"]

View File

@@ -1,4 +1,5 @@
"""The InvenTree module provides high-level management and functionality.
"""
The InvenTree module provides high-level management and functionality.
It provides a number of helper functions and generic classes which are used by InvenTree apps.
"""

View File

@@ -1,61 +1,17 @@
"""Admin classes."""
"""Admin classes"""
from django.contrib import admin
from django.db.models.fields import CharField
from django.http.request import HttpRequest
from djmoney.contrib.exchange.admin import RateAdmin
from djmoney.contrib.exchange.models import Rate
from import_export.exceptions import ImportExportError
from import_export.resources import ModelResource
class InvenTreeResource(ModelResource):
"""Custom subclass of the ModelResource class provided by django-import-export".
"""Custom subclass of the ModelResource class provided by django-import-export"
Ensures that exported data are escaped to prevent malicious formula injection.
Ref: https://owasp.org/www-community/attacks/CSV_Injection
"""
MAX_IMPORT_ROWS = 1000
MAX_IMPORT_COLS = 100
# List of fields which should be converted to empty strings if they are null
CONVERT_NULL_FIELDS = []
def import_data_inner(
self,
dataset,
dry_run,
raise_errors,
using_transactions,
collect_failed_rows,
rollback_on_validation_errors=None,
**kwargs,
):
"""Override the default import_data_inner function to provide better error handling."""
if len(dataset) > self.MAX_IMPORT_ROWS:
raise ImportExportError(
f'Dataset contains too many rows (max {self.MAX_IMPORT_ROWS})'
)
if len(dataset.headers) > self.MAX_IMPORT_COLS:
raise ImportExportError(
f'Dataset contains too many columns (max {self.MAX_IMPORT_COLS})'
)
return super().import_data_inner(
dataset,
dry_run,
raise_errors,
using_transactions,
collect_failed_rows,
rollback_on_validation_errors=rollback_on_validation_errors,
**kwargs,
)
def export_resource(self, obj):
"""Custom function to override default row export behavior.
"""Custom function to override default row export behaviour.
Specifically, strip illegal leading characters to prevent formula injection
"""
@@ -75,39 +31,3 @@ class InvenTreeResource(ModelResource):
row[idx] = val
return row
def get_fields(self, **kwargs):
"""Return fields, with some common exclusions."""
fields = super().get_fields(**kwargs)
fields_to_exclude = ['metadata', 'lft', 'rght', 'tree_id', 'level']
return [f for f in fields if f.column_name not in fields_to_exclude]
def before_import_row(self, row, row_number=None, **kwargs):
"""Run custom code before importing each row.
- Convert any null fields to empty strings, for fields which do not support null values
"""
# We can automatically determine which fields might need such a conversion
for field in self.Meta.model._meta.fields:
if isinstance(field, CharField):
if field.blank and not field.null:
if field.name not in self.CONVERT_NULL_FIELDS:
self.CONVERT_NULL_FIELDS.append(field.name)
for field in self.CONVERT_NULL_FIELDS:
if field in row and row[field] is None:
row[field] = ''
class CustomRateAdmin(RateAdmin):
"""Admin interface for the Rate class."""
def has_add_permission(self, request: HttpRequest) -> bool:
"""Disable the 'add' permission for Rate objects."""
return False
admin.site.unregister(Rate)
admin.site.register(Rate, CustomRateAdmin)
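The formula-injection protection described in the `InvenTreeResource` docstring above boils down to neutralising risky leading characters before a value is written to the exported file. A generic sketch of that idea (a simplified illustration of the OWASP-recommended mitigation, not the exact InvenTree implementation):
```python
# Characters which spreadsheet applications may interpret as the start of a formula
FORMULA_PREFIXES = ('=', '+', '-', '@', '\t', '\r')


def escape_csv_value(value):
    """Neutralise potential CSV/formula injection (generic sketch, simplified)."""
    if isinstance(value, str) and value.startswith(FORMULA_PREFIXES):
        # A leading apostrophe forces spreadsheet software to treat the cell as text
        return "'" + value
    return value
```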

View File

@@ -1,316 +1,61 @@
"""Main JSON interface views."""
import sys
"""
Main JSON interface views
"""
from django.conf import settings
from django.db import transaction
from django.http import JsonResponse
from django.utils.translation import gettext_lazy as _
from django_q.models import OrmQ
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import permissions, serializers
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from rest_framework.serializers import ValidationError
from rest_framework.views import APIView
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters, permissions
import InvenTree.version
import users.models
from InvenTree.filters import SEARCH_ORDER_FILTER
from InvenTree.mixins import ListCreateAPI
from InvenTree.permissions import RolePermission
from InvenTree.templatetags.inventree_extras import plugins_info
from part.models import Part
from plugin.serializers import MetadataSerializer
from users.models import ApiToken
from .email import is_email_configured
from .mixins import ListAPI, RetrieveUpdateAPI
from .status import check_system_health, is_worker_running
from .version import inventreeApiText
from .status import is_worker_running
from .version import (inventreeApiVersion, inventreeInstanceName,
inventreeVersion)
from .views import AjaxView
class VersionViewSerializer(serializers.Serializer):
"""Serializer for a single version."""
class VersionSerializer(serializers.Serializer):
"""Serializer for server version."""
server = serializers.CharField()
api = serializers.IntegerField()
commit_hash = serializers.CharField()
commit_date = serializers.CharField()
commit_branch = serializers.CharField()
python = serializers.CharField()
django = serializers.CharField()
class LinkSerializer(serializers.Serializer):
"""Serializer for all possible links."""
doc = serializers.URLField()
code = serializers.URLField()
credit = serializers.URLField()
app = serializers.URLField()
bug = serializers.URLField()
dev = serializers.BooleanField()
up_to_date = serializers.BooleanField()
version = VersionSerializer()
links = LinkSerializer()
class VersionView(APIView):
"""Simple JSON endpoint for InvenTree version information."""
permission_classes = [permissions.IsAdminUser]
@extend_schema(responses={200: OpenApiResponse(response=VersionViewSerializer)})
def get(self, request, *args, **kwargs):
"""Return information about the InvenTree server."""
return JsonResponse({
'dev': InvenTree.version.isInvenTreeDevelopmentVersion(),
'up_to_date': InvenTree.version.isInvenTreeUpToDate(),
'version': {
'server': InvenTree.version.inventreeVersion(),
'api': InvenTree.version.inventreeApiVersion(),
'commit_hash': InvenTree.version.inventreeCommitHash(),
'commit_date': InvenTree.version.inventreeCommitDate(),
'commit_branch': InvenTree.version.inventreeBranch(),
'python': InvenTree.version.inventreePythonVersion(),
'django': InvenTree.version.inventreeDjangoVersion(),
},
'links': {
'doc': InvenTree.version.inventreeDocUrl(),
'code': InvenTree.version.inventreeGithubUrl(),
'credit': InvenTree.version.inventreeCreditsUrl(),
'app': InvenTree.version.inventreeAppUrl(),
'bug': f'{InvenTree.version.inventreeGithubUrl()}issues',
},
})
class VersionSerializer(serializers.Serializer):
"""Serializer for a single version."""
version = serializers.CharField()
date = serializers.CharField()
gh = serializers.CharField()
text = serializers.CharField()
latest = serializers.BooleanField()
class Meta:
"""Meta class for VersionSerializer."""
fields = ['version', 'date', 'gh', 'text', 'latest']
class VersionApiSerializer(serializers.Serializer):
"""Serializer for the version api endpoint."""
VersionSerializer(many=True)
class VersionTextView(ListAPI):
"""Simple JSON endpoint for InvenTree version text."""
serializer_class = VersionSerializer
permission_classes = [permissions.IsAdminUser]
@extend_schema(responses={200: OpenApiResponse(response=VersionApiSerializer)})
def list(self, request, *args, **kwargs):
"""Return information about the InvenTree server."""
return JsonResponse(inventreeApiText())
class InfoView(AjaxView):
"""Simple JSON endpoint for InvenTree information.
""" Simple JSON endpoint for InvenTree information.
Use to confirm that the server is running, etc.
"""
permission_classes = [permissions.AllowAny]
def worker_pending_tasks(self):
"""Return the current number of outstanding background tasks."""
return OrmQ.objects.count()
def get(self, request, *args, **kwargs):
"""Serve current server information."""
is_staff = request.user.is_staff
if not is_staff and request.user.is_anonymous:
# Might be Token auth - check if so
is_staff = self.check_auth_header(request)
data = {
'server': 'InvenTree',
'version': InvenTree.version.inventreeVersion(),
'instance': InvenTree.version.inventreeInstanceName(),
'apiVersion': InvenTree.version.inventreeApiVersion(),
'version': inventreeVersion(),
'instance': inventreeInstanceName(),
'apiVersion': inventreeApiVersion(),
'worker_running': is_worker_running(),
'worker_pending_tasks': self.worker_pending_tasks(),
'plugins_enabled': settings.PLUGINS_ENABLED,
'plugins_install_disabled': settings.PLUGINS_INSTALL_DISABLED,
'active_plugins': plugins_info(),
'email_configured': is_email_configured(),
'debug_mode': settings.DEBUG,
'docker_mode': settings.DOCKER,
'default_locale': settings.LANGUAGE_CODE,
# Following fields are only available to staff users
'system_health': check_system_health() if is_staff else None,
'database': InvenTree.version.inventreeDatabase() if is_staff else None,
'platform': InvenTree.version.inventreePlatform() if is_staff else None,
'installer': InvenTree.version.inventreeInstaller() if is_staff else None,
'target': InvenTree.version.inventreeTarget() if is_staff else None,
}
return JsonResponse(data)
def check_auth_header(self, request):
"""Check if user is authenticated via a token in the header."""
from InvenTree.middleware import get_token_from_request
if token := get_token_from_request(request):
# Does the provided token match a valid user?
try:
token = ApiToken.objects.get(key=token)
# Check if the token is active and the user is a staff member
if token.active and token.user and token.user.is_staff:
return True
except ApiToken.DoesNotExist:
pass
return False
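As a usage illustration of the `InfoView` endpoint above (assuming the view is routed at the API root `/api/`, a local server on port 8000, and a valid API token; these are placeholders, not taken from this file):
```python
import requests

# Query the server information endpoint (placeholders: URL and token)
response = requests.get(
    'http://localhost:8000/api/',
    headers={'Authorization': 'Token <your-api-token>'},
    timeout=10,
)
info = response.json()
print(info['version'], info['apiVersion'], info['worker_running'])
```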
class NotFoundView(AjaxView):
"""Simple JSON view when accessing an invalid API view."""
"""
Simple JSON view when accessing an invalid API view.
"""
permission_classes = [permissions.AllowAny]
def not_found(self, request):
"""Return a 404 error."""
return JsonResponse(
{
'detail': _('API endpoint not found'),
'url': request.build_absolute_uri(),
},
status=404,
)
def options(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
def get(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
def post(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
def patch(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
def put(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
def delete(self, request, *args, **kwargs):
"""Return 404."""
return self.not_found(request)
class BulkDeleteMixin:
"""Mixin class for enabling 'bulk delete' operations for various models.
Bulk delete allows for multiple items to be deleted in a single API query,
rather than using multiple API calls to the various detail endpoints.
This is implemented for two major reasons:
- Atomicity (guaranteed that either *all* items are deleted, or *none*)
- Speed (single API call and DB query)
"""
def filter_delete_queryset(self, queryset, request):
"""Provide custom filtering for the queryset *before* it is deleted."""
return queryset
def delete(self, request, *args, **kwargs):
"""Perform a DELETE operation against this list endpoint.
We expect a list of primary-key (ID) values to be supplied as a JSON object, e.g.
{
items: [4, 8, 15, 16, 23, 42]
data = {
'details': _('API endpoint not found'),
'url': request.build_absolute_uri(),
}
"""
model = self.serializer_class.Meta.model
# Extract the items from the request body
try:
items = request.data.getlist('items', None)
except AttributeError:
items = request.data.get('items', None)
# Extract the filters from the request body
try:
filters = request.data.getlist('filters', None)
except AttributeError:
filters = request.data.get('filters', None)
if not items and not filters:
raise ValidationError({
'non_field_errors': [
'List of items or filters must be provided for bulk deletion'
]
})
if items and type(items) is not list:
raise ValidationError({
'items': ["'items' must be supplied as a list object"]
})
if filters and type(filters) is not dict:
raise ValidationError({
'filters': ["'filters' must be supplied as a dict object"]
})
# Keep track of how many items we deleted
n_deleted = 0
with transaction.atomic():
# Start with *all* models and perform basic filtering
queryset = model.objects.all()
queryset = self.filter_delete_queryset(queryset, request)
# Filter by provided item ID values
if items:
queryset = queryset.filter(id__in=items)
# Filter by provided filters
if filters:
queryset = queryset.filter(**filters)
n_deleted = queryset.count()
queryset.delete()
return Response({'success': f'Deleted {n_deleted} items'}, status=204)
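A hedged illustration of the bulk-delete payload format described in the docstring above; the endpoint URL and token are placeholders, and any list endpoint using `BulkDeleteMixin` accepts either an `items` list of primary keys or a `filters` dict:
```python
import requests

# Delete several objects in a single API call (placeholders: URL and token)
response = requests.delete(
    'http://localhost:8000/api/stock/',
    json={'items': [4, 8, 15, 16, 23, 42]},
    headers={'Authorization': 'Token <your-api-token>'},
    timeout=10,
)
print(response.status_code)  # expected 204 on success
```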
class ListCreateDestroyAPIView(BulkDeleteMixin, ListCreateAPI):
"""Custom API endpoint which provides BulkDelete functionality in addition to List and Create."""
...
return JsonResponse(data, status=404)
class APIDownloadMixin:
"""Mixin for enabling a LIST endpoint to be downloaded a file.
"""
Mixin for enabling a LIST endpoint to be downloaded a file.
To download the data, add the ?export=<fmt> to the query string.
@@ -331,170 +76,38 @@ class APIDownloadMixin:
"""
def get(self, request, *args, **kwargs):
"""Generic handler for a download request."""
export_format = request.query_params.get('export', None)
if export_format and export_format in ['csv', 'tsv', 'xls', 'xlsx']:
queryset = self.filter_queryset(self.get_queryset())
return self.download_queryset(queryset, export_format)
# Default to the parent class implementation
return super().get(request, *args, **kwargs)
else:
# Default to the parent class implementation
return super().get(request, *args, **kwargs)
def download_queryset(self, queryset, export_format):
"""This function must be implemented to provide a downloadFile request."""
raise NotImplementedError('download_queryset method not implemented!')
raise NotImplementedError("download_queryset method not implemented!")
class AttachmentMixin:
"""Mixin for creating attachment objects, and ensuring the user information is saved correctly."""
permission_classes = [permissions.IsAuthenticated, RolePermission]
filter_backends = SEARCH_ORDER_FILTER
search_fields = ['attachment', 'comment', 'link']
def perform_create(self, serializer):
"""Save the user information when a file is uploaded."""
attachment = serializer.save()
attachment.user = self.request.user
attachment.save()
class APISearchViewSerializer(serializers.Serializer):
"""Serializer for the APISearchView."""
search = serializers.CharField()
search_regex = serializers.BooleanField(default=False, required=False)
search_whole = serializers.BooleanField(default=False, required=False)
limit = serializers.IntegerField(default=1, required=False)
offset = serializers.IntegerField(default=0, required=False)
class APISearchView(GenericAPIView):
"""A general-purpose 'search' API endpoint.
Returns hits against a number of different models simultaneously,
to consolidate multiple API requests into a single query.
This is much more efficient and simplifies the code!
"""
Mixin for creating attachment objects,
and ensuring the user information is saved correctly.
"""
permission_classes = [permissions.IsAuthenticated]
serializer_class = APISearchViewSerializer
def get_result_types(self):
"""Construct a list of search types we can return."""
import build.api
import company.api
import order.api
import part.api
import stock.api
filter_backends = [
DjangoFilterBackend,
filters.OrderingFilter,
filters.SearchFilter,
]
return {
'build': build.api.BuildList,
'company': company.api.CompanyList,
'manufacturerpart': company.api.ManufacturerPartList,
'supplierpart': company.api.SupplierPartList,
'part': part.api.PartList,
'partcategory': part.api.CategoryList,
'purchaseorder': order.api.PurchaseOrderList,
'returnorder': order.api.ReturnOrderList,
'salesorder': order.api.SalesOrderList,
'stockitem': stock.api.StockList,
'stocklocation': stock.api.StockLocationList,
}
def perform_create(self, serializer):
""" Save the user information when a file is uploaded """
def post(self, request, *args, **kwargs):
"""Perform search query against available models."""
data = request.data
results = {}
# These parameters are passed through to the individual queries, with optional default values
pass_through_params = {
'search': '',
'search_regex': False,
'search_whole': False,
'limit': 1,
'offset': 0,
}
if 'search' not in data:
raise ValidationError({'search': 'Search term must be provided'})
for key, cls in self.get_result_types().items():
# Only return results which are specifically requested
if key in data:
params = data[key]
for k, v in pass_through_params.items():
params[k] = request.data.get(k, v)
# Enforce json encoding
params['format'] = 'json'
# Ignore if the params are wrong
if type(params) is not dict:
continue
view = cls()
# Override regular query params with specific ones for this search request
request._request.GET = params
view.request = request
view.format_kwarg = 'format'
# Check permissions and update results dict with particular query
model = view.serializer_class.Meta.model
app_label = model._meta.app_label
model_name = model._meta.model_name
table = f'{app_label}_{model_name}'
try:
if users.models.RuleSet.check_table_permission(
request.user, table, 'view'
):
results[key] = view.list(request, *args, **kwargs).data
else:
results[key] = {
'error': _(
'User does not have permission to view this model'
)
}
except Exception as exc:
results[key] = {'error': str(exc)}
return Response(results)
class MetadataView(RetrieveUpdateAPI):
"""Generic API endpoint for reading and editing metadata for a model."""
MODEL_REF = 'model'
def get_model_type(self):
"""Return the model type associated with this API instance."""
model = self.kwargs.get(self.MODEL_REF, None)
if model is None:
raise ValidationError(
f"MetadataView called without '{self.MODEL_REF}' parameter"
)
return model
def get_permission_model(self):
"""Return the 'permission' model associated with this view."""
return self.get_model_type()
def get_queryset(self):
"""Return the queryset for this endpoint."""
return self.get_model_type().objects.all()
def get_serializer(self, *args, **kwargs):
"""Return MetadataSerializer instance."""
# Detect if we are currently generating the OpenAPI schema
if 'spectacular' in sys.argv:
return MetadataSerializer(Part, *args, **kwargs)
return MetadataSerializer(self.get_model_type(), *args, **kwargs)
attachment = serializer.save()
attachment.user = self.request.user
attachment.save()

View File

@@ -0,0 +1,266 @@
"""
Helper functions for performing API unit tests
"""
import csv
import io
import re
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.http.response import StreamingHttpResponse
from rest_framework.test import APITestCase
class UserMixin:
# User information
username = 'testuser'
password = 'mypassword'
email = 'test@testing.com'
superuser = False
is_staff = True
auto_login = True
# Set list of roles automatically associated with the user
roles = []
def setUp(self):
super().setUp()
# Create a user to log in with
self.user = get_user_model().objects.create_user(
username=self.username,
password=self.password,
email=self.email
)
# Create a group for the user
self.group = Group.objects.create(name='my_test_group')
self.user.groups.add(self.group)
if self.superuser:
self.user.is_superuser = True
if self.is_staff:
self.user.is_staff = True
self.user.save()
# Assign all roles if set
if self.roles == 'all':
self.assignRole(assign_all=True)
# else filter the roles
else:
for role in self.roles:
self.assignRole(role)
if self.auto_login:
self.client.login(username=self.username, password=self.password)
def assignRole(self, role=None, assign_all: bool = False):
"""
Set the user roles for the registered user
"""
# role is of the format 'rule.permission' e.g. 'part.add'
if not assign_all and role:
rule, perm = role.split('.')
for ruleset in self.group.rule_sets.all():
if assign_all or ruleset.name == rule:
if assign_all or perm == 'view':
ruleset.can_view = True
elif assign_all or perm == 'change':
ruleset.can_change = True
elif assign_all or perm == 'delete':
ruleset.can_delete = True
elif assign_all or perm == 'add':
ruleset.can_add = True
ruleset.save()
break
class InvenTreeAPITestCase(UserMixin, APITestCase):
"""
Base class for running InvenTree API tests
"""
def getActions(self, url):
"""
Return a dict of the 'actions' available at a given endpoint.
Makes use of the HTTP 'OPTIONS' method to request this.
"""
response = self.client.options(url)
self.assertEqual(response.status_code, 200)
actions = response.data.get('actions', None)
if not actions:
actions = {}
return actions
def get(self, url, data={}, expected_code=200):
"""
Issue a GET request
"""
response = self.client.get(url, data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def post(self, url, data, expected_code=None, format='json'):
"""
Issue a POST request
"""
response = self.client.post(url, data=data, format=format)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def delete(self, url, expected_code=None):
"""
Issue a DELETE request
"""
response = self.client.delete(url)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def patch(self, url, data, expected_code=None, format='json'):
"""
Issue a PATCH request
"""
response = self.client.patch(url, data=data, format=format)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def put(self, url, data, expected_code=None, format='json'):
"""
Issue a PUT request
"""
response = self.client.put(url, data=data, format=format)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def options(self, url, expected_code=None):
"""
Issue an OPTIONS request
"""
response = self.client.options(url, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def download_file(self, url, data, expected_code=None, expected_fn=None, decode=True):
"""
Download a file from the server, and return an in-memory file
"""
response = self.client.get(url, data=data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
# Check that the response is of the correct type
if not isinstance(response, StreamingHttpResponse):
raise ValueError("Response is not a StreamingHttpResponse object as expected")
# Extract filename
disposition = response.headers['Content-Disposition']
result = re.search(r'attachment; filename="([\w.]+)"', disposition)
fn = result.groups()[0]
if expected_fn is not None:
self.assertEqual(expected_fn, fn)
if decode:
# Decode data and return as StringIO file object
fo = io.StringIO()
fo.name = fn
fo.write(response.getvalue().decode('UTF-8'))
else:
# Return a BytesIO file object
fo = io.BytesIO()
fo.name = fn
fo.write(response.getvalue())
fo.seek(0)
return fo
def process_csv(self, fo, delimiter=',', required_cols=None, excluded_cols=None, required_rows=None):
"""
Helper function to process and validate a downloaded csv file
"""
# Check that the correct object type has been passed
self.assertTrue(isinstance(fo, io.StringIO))
fo.seek(0)
reader = csv.reader(fo, delimiter=delimiter)
headers = []
rows = []
for idx, row in enumerate(reader):
if idx == 0:
headers = row
else:
rows.append(row)
if required_cols is not None:
for col in required_cols:
self.assertIn(col, headers)
if excluded_cols is not None:
for col in excluded_cols:
self.assertNotIn(col, headers)
if required_rows is not None:
self.assertEqual(len(rows), required_rows)
# Return the file data as a list of dict items, based on the headers
data = []
for row in rows:
entry = {}
for idx, col in enumerate(headers):
entry[col] = row[idx]
data.append(entry)
return data
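A brief sketch of how these helpers might be used in a test case; the import path, URL and role name are assumptions for illustration only:
```python
from InvenTree.api_tester import InvenTreeAPITestCase  # assumed module path


class PartAPIExampleTest(InvenTreeAPITestCase):
    """Hypothetical test demonstrating the GET and CSV-download helpers."""

    roles = ['part.view']  # role format is 'rule.permission', as handled by assignRole()

    def test_list_parts(self):
        # UserMixin.setUp() has already created and logged in a test user
        response = self.get('/api/part/', expected_code=200)
        self.assertIsNotNone(response.data)

    def test_export_parts(self):
        # Download a CSV export and validate it with process_csv()
        fo = self.download_file('/api/part/', {'export': 'csv'}, expected_code=200)
        rows = self.process_csv(fo)
        self.assertIsInstance(rows, list)
```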

View File

@@ -1,471 +1,13 @@
"""InvenTree API version information."""
"""
InvenTree API version information
"""
# InvenTree API version
INVENTREE_API_VERSION = 180
"""Increment this API version number whenever there is a significant change to the API that any clients need to know about."""
INVENTREE_API_VERSION = 50
INVENTREE_API_TEXT = """
v180 - 2024-03-02 : https://github.com/inventree/InvenTree/pull/6463
- Tweaks to API documentation to allow automatic documentation generation
v179 - 2024-03-01 : https://github.com/inventree/InvenTree/pull/6605
- Adds "subcategories" count to PartCategory serializer
- Adds "sublocations" count to StockLocation serializer
- Adds "image" field to PartBrief serializer
- Adds "image" field to CompanyBrief serializer
v178 - 2024-02-29 : https://github.com/inventree/InvenTree/pull/6604
- Adds "external_stock" field to the Part API endpoint
- Adds "external_stock" field to the BomItem API endpoint
- Adds "external_stock" field to the BuildLine API endpoint
- Stock quantities represented in the BuildLine API endpoint are now filtered by Build.source_location
v177 - 2024-02-27 : https://github.com/inventree/InvenTree/pull/6581
- Adds "subcategoies" count to PartCategoryTree serializer
- Adds "sublocations" count to StockLocationTree serializer
v176 - 2024-02-26 : https://github.com/inventree/InvenTree/pull/6535
- Adds the field "plugins_install_disabled" to the Server info API endpoint
v175 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6538
- Adds "parts" count to PartParameterTemplate serializer
v174 - 2024-02-21 : https://github.com/inventree/InvenTree/pull/6536
- Expose PartCategory filters to the API documentation
- Expose StockLocation filters to the API documentation
v173 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6483
- Adds "merge_items" to the PurchaseOrderLine create API endpoint
- Adds "auto_pricing" to the PurchaseOrderLine create/update API endpoint
v172 - 2024-02-20 : https://github.com/inventree/InvenTree/pull/6526
- Adds "enabled" field to the PartTestTemplate API endpoint
- Adds "enabled" filter to the PartTestTemplate list
- Adds "enabled" filter to the StockItemTestResult list
v171 - 2024-02-19 : https://github.com/inventree/InvenTree/pull/6516
- Adds "key" as a filterable parameter to PartTestTemplate list endpoint
v170 -> 2024-02-19 : https://github.com/inventree/InvenTree/pull/6514
- Adds "has_results" filter to the PartTestTemplate list endpoint
v169 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/6430
- Adds 'key' field to PartTestTemplate API endpoint
- Adds annotated 'results' field to PartTestTemplate API endpoint
- Adds 'template' field to StockItemTestResult API endpoint
v168 -> 2024-02-14 : https://github.com/inventree/InvenTree/pull/4824
- Adds machine CRUD API endpoints
- Adds machine settings API endpoints
- Adds machine restart API endpoint
- Adds machine types/drivers list API endpoints
- Adds machine registry status API endpoint
- Adds 'required' field to the global Settings API
- Discover sub-sub classes of the StatusCode API
v167 -> 2024-02-07: https://github.com/inventree/InvenTree/pull/6440
- Fixes for OpenAPI schema generation
v166 -> 2024-02-04 : https://github.com/inventree/InvenTree/pull/6400
- Adds package_name to plugin API
- Adds mechanism for uninstalling plugins via the API
v165 -> 2024-01-28 : https://github.com/inventree/InvenTree/pull/6040
- Adds supplier_part.name, part.creation_user, part.required_for_sales_order
v164 -> 2024-01-24 : https://github.com/inventree/InvenTree/pull/6343
- Adds "building" quantity to BuildLine API serializer
v163 -> 2024-01-22 : https://github.com/inventree/InvenTree/pull/6314
- Extends API endpoint to expose auth configuration information for signin pages
v162 -> 2024-01-14 : https://github.com/inventree/InvenTree/pull/6230
- Adds API endpoints to provide information on background tasks
v161 -> 2024-01-13 : https://github.com/inventree/InvenTree/pull/6222
- Adds API endpoint for system error information
v160 -> 2023-12-11 : https://github.com/inventree/InvenTree/pull/6072
- Adds API endpoint for allocating stock items against a sales order via barcode scan
v159 -> 2023-12-08 : https://github.com/inventree/InvenTree/pull/6056
- Adds API endpoint for reloading plugin registry
v158 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5953
- Adds API endpoint for listing all settings of a particular plugin
- Adds API endpoint for registry status (errors)
v157 -> 2023-12-02 : https://github.com/inventree/InvenTree/pull/6021
- Add write-only "existing_image" field to Part API serializer
v156 -> 2023-11-26 : https://github.com/inventree/InvenTree/pull/5982
- Add POST endpoint for report and label creation
v155 -> 2023-11-24 : https://github.com/inventree/InvenTree/pull/5979
- Add "creation_date" field to Part instance serializer
v154 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5944
- Adds "responsible" field to the ProjectCode table
v153 -> 2023-11-21 : https://github.com/inventree/InvenTree/pull/5956
- Adds override_min and override_max fields to part pricing API
v152 -> 2023-11-20 : https://github.com/inventree/InvenTree/pull/5949
- Adds barcode support for manufacturerpart model
- Adds API endpoint for adding parts to purchase order using barcode scan
v151 -> 2023-11-13 : https://github.com/inventree/InvenTree/pull/5906
- Allow user list API to be filtered by user active status
- Allow owner list API to be filtered by user active status
v150 -> 2023-11-07: https://github.com/inventree/InvenTree/pull/5875
- Extended user API endpoints to enable ordering
- Extended user API endpoints to enable user role changes
- Added endpoint to create a new user
v149 -> 2023-11-07 : https://github.com/inventree/InvenTree/pull/5876
- Add 'building' quantity to BomItem serializer
- Add extra ordering options for the BomItem list API
v148 -> 2023-11-06 : https://github.com/inventree/InvenTree/pull/5872
- Allow "quantity" to be specified when installing an item into another item
v147 -> 2023-11-04: https://github.com/inventree/InvenTree/pull/5860
- Adds "completed_lines" field to SalesOrder API endpoint
- Adds "completed_lines" field to PurchaseOrder API endpoint
v146 -> 2023-11-02: https://github.com/inventree/InvenTree/pull/5822
- Extended SSO Provider endpoint to contain if a provider is configured
- Adds API endpoints for Email Address model
v145 -> 2023-10-30: https://github.com/inventree/InvenTree/pull/5786
- Allow printing labels via POST including printing options in the body
v144 -> 2023-10-23: https://github.com/inventree/InvenTree/pull/5811
- Adds version information API endpoint
v143 -> 2023-10-29: https://github.com/inventree/InvenTree/pull/5810
- Extends the status endpoint to include information about system status and health
v142 -> 2023-10-20: https://github.com/inventree/InvenTree/pull/5759
- Adds generic API endpoints for looking up status models
v141 -> 2023-10-23 : https://github.com/inventree/InvenTree/pull/5774
- Changed 'part.responsible' from User to Owner
v140 -> 2023-10-20 : https://github.com/inventree/InvenTree/pull/5664
- Expand API token functionality
- Multiple API tokens can be generated per user
v139 -> 2023-10-11 : https://github.com/inventree/InvenTree/pull/5509
- Add new BarcodePOReceive endpoint to receive line items by scanning supplier barcodes
v138 -> 2023-10-11 : https://github.com/inventree/InvenTree/pull/5679
- Settings keys are no longer case sensitive
- Include settings units in API serializer
v137 -> 2023-10-04 : https://github.com/inventree/InvenTree/pull/5588
- Adds StockLocationType API endpoints
- Adds custom_icon, location_type to StockLocation endpoint
v136 -> 2023-09-23 : https://github.com/inventree/InvenTree/pull/5595
- Adds structural to StockLocation and PartCategory tree endpoints
v135 -> 2023-09-19 : https://github.com/inventree/InvenTree/pull/5569
- Adds location path detail to StockLocation and StockItem API endpoints
- Adds category path detail to PartCategory and Part API endpoints
v134 -> 2023-09-11 : https://github.com/inventree/InvenTree/pull/5525
- Allow "Attachment" list endpoints to be searched by attachment, link and comment fields
v133 -> 2023-09-08 : https://github.com/inventree/InvenTree/pull/5518
- Add extra optional fields which can be used for StockAdjustment endpoints
v132 -> 2023-09-07 : https://github.com/inventree/InvenTree/pull/5515
- Add 'issued_by' filter to BuildOrder API list endpoint
v131 -> 2023-08-09 : https://github.com/inventree/InvenTree/pull/5415
- Annotate 'available_variant_stock' to the SalesOrderLine serializer
v130 -> 2023-07-14 : https://github.com/inventree/InvenTree/pull/5251
- Refactor label printing interface
v129 -> 2023-07-06 : https://github.com/inventree/InvenTree/pull/5189
- Changes 'serial_lte' and 'serial_gte' stock filters to point to 'serial_int' field
v128 -> 2023-07-06 : https://github.com/inventree/InvenTree/pull/5186
- Adds 'available' filter for BuildLine API endpoint
v127 -> 2023-06-24 : https://github.com/inventree/InvenTree/pull/5094
- Enhancements for the PartParameter API endpoints
v126 -> 2023-06-19 : https://github.com/inventree/InvenTree/pull/5075
- Adds API endpoint for setting the "category" for multiple parts simultaneously
v125 -> 2023-06-17 : https://github.com/inventree/InvenTree/pull/5064
- Adds API endpoint for setting the "status" field for multiple stock items simultaneously
v124 -> 2023-06-17 : https://github.com/inventree/InvenTree/pull/5057
- Add "created_before" and "created_after" filters to the Part API
v123 -> 2023-06-15 : https://github.com/inventree/InvenTree/pull/5019
- Add Metadata to: Plugin Config
v122 -> 2023-06-14 : https://github.com/inventree/InvenTree/pull/5034
- Adds new BuildLineLabel label type
v121 -> 2023-06-14 : https://github.com/inventree/InvenTree/pull/4808
- Adds "ProjectCode" link to Build model
v120 -> 2023-06-07 : https://github.com/inventree/InvenTree/pull/4855
- Major overhaul of the build order API
- Adds new BuildLine model
v119 -> 2023-06-01 : https://github.com/inventree/InvenTree/pull/4898
- Add Metadata to: Part test templates, Part parameters, Part category parameter templates, BOM item substitute, Related Parts, Stock item test result
v118 -> 2023-06-01 : https://github.com/inventree/InvenTree/pull/4935
- Adds extra fields for the PartParameterTemplate model
v117 -> 2023-05-22 : https://github.com/inventree/InvenTree/pull/4854
- Part.units model now supports physical units (e.g. "kg", "m", "mm", etc)
- Replaces SupplierPart "pack_size" field with "pack_quantity"
- New field supports physical units, and allows for conversion between compatible units
v116 -> 2023-05-18 : https://github.com/inventree/InvenTree/pull/4823
- Updates to part parameter implementation, to use physical units
v115 -> 2023-05-18 : https://github.com/inventree/InvenTree/pull/4846
- Adds ability to partially scrap a build output
v114 -> 2023-05-16 : https://github.com/inventree/InvenTree/pull/4825
- Adds "delivery_date" to shipments
v113 -> 2023-05-13 : https://github.com/inventree/InvenTree/pull/4800
- Adds API endpoints for scrapping a build output
v112 -> 2023-05-13: https://github.com/inventree/InvenTree/pull/4741
- Adds flag use_pack_size to the stock addition API, which allows adding packs
v111 -> 2023-05-02 : https://github.com/inventree/InvenTree/pull/4367
- Adds tags to the Part serializer
- Adds tags to the SupplierPart serializer
- Adds tags to the ManufacturerPart serializer
- Adds tags to the StockItem serializer
- Adds tags to the StockLocation serializer
v110 -> 2023-04-26 : https://github.com/inventree/InvenTree/pull/4698
- Adds 'order_currency' field for PurchaseOrder / SalesOrder endpoints
v109 -> 2023-04-19 : https://github.com/inventree/InvenTree/pull/4636
- Adds API endpoints for the "ProjectCode" model
v108 -> 2023-04-17 : https://github.com/inventree/InvenTree/pull/4615
- Adds functionality to upload images for rendering in markdown notes
v107 -> 2023-04-04 : https://github.com/inventree/InvenTree/pull/4575
- Adds barcode support for PurchaseOrder model
- Adds barcode support for ReturnOrder model
- Adds barcode support for SalesOrder model
- Adds barcode support for BuildOrder model
v106 -> 2023-04-03 : https://github.com/inventree/InvenTree/pull/4566
- Adds 'search_regex' parameter to all searchable API endpoints
v105 -> 2023-03-31 : https://github.com/inventree/InvenTree/pull/4543
- Adds API endpoints for status label information on various models
v104 -> 2023-03-23 : https://github.com/inventree/InvenTree/pull/4488
- Adds various endpoints for new "ReturnOrder" models
- Adds various endpoints for new "ReturnOrderReport" templates
- Exposes API endpoints for "Contact" model
v103 -> 2023-03-17 : https://github.com/inventree/InvenTree/pull/4410
- Add metadata to several more models
v102 -> 2023-03-18 : https://github.com/inventree/InvenTree/pull/4505
- Adds global search API endpoint for consolidated search results
v101 -> 2023-03-07 : https://github.com/inventree/InvenTree/pull/4462
- Adds 'total_in_stock' to Part serializer, and supports API ordering
v100 -> 2023-03-04 : https://github.com/inventree/InvenTree/pull/4452
- Adds bulk delete of PurchaseOrderLineItems to API
v99 -> 2023-03-03 : https://github.com/inventree/InvenTree/pull/4445
- Adds sort by "responsible" to PurchaseOrderAPI
v98 -> 2023-02-24 : https://github.com/inventree/InvenTree/pull/4408
- Adds "responsible" filter to Build API
v97 -> 2023-02-20 : https://github.com/inventree/InvenTree/pull/4377
- Adds "external" attribute to StockLocation model
v96 -> 2023-02-16 : https://github.com/inventree/InvenTree/pull/4345
- Adds stocktake report generation functionality
v95 -> 2023-02-16 : https://github.com/inventree/InvenTree/pull/4346
- Adds "CompanyAttachment" model (and associated API endpoints)
v94 -> 2023-02-10 : https://github.com/inventree/InvenTree/pull/4327
- Adds API endpoints for the "Group" auth model
v93 -> 2023-02-03 : https://github.com/inventree/InvenTree/pull/4300
- Adds extra information to the currency exchange endpoint
- Adds API endpoint for manually updating exchange rates
v92 -> 2023-02-02 : https://github.com/inventree/InvenTree/pull/4293
- Adds API endpoint for currency exchange information
v91 -> 2023-01-31 : https://github.com/inventree/InvenTree/pull/4281
- Improves the API endpoint for creating new Part instances
v90 -> 2023-01-25 : https://github.com/inventree/InvenTree/pull/4186/files
- Adds a dedicated endpoint to activate a plugin
v89 -> 2023-01-25 : https://github.com/inventree/InvenTree/pull/4214
- Adds updated field to SupplierPart API
- Adds API date ordering for supplier part list
v88 -> 2023-01-17 : https://github.com/inventree/InvenTree/pull/4225
- Adds 'priority' field to Build model and api endpoints
v87 -> 2023-01-04 : https://github.com/inventree/InvenTree/pull/4067
- Add API date filter for stock table on Expiry date
v86 -> 2022-12-22 : https://github.com/inventree/InvenTree/pull/4069
- Adds API endpoints for part stocktake
v85 -> 2022-12-21 : https://github.com/inventree/InvenTree/pull/3858
- Add endpoints serving ICS calendars for purchase and sales orders through API
v84 -> 2022-12-21 : https://github.com/inventree/InvenTree/pull/4083
- Add support for listing PO, BO, SO by their reference
v83 -> 2022-11-19 : https://github.com/inventree/InvenTree/pull/3949
- Add support for structural Stock locations
v82 -> 2022-11-16 : https://github.com/inventree/InvenTree/pull/3931
- Add support for structural Part categories
v81 -> 2022-11-08 : https://github.com/inventree/InvenTree/pull/3710
- Adds cached pricing information to Part API
- Adds cached pricing information to BomItem API
- Allows Part and BomItem list endpoints to be filtered by 'has_pricing'
- Remove calculated 'price_string' values from API endpoints
- Allows PurchaseOrderLineItem API endpoint to be filtered by 'has_pricing'
- Allows SalesOrderLineItem API endpoint to be filtered by 'has_pricing'
- Allows SalesOrderLineItem API endpoint to be filtered by 'order_status'
- Adds more information to SupplierPriceBreak serializer
v80 -> 2022-11-07 : https://github.com/inventree/InvenTree/pull/3906
- Adds 'barcode_hash' to Part API serializer
- Adds 'barcode_hash' to StockLocation API serializer
- Adds 'barcode_hash' to SupplierPart API serializer
v79 -> 2022-11-03 : https://github.com/inventree/InvenTree/pull/3895
- Add metadata to Company
v78 -> 2022-10-25 : https://github.com/inventree/InvenTree/pull/3854
- Allow PartCategory to be filtered by name and description
v77 -> 2022-10-12 : https://github.com/inventree/InvenTree/pull/3772
- Adds model permission checks for barcode assignment actions
v76 -> 2022-09-10 : https://github.com/inventree/InvenTree/pull/3640
- Refactor of barcode data on the API
- StockItem.uid renamed to StockItem.barcode_hash
v75 -> 2022-09-05 : https://github.com/inventree/InvenTree/pull/3644
- Adds "pack_size" attribute to SupplierPart API serializer
v74 -> 2022-08-28 : https://github.com/inventree/InvenTree/pull/3615
- Add confirmation field for completing PurchaseOrder if the order has incomplete lines
- Add confirmation field for completing SalesOrder if the order has incomplete lines
v73 -> 2022-08-24 : https://github.com/inventree/InvenTree/pull/3605
- Add 'description' field to PartParameterTemplate model
v72 -> 2022-08-18 : https://github.com/inventree/InvenTree/pull/3567
- Allow PurchaseOrder to be duplicated via the API
v71 -> 2022-08-18 : https://github.com/inventree/InvenTree/pull/3564
- Updates to the "part scheduling" API endpoint
v70 -> 2022-08-02 : https://github.com/inventree/InvenTree/pull/3451
- Adds a 'depth' parameter to the PartCategory list API
- Adds a 'depth' parameter to the StockLocation list API
v69 -> 2022-08-01 : https://github.com/inventree/InvenTree/pull/3443
- Updates the PartCategory list API:
- Improve query efficiency: O(n) becomes O(1)
- Rename 'parts' field to 'part_count'
- Updates the StockLocation list API:
- Improve query efficiency: O(n) becomes O(1)
v68 -> 2022-07-27 : https://github.com/inventree/InvenTree/pull/3417
- Allows SupplierPart list to be filtered by SKU value
- Allows SupplierPart list to be filtered by MPN value
v67 -> 2022-07-25 : https://github.com/inventree/InvenTree/pull/3395
- Adds a 'requirements' endpoint for Part instance
- Provides information on outstanding order requirements for a given part
v66 -> 2022-07-24 : https://github.com/inventree/InvenTree/pull/3393
- Part images can now be downloaded from a remote URL via the API
- Company images can now be downloaded from a remote URL via the API
v65 -> 2022-07-15 : https://github.com/inventree/InvenTree/pull/3335
- Annotates 'in_stock' quantity to the SupplierPart API
v64 -> 2022-07-08 : https://github.com/inventree/InvenTree/pull/3310
- Annotate 'on_order' quantity to BOM list API
- Allow BOM List API endpoint to be filtered by "on_order" parameter
v63 -> 2022-07-06 : https://github.com/inventree/InvenTree/pull/3301
- Allow BOM List API endpoint to be filtered by "available_stock" parameter
v62 -> 2022-07-05 : https://github.com/inventree/InvenTree/pull/3296
- Allows search on BOM List API endpoint
- Allows ordering on BOM List API endpoint
v61 -> 2022-06-12 : https://github.com/inventree/InvenTree/pull/3183
- Migrate the "Convert Stock Item" form class to use the API
- There is now an API endpoint for converting a stock item to a valid variant
v60 -> 2022-06-08 : https://github.com/inventree/InvenTree/pull/3148
- Add availability data fields to the SupplierPart model
v59 -> 2022-06-07 : https://github.com/inventree/InvenTree/pull/3154
- Adds further improvements to BulkDelete mixin class
- Fixes multiple bugs in custom OPTIONS metadata implementation
- Adds 'bulk delete' for Notifications
v58 -> 2022-06-06 : https://github.com/inventree/InvenTree/pull/3146
- Adds a BulkDelete API mixin class for fast, safe deletion of multiple objects with a single API request
v57 -> 2022-06-05 : https://github.com/inventree/InvenTree/pull/3130
- Transfer PartCategoryTemplateParameter actions to the API
v56 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3123
- Expose the PartParameterTemplate model to use the API
v55 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3120
- Converts the 'StockItemReturn' functionality to make use of the API
v54 -> 2022-06-02 : https://github.com/inventree/InvenTree/pull/3117
- Adds 'available_stock' annotation on the SalesOrderLineItem API
- Adds (well, fixes) 'overdue' annotation on the SalesOrderLineItem API
v53 -> 2022-06-01 : https://github.com/inventree/InvenTree/pull/3110
- Adds extra search fields to the BuildOrder list API endpoint
v52 -> 2022-05-31 : https://github.com/inventree/InvenTree/pull/3103
- Allow part list API to be searched by supplier SKU
v51 -> 2022-05-24 : https://github.com/inventree/InvenTree/pull/3058
- Adds new fields to the SalesOrderShipment model
"""
Increment this API version number whenever there is a significant change to the API that any clients need to know about
v50 -> 2022-05-18 : https://github.com/inventree/InvenTree/pull/2912
- Implement Attachments for manufacturer parts

View File

@@ -1,76 +1,46 @@
"""AppConfig for InvenTree app."""
# -*- coding: utf-8 -*-
import logging
from importlib import import_module
from pathlib import Path
from django.apps import AppConfig, apps
from django.apps import AppConfig
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.exceptions import AppRegistryNotReady
from django.db import transaction
from django.db.utils import IntegrityError, OperationalError
from django.db.utils import IntegrityError
import InvenTree.conversion
import InvenTree.ready
import InvenTree.tasks
from InvenTree.config import get_setting
from InvenTree.ready import canAppAccessDatabase, isInTestMode
logger = logging.getLogger('inventree')
from .config import get_setting
logger = logging.getLogger("inventree")
class InvenTreeConfig(AppConfig):
"""AppConfig for inventree app."""
name = 'InvenTree'
def ready(self):
"""Run system wide setup init steps.
Like:
- Checking if migrations should be run
- Cleaning up tasks
- Starting regular tasks
- Updating exchange rates
- Collecting notification methods
- Collecting state transition methods
- Adding users set in the current environment
"""
# skip loading if plugin registry is not loaded or we run in a background thread
if (
not InvenTree.ready.isPluginRegistryLoaded()
or not InvenTree.ready.isInMainThread()
):
return
if canAppAccessDatabase():
# Skip if running migrations
if InvenTree.ready.isRunningMigrations():
return
if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
self.remove_obsolete_tasks()
self.collect_tasks()
self.start_background_tasks()
if not InvenTree.ready.isInTestMode(): # pragma: no cover
if not isInTestMode(): # pragma: no cover
self.update_exchange_rates()
# Let the background worker check for migrations
InvenTree.tasks.offload_task(InvenTree.tasks.check_for_migrations)
self.update_site_url()
self.collect_notification_methods()
self.collect_state_transition_methods()
# Ensure the unit registry is loaded
InvenTree.conversion.get_unit_registry()
if InvenTree.ready.canAppAccessDatabase() or settings.TESTING_ENV:
if canAppAccessDatabase() or settings.TESTING_ENV:
self.add_user_on_startup()
self.add_user_from_file()
def remove_obsolete_tasks(self):
"""Delete any obsolete scheduled tasks in the database."""
"""
Delete any obsolete scheduled tasks in the database
"""
obsolete = [
'InvenTree.tasks.delete_expired_sessions',
'stock.tasks.delete_old_stock_items',
@@ -82,98 +52,62 @@ class InvenTreeConfig(AppConfig):
return
# Remove any existing obsolete tasks
try:
Schedule.objects.filter(func__in=obsolete).delete()
except Exception:
logger.exception('Failed to remove obsolete tasks - database not ready')
Schedule.objects.filter(func__in=obsolete).delete()
def start_background_tasks(self):
"""Start all background tests for InvenTree."""
logger.info('Starting background tasks...')
from django_q.models import Schedule
# List of existing scheduled tasks (in the database)
existing_tasks = {}
for existing_task in Schedule.objects.all():
existing_tasks[existing_task.func] = existing_task
tasks_to_create = []
tasks_to_update = []
# List of collected tasks found with the @scheduled_task decorator
tasks = InvenTree.tasks.tasks.task_list
for task in tasks:
ref_name = f'{task.func.__module__}.{task.func.__name__}'
if ref_name in existing_tasks.keys():
# This task already exists - update the details if required
existing_task = existing_tasks[ref_name]
if (
existing_task.schedule_type != task.interval
or existing_task.minutes != task.minutes
):
existing_task.schedule_type = task.interval
existing_task.minutes = task.minutes
tasks_to_update.append(existing_task)
else:
# This task does *not* already exist - create it
tasks_to_create.append(
Schedule(
name=ref_name,
func=ref_name,
schedule_type=task.interval,
minutes=task.minutes,
)
)
if len(tasks_to_create) > 0:
Schedule.objects.bulk_create(tasks_to_create)
logger.info('Created %s new scheduled tasks', len(tasks_to_create))
if len(tasks_to_update) > 0:
Schedule.objects.bulk_update(tasks_to_update, ['schedule_type', 'minutes'])
logger.info('Updated %s existing scheduled tasks', len(tasks_to_update))
self.add_heartbeat()
logger.info('Started %s scheduled background tasks...', len(tasks))
def add_heartbeat(self):
"""Ensure there is at least one background task in the queue."""
import django_q.models
try:
if django_q.models.OrmQ.objects.count() == 0:
InvenTree.tasks.offload_task(
InvenTree.tasks.heartbeat, force_async=True
)
except Exception:
pass
from django_q.models import Schedule
except AppRegistryNotReady: # pragma: no cover
return
def collect_tasks(self):
"""Collect all background tasks."""
for app_name, app in apps.app_configs.items():
if app_name == 'InvenTree':
continue
logger.info("Starting background tasks...")
if Path(app.path).joinpath('tasks.py').exists():
try:
import_module(f'{app.module.__package__}.tasks')
except Exception as e: # pragma: no cover
logger.exception('Error loading tasks for %s: %s', app_name, e)
# Remove successful task results from the database
InvenTree.tasks.schedule_task(
'InvenTree.tasks.delete_successful_tasks',
schedule_type=Schedule.DAILY,
)
# Check for InvenTree updates
InvenTree.tasks.schedule_task(
'InvenTree.tasks.check_for_updates',
schedule_type=Schedule.DAILY
)
# Heartbeat to let the server know the background worker is running
InvenTree.tasks.schedule_task(
'InvenTree.tasks.heartbeat',
schedule_type=Schedule.MINUTES,
minutes=15
)
# Keep exchange rates up to date
InvenTree.tasks.schedule_task(
'InvenTree.tasks.update_exchange_rates',
schedule_type=Schedule.DAILY,
)
# Delete old error messages
InvenTree.tasks.schedule_task(
'InvenTree.tasks.delete_old_error_logs',
schedule_type=Schedule.DAILY,
)
# Delete old notification records
InvenTree.tasks.schedule_task(
'common.tasks.delete_old_notifications',
schedule_type=Schedule.DAILY,
)
def update_exchange_rates(self): # pragma: no cover
"""Update exchange rates each time the server is started.
"""
Update exchange rates each time the server is started, *if*:
Only runs *if*:
a) Have not been updated recently (one day or less)
b) The base exchange rate has been altered
"""
try:
from djmoney.contrib.exchange.models import ExchangeBackend
@@ -187,100 +121,56 @@ class InvenTreeConfig(AppConfig):
update = False
try:
backend = ExchangeBackend.objects.filter(name='InvenTreeExchange')
backend = ExchangeBackend.objects.get(name='InvenTreeExchange')
if backend.exists():
backend = backend.first()
last_update = backend.last_update
last_update = backend.last_update
if last_update is None:
# Never been updated
logger.info("Exchange backend has never been updated")
update = True
if last_update is None:
# Never been updated
logger.info('Exchange backend has never been updated')
update = True
# Backend currency has changed?
if base_currency != backend.base_currency:
logger.info(f"Base currency changed from {backend.base_currency} to {base_currency}")
update = True
# Backend currency has changed?
if base_currency != backend.base_currency:
logger.info(
'Base currency changed from %s to %s',
backend.base_currency,
base_currency,
)
update = True
except ExchangeBackend.DoesNotExist:
logger.info('Exchange backend not found - updating')
except (ExchangeBackend.DoesNotExist):
logger.info("Exchange backend not found - updating")
update = True
except Exception:
except:
# Some other error - potentially the tables are not ready yet
return
if update:
try:
update_exchange_rates()
except OperationalError:
logger.warning('Could not update exchange rates - database not ready')
except Exception as e:
logger.exception('Error updating exchange rates: %s (%s)', e, type(e))
def update_site_url(self):
"""Update the site URL setting.
- If a fixed SITE_URL is specified (via configuration), it should override the INVENTREE_BASE_URL setting
- If multi-site support is enabled, update the site URL for the current site
"""
import common.models
if not InvenTree.ready.canAppAccessDatabase():
return
if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
return
if settings.SITE_URL:
try:
if (
common.models.InvenTreeSetting.get_setting('INVENTREE_BASE_URL')
!= settings.SITE_URL
):
common.models.InvenTreeSetting.set_setting(
'INVENTREE_BASE_URL', settings.SITE_URL
)
logger.info('Updated INVENTREE_SITE_URL to %s', settings.SITE_URL)
except Exception:
pass
# If multi-site support is enabled, update the site URL for the current site
try:
from django.contrib.sites.models import Site
site = Site.objects.get_current()
site.domain = settings.SITE_URL
site.save()
logger.info('Updated current site URL to %s', settings.SITE_URL)
except Exception:
pass
logger.error(f"Error updating exchange rates: {e}")
def add_user_on_startup(self):
"""Add a user on startup."""
"""Add a user on startup"""
# stop if checks were already created
if hasattr(settings, 'USER_ADDED') and settings.USER_ADDED:
return
# get values
add_user = get_setting('INVENTREE_ADMIN_USER', 'admin_user')
add_email = get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email')
add_password = get_setting('INVENTREE_ADMIN_PASSWORD', 'admin_password')
add_password_file = get_setting(
'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
add_user = get_setting(
'INVENTREE_ADMIN_USER',
settings.CONFIG.get('admin_user', False)
)
add_email = get_setting(
'INVENTREE_ADMIN_EMAIL',
settings.CONFIG.get('admin_email', False)
)
add_password = get_setting(
'INVENTREE_ADMIN_PASSWORD',
settings.CONFIG.get('admin_password', False)
)
# check if all values are present
set_variables = 0
for tested_var in [add_user, add_email, add_password]:
if tested_var:
set_variables += 1
@@ -292,77 +182,29 @@ class InvenTreeConfig(AppConfig):
# not all needed variables set
if set_variables < 3:
logger.warn('Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD')
settings.USER_ADDED = True
# if a password file is present, do not warn - will be handled later
if add_password_file:
return
logger.warning(
'Not all required settings for adding a user on startup are present:\nINVENTREE_ADMIN_USER, INVENTREE_ADMIN_EMAIL, INVENTREE_ADMIN_PASSWORD'
)
return
# good to go -> create user
self._create_admin_user(add_user, add_email, add_password)
# do not try again
settings.USER_ADDED = True
def _create_admin_user(self, add_user, add_email, add_password):
user = get_user_model()
try:
with transaction.atomic():
if user.objects.filter(username=add_user).exists():
logger.info('User %s already exists - skipping creation', add_user)
logger.info(f"User {add_user} already exists - skipping creation")
else:
new_user = user.objects.create_superuser(
add_user, add_email, add_password
)
logger.info('User %s was created!', str(new_user))
except IntegrityError:
logger.warning('The user "%s" could not be created', add_user)
def add_user_from_file(self):
"""Add the superuser from a file."""
# stop if checks were already created
if hasattr(settings, 'USER_ADDED_FILE') and settings.USER_ADDED_FILE:
return
# get values
add_password_file = get_setting(
'INVENTREE_ADMIN_PASSWORD_FILE', 'admin_password_file', None
)
# no variable set -> do not try anything
if not add_password_file:
settings.USER_ADDED_FILE = True
return
# check if file exists
add_password_file = Path(str(add_password_file))
if not add_password_file.exists():
logger.warning('The file "%s" does not exist', add_password_file)
settings.USER_ADDED_FILE = True
return
# good to go -> create user
self._create_admin_user(
get_setting('INVENTREE_ADMIN_USER', 'admin_user', 'admin'),
get_setting('INVENTREE_ADMIN_EMAIL', 'admin_email', ''),
add_password_file.read_text(encoding='utf-8'),
)
new_user = user.objects.create_superuser(add_user, add_email, add_password)
logger.info(f'User {str(new_user)} was created!')
except IntegrityError as _e:
logger.warning(f'The user "{add_user}" could not be created due to the following error:\n{str(_e)}')
# do not try again
settings.USER_ADDED_FILE = True
settings.USER_ADDED = True
def collect_notification_methods(self):
"""Collect all notification methods."""
"""
Collect all notification methods
"""
from common.notifications import storage
storage.collect()
def collect_state_transition_methods(self):
"""Collect all state transition methods."""
from generic.states import storage
storage.collect()

View File

@@ -1,85 +0,0 @@
"""Custom backend implementations."""
import datetime
import logging
import time
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from maintenance_mode.backends import AbstractStateBackend
import common.models
logger = logging.getLogger('inventree')
class InvenTreeMaintenanceModeBackend(AbstractStateBackend):
"""Custom backend for managing state of maintenance mode.
Stores a timestamp in the database to determine when maintenance mode will elapse.
"""
SETTING_KEY = '_MAINTENANCE_MODE'
def get_value(self) -> bool:
"""Get the current state of the maintenance mode.
Returns:
bool: True if maintenance mode is active, False otherwise.
"""
try:
setting = common.models.InvenTreeSetting.objects.get(key=self.SETTING_KEY)
value = str(setting.value).strip()
except common.models.InvenTreeSetting.DoesNotExist:
# Database is accessible, but setting is not available - assume False
return False
except (IntegrityError, OperationalError, ProgrammingError):
# Database is inaccessible - assume maintenance mode is active
logger.debug('Failed to read maintenance mode state - assuming True')
return True
# Extract timestamp from string
try:
# If the timestamp is in the past, we are now *out* of maintenance mode
timestamp = datetime.datetime.fromisoformat(value)
return timestamp > datetime.datetime.now()
except ValueError:
# If the value is not a valid timestamp, assume maintenance mode is not active
return False
def set_value(self, value: bool, retries: int = 5, minutes: int = 5):
"""Set the state of the maintenance mode.
Instead of simply writing "true" or "false" to the setting,
we write a timestamp to the setting, which is used to determine
when maintenance mode will elapse.
This ensures that we will always *exit* maintenance mode after a certain time period.
"""
logger.debug('Setting maintenance mode state: %s', value)
if value:
# Save as isoformat
timestamp = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
timestamp = timestamp.isoformat()
else:
# Blank timestamp means maintenance mode is not active
timestamp = ''
while retries > 0:
try:
common.models.InvenTreeSetting.set_setting(self.SETTING_KEY, timestamp)
# Read the value back to confirm
if self.get_value() == value:
break
except (IntegrityError, OperationalError, ProgrammingError):
# If the database is locked, wait briefly and try again
logger.debug(
'Failed to set maintenance mode state (%s retries left)', retries
)
time.sleep(0.1)
retries -= 1
if retries == 0:
logger.warning('Failed to set maintenance mode state')
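A minimal stand-alone sketch of the timestamp scheme described above, using only the standard library (the real backend persists the value through InvenTreeSetting, which is omitted here):
# Illustrative only: the stored value is an ISO timestamp, and "active" simply
# means that timestamp still lies in the future.
import datetime
_store = {}  # stands in for the InvenTreeSetting table
def set_maintenance(active: bool, minutes: int = 5) -> None:
    """Store a future timestamp when activating, or a blank string when clearing."""
    if active:
        until = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
        _store['_MAINTENANCE_MODE'] = until.isoformat()
    else:
        _store['_MAINTENANCE_MODE'] = ''
def in_maintenance() -> bool:
    """Maintenance mode is active only while the stored timestamp is in the future."""
    value = _store.get('_MAINTENANCE_MODE', '').strip()
    try:
        return datetime.datetime.fromisoformat(value) > datetime.datetime.now()
    except ValueError:
        # Blank or malformed value -> not in maintenance mode
        return False
set_maintenance(True, minutes=5)
assert in_maintenance() is True
set_maintenance(False)
assert in_maintenance() is False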

View File

@@ -1,46 +1,59 @@
"""Pull rendered copies of the templated.
Only used for testing the js files! - This file is omitted from coverage.
"""
Pull rendered copies of the templated
only used for testing the js files! - This file is omited from coverage
"""
import os # pragma: no cover
import pathlib # pragma: no cover
from InvenTree.unit_test import InvenTreeTestCase # pragma: no cover
from InvenTree.helpers import InvenTreeTestCase # pragma: no cover
class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
"""A unit test to "render" javascript files.
"""
A unit test to "render" javascript files.
The server renders templated javascript files,
we need the fully-rendered files for linting and static tests.
"""
def download_file(self, filename, prefix):
"""Function to `download`(copy) a file to a temporary firectory."""
url = os.path.join(prefix, filename)
response = self.client.get(url)
here = os.path.abspath(os.path.dirname(__file__))
output_dir = os.path.join(here, '..', '..', 'js_tmp')
output_dir = os.path.join(
here,
'..',
'..',
'js_tmp',
)
output_dir = os.path.abspath(output_dir)
if not os.path.exists(output_dir):
os.mkdir(output_dir)
output_file = os.path.join(output_dir, filename)
output_file = os.path.join(
output_dir,
filename,
)
with open(output_file, 'wb') as output:
output.write(response.content)
def download_files(self, subdir, prefix):
"""Download files in directory."""
here = os.path.abspath(os.path.dirname(__file__))
js_template_dir = os.path.join(here, '..', 'templates', 'js')
js_template_dir = os.path.join(
here,
'..',
'templates',
'js',
)
directory = os.path.join(js_template_dir, subdir)
@@ -60,12 +73,15 @@ class RenderJavascriptFiles(InvenTreeTestCase): # pragma: no cover
return n
def test_render_files(self):
"""Look for all javascript files."""
"""
Look for all javascript files
"""
n = 0
print('Rendering javascript files...')
print("Rendering javascript files...")
n += self.download_files('translated', '/js/i18n')
n += self.download_files('dynamic', '/js/dynamic')
print(f'Rendered {n} javascript files.')
print(f"Rendered {n} javascript files.")

View File

@@ -1,453 +1,89 @@
"""Helper functions for loading InvenTree configuration options."""
"""
Helper functions for loading InvenTree configuration options
"""
import datetime
import json
import logging
import os
import random
import shutil
import string
import warnings
from pathlib import Path
from django.core.files.base import ContentFile
from django.core.files.storage import Storage
logger = logging.getLogger('inventree')
CONFIG_DATA = None
CONFIG_LOOKUPS = {}
def to_list(value, delimiter=','):
"""Take a configuration setting and make sure it is a list.
def get_base_dir():
""" Returns the base (top-level) InvenTree directory """
return os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
For example, we might have a configuration setting taken from the .config file,
which is already a list.
However, the same setting may be specified via an environment variable,
using a comma delimited string!
def get_config_file():
"""
if type(value) in [list, tuple]:
return value
# Otherwise, force string value
value = str(value)
return [x.strip() for x in value.split(delimiter)]
def to_dict(value):
"""Take a configuration setting and make sure it is a dict.
For example, we might have a configuration setting taken from the .config file,
which is already an object/dict.
However, the same setting may be specified via an environment variable,
using a valid JSON string!
"""
if value is None:
return {}
if isinstance(value, dict):
return value
try:
return json.loads(value)
except Exception as error:
logger.exception(
"Failed to parse value '%s' as JSON with error %s. Ensure value is a valid JSON string.",
value,
error,
)
return {}
def is_true(x):
"""Shortcut function to determine if a value "looks" like a boolean."""
return str(x).strip().lower() in ['1', 'y', 'yes', 't', 'true', 'on']
def get_base_dir() -> Path:
"""Returns the base (top-level) InvenTree directory."""
return Path(__file__).parent.parent.resolve()
def ensure_dir(path: Path, storage=None) -> None:
"""Ensure that a directory exists.
If it does not exist, create it.
"""
if storage and isinstance(storage, Storage):
if not storage.exists(str(path)):
storage.save(str(path / '.empty'), ContentFile(''))
return
if not path.exists():
path.mkdir(parents=True, exist_ok=True)
def get_config_file(create=True) -> Path:
"""Returns the path of the InvenTree configuration file.
Returns the path of the InvenTree configuration file.
Note: It will be created if it does not already exist!
"""
base_dir = get_base_dir()
cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')
if cfg_filename:
cfg_filename = Path(cfg_filename.strip()).resolve()
cfg_filename = cfg_filename.strip()
cfg_filename = os.path.abspath(cfg_filename)
else:
# Config file is *not* specified - use the default
cfg_filename = base_dir.joinpath('config.yaml').resolve()
cfg_filename = os.path.join(base_dir, 'config.yaml')
if not cfg_filename.exists() and create:
print(
"InvenTree configuration file 'config.yaml' not found - creating default file"
)
ensure_dir(cfg_filename.parent)
if not os.path.exists(cfg_filename):
print("InvenTree configuration file 'config.yaml' not found - creating default file")
cfg_template = base_dir.joinpath('config_template.yaml')
cfg_template = os.path.join(base_dir, "config_template.yaml")
shutil.copyfile(cfg_template, cfg_filename)
print(f'Created config file {cfg_filename}')
print(f"Created config file {cfg_filename}")
return cfg_filename
def load_config_data(set_cache: bool = False) -> map:
"""Load configuration data from the config file.
Arguments:
set_cache(bool): If True, the configuration data will be cached for future use after load.
"""
global CONFIG_DATA
# use cache if populated
# skip cache if cache should be set
if CONFIG_DATA is not None and not set_cache:
return CONFIG_DATA
import yaml
cfg_file = get_config_file()
with open(cfg_file, 'r') as cfg:
data = yaml.safe_load(cfg)
# Set the cache if requested
if set_cache:
CONFIG_DATA = data
return data
def do_typecast(value, type, var_name=None):
"""Attempt to typecast a value.
Arguments:
value: Value to typecast
type: Function to use for typecasting the value e.g. int, float, str, list, dict
var_name: Name that should be logged e.g. 'INVENTREE_STATIC_ROOT'. Set if logging is required.
Returns:
Typecasted value or original value if typecasting failed.
"""
# Force 'list' of strings
if type is list:
value = to_list(value)
# Valid JSON string is required
elif type is dict:
value = to_dict(value)
elif type is not None:
# Try to typecast the value
try:
val = type(value)
return val
except Exception as error:
if var_name:
logger.exception(
"Failed to typecast '%s' with value '%s' to type '%s' with error %s",
var_name,
value,
type,
error,
)
return value
def get_setting(env_var=None, config_key=None, default_value=None, typecast=None):
"""Helper function for retrieving a configuration setting value.
- First preference is to look for the environment variable
- Second preference is to look for the value of the settings file
- Third preference is the default value
Arguments:
env_var: Name of the environment variable e.g. 'INVENTREE_STATIC_ROOT'
config_key: Key to lookup in the configuration file
default_value: Value to return if first two options are not provided
typecast: Function to use for typecasting the value e.g. int, float, str, list, dict
"""
def set_metadata(source: str):
"""Set lookup metadata for the setting."""
key = env_var or config_key
CONFIG_LOOKUPS[key] = {
'env_var': env_var,
'config_key': config_key,
'source': source,
'accessed': datetime.datetime.now(),
}
# First, try to load from the environment variables
if env_var is not None:
val = os.getenv(env_var, None)
if val is not None:
set_metadata('env')
return do_typecast(val, typecast, var_name=env_var)
# Next, try to load from configuration file
if config_key is not None:
cfg_data = load_config_data()
result = None
# Hack to allow 'path traversal' in configuration file
for key in config_key.strip().split('.'):
if type(cfg_data) is not dict or key not in cfg_data:
result = None
break
result = cfg_data[key]
cfg_data = cfg_data[key]
if result is not None:
set_metadata('yaml')
return do_typecast(result, typecast, var_name=env_var)
# Finally, return the default value
set_metadata('default')
return do_typecast(default_value, typecast, var_name=env_var)
def get_boolean_setting(env_var=None, config_key=None, default_value=False):
"""Helper function for retrieving a boolean configuration setting."""
return is_true(get_setting(env_var, config_key, default_value))
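A hedged usage sketch of get_setting() and get_boolean_setting(), assuming an InvenTree source checkout where InvenTree.config is importable and config.yaml can be loaded; the INVENTREE_EXAMPLE_* variables and 'example.*' keys are invented for illustration:
# Hypothetical usage; the environment variables and dotted config keys below
# are made-up names, not settings shipped with InvenTree.
import os
from InvenTree.config import get_setting, get_boolean_setting
# 1) Environment variable wins over the config file, which wins over the default
os.environ['INVENTREE_EXAMPLE_TIMEOUT'] = '30'
timeout = get_setting('INVENTREE_EXAMPLE_TIMEOUT', 'example.timeout', 10, typecast=int)
assert timeout == 30
# 2) With no env var and no config entry, the default value is returned
assert get_setting('INVENTREE_EXAMPLE_MISSING', 'example.missing', 'fallback') == 'fallback'
# 3) Boolean helper accepts '1', 'y', 'yes', 't', 'true', 'on' (case-insensitive)
os.environ['INVENTREE_EXAMPLE_FLAG'] = 'yes'
assert get_boolean_setting('INVENTREE_EXAMPLE_FLAG', 'example.flag', False) is True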
def get_media_dir(create=True):
"""Return the absolute path for the 'media' directory (where uploaded files are stored)."""
md = get_setting('INVENTREE_MEDIA_ROOT', 'media_root')
if not md:
raise FileNotFoundError('INVENTREE_MEDIA_ROOT not specified')
md = Path(md).resolve()
if create:
md.mkdir(parents=True, exist_ok=True)
return md
def get_static_dir(create=True):
"""Return the absolute path for the 'static' directory (where static files are stored)."""
sd = get_setting('INVENTREE_STATIC_ROOT', 'static_root')
if not sd:
raise FileNotFoundError('INVENTREE_STATIC_ROOT not specified')
sd = Path(sd).resolve()
if create:
sd.mkdir(parents=True, exist_ok=True)
return sd
def get_backup_dir(create=True):
"""Return the absolute path for the backup directory."""
bd = get_setting('INVENTREE_BACKUP_DIR', 'backup_dir')
if not bd:
raise FileNotFoundError('INVENTREE_BACKUP_DIR not specified')
bd = Path(bd).resolve()
if create:
bd.mkdir(parents=True, exist_ok=True)
return bd
def get_plugin_file():
"""Returns the path of the InvenTree plugins specification file.
"""
Returns the path of the InvenTree plugins specification file.
Note: It will be created if it does not already exist!
"""
# Check if the plugin.txt file (specifying required plugins) is specified
plugin_file = get_setting('INVENTREE_PLUGIN_FILE', 'plugin_file')
PLUGIN_FILE = os.getenv('INVENTREE_PLUGIN_FILE')
if not plugin_file:
if not PLUGIN_FILE:
# If not specified, look in the same directory as the configuration file
config_dir = get_config_file().parent
plugin_file = config_dir.joinpath('plugins.txt')
else:
# Make sure we are using a modern Path object
plugin_file = Path(plugin_file)
if not plugin_file.exists():
logger.warning(
'Plugin configuration file does not exist - creating default file'
)
logger.info("Creating plugin file at '%s'", plugin_file)
ensure_dir(plugin_file.parent)
config_dir = os.path.dirname(get_config_file())
PLUGIN_FILE = os.path.join(config_dir, 'plugins.txt')
if not os.path.exists(PLUGIN_FILE):
logger.warning("Plugin configuration file does not exist")
logger.info(f"Creating plugin file at '{PLUGIN_FILE}'")
# If opening the file fails (no write permission, for example), then this will throw an error
plugin_file.write_text(
'# InvenTree Plugins (uses PIP framework to install)\n\n'
)
with open(PLUGIN_FILE, 'w') as plugin_file:
plugin_file.write("# InvenTree Plugins (uses PIP framework to install)\n\n")
return plugin_file
return PLUGIN_FILE
def get_plugin_dir():
"""Returns the path of the custom plugins directory."""
return get_setting('INVENTREE_PLUGIN_DIR', 'plugin_dir')
def get_secret_key():
"""Return the secret key value which will be used by django.
Following options are tested, in descending order of preference:
A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
C) Look for default key file "secret_key.txt"
D) Create "secret_key.txt" if it does not exist
def get_setting(environment_var, backup_val, default_value=None):
"""
# Look for environment variable
if secret_key := get_setting('INVENTREE_SECRET_KEY', 'secret_key'):
logger.info('SECRET_KEY loaded by INVENTREE_SECRET_KEY') # pragma: no cover
return secret_key
Helper function for retrieving a configuration setting value
# Look for secret key file
if secret_key_file := get_setting('INVENTREE_SECRET_KEY_FILE', 'secret_key_file'):
secret_key_file = Path(secret_key_file).resolve()
else:
# Default location for secret key file
secret_key_file = get_base_dir().joinpath('secret_key.txt').resolve()
if not secret_key_file.exists():
logger.info("Generating random key file at '%s'", secret_key_file)
ensure_dir(secret_key_file.parent)
# Create a random key file
options = string.digits + string.ascii_letters + string.punctuation
key = ''.join([random.choice(options) for i in range(100)])
secret_key_file.write_text(key)
logger.debug("Loading SECRET_KEY from '%s'", secret_key_file)
key_data = secret_key_file.read_text().strip()
return key_data
def get_custom_file(
env_ref: str, conf_ref: str, log_ref: str, lookup_media: bool = False
):
"""Returns the checked path to a custom file.
Set lookup_media to True to also search in the media folder.
- First preference is to look for the environment variable
- Second preference is to look for the value of the settings file
- Third preference is the default value
"""
from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.files.storage import default_storage
value = get_setting(env_ref, conf_ref, None)
val = os.getenv(environment_var)
if not value:
return None
if val is not None:
return val
static_storage = StaticFilesStorage()
if backup_val is not None:
return backup_val
if static_storage.exists(value):
logger.info('Loading %s from %s directory: %s', log_ref, 'static', value)
elif lookup_media and default_storage.exists(value):
logger.info('Loading %s from %s directory: %s', log_ref, 'media', value)
else:
add_dir_str = ' or media' if lookup_media else ''
logger.warning(
"The %s file '%s' could not be found in the static %s directories",
log_ref,
value,
add_dir_str,
)
value = False
return value
def get_frontend_settings(debug=True):
"""Return a dictionary of settings for the frontend interface.
Note that the new config settings use the 'FRONTEND' key,
whereas the legacy key was 'PUI' (platform UI) which is now deprecated
"""
# Legacy settings
pui_settings = get_setting(
'INVENTREE_PUI_SETTINGS', 'pui_settings', {}, typecast=dict
)
if len(pui_settings) > 0:
warnings.warn(
"The 'INVENTREE_PUI_SETTINGS' key is deprecated. Please use 'INVENTREE_FRONTEND_SETTINGS' instead",
DeprecationWarning,
stacklevel=2,
)
# New settings
frontend_settings = get_setting(
'INVENTREE_FRONTEND_SETTINGS', 'frontend_settings', {}, typecast=dict
)
# Merge settings
settings = {**pui_settings, **frontend_settings}
# Set the base URL
if 'base_url' not in settings:
base_url = get_setting('INVENTREE_PUI_URL_BASE', 'pui_url_base', '')
if base_url:
warnings.warn(
"The 'INVENTREE_PUI_URL_BASE' key is deprecated. Please use 'INVENTREE_FRONTEND_URL_BASE' instead",
DeprecationWarning,
stacklevel=2,
)
else:
base_url = get_setting(
'INVENTREE_FRONTEND_URL_BASE', 'frontend_url_base', 'platform'
)
settings['base_url'] = base_url
# Set the server list
settings['server_list'] = settings.get('server_list', [])
# Set the debug flag
settings['debug'] = debug
if 'environment' not in settings:
settings['environment'] = 'development' if debug else 'production'
if debug and 'show_server_selector' not in settings:
# In debug mode, show server selector by default
settings['show_server_selector'] = True
elif len(settings['server_list']) == 0:
# If no servers are specified, show server selector
settings['show_server_selector'] = True
return settings
return default_value
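For illustration, the dictionary produced by get_frontend_settings(debug=True) when neither the environment nor config.yaml define any frontend settings would look roughly like this (values follow the defaults in the code above):
frontend_settings = {
    'base_url': 'platform',        # default frontend URL base
    'server_list': [],             # no servers configured
    'debug': True,                 # the debug flag passed in
    'environment': 'development',  # derived from debug=True
    'show_server_selector': True,  # shown by default when debug is enabled
}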

View File

@@ -1,20 +1,24 @@
# -*- coding: utf-8 -*-
"""Provides extra global data to all templates."""
"""
Provides extra global data to all templates.
"""
import InvenTree.email
import InvenTree.status
from generic.states import StatusCode
from InvenTree.helpers import inheritors
from users.models import RuleSet, check_user_role
from InvenTree.status_codes import (BuildStatus, PurchaseOrderStatus,
SalesOrderStatus, StockHistoryCode,
StockStatus)
from users.models import RuleSet
def health_status(request):
"""Provide system health status information to the global context.
"""
Provide system health status information to the global context.
- Not required for AJAX requests
- Do not provide if it is already provided to the context
"""
if request.path.endswith('.js'):
# Do not provide to script requests
return {} # pragma: no cover
@@ -27,11 +31,13 @@ def health_status(request):
status = {
'django_q_running': InvenTree.status.is_worker_running(),
'email_configured': InvenTree.email.is_email_configured(),
'email_configured': InvenTree.status.is_email_configured(),
}
# The following keys are required to denote system health
health_keys = ['django_q_running']
health_keys = [
'django_q_running',
]
all_healthy = True
@@ -47,17 +53,29 @@ def health_status(request):
def status_codes(request):
"""Provide status code enumerations."""
"""
Provide status code enumerations.
"""
if hasattr(request, '_inventree_status_codes'):
# Do not duplicate efforts
return {}
request._inventree_status_codes = True
return {cls.__name__: cls.template_context() for cls in inheritors(StatusCode)}
return {
# Expose the StatusCode classes to the templates
'SalesOrderStatus': SalesOrderStatus,
'PurchaseOrderStatus': PurchaseOrderStatus,
'BuildStatus': BuildStatus,
'StockStatus': StockStatus,
'StockHistoryCode': StockHistoryCode,
}
def user_roles(request):
"""Return a map of the current roles assigned to the user.
"""
Return a map of the current roles assigned to the user.
Roles are denoted by their simple names, and then the permission type.
@@ -68,16 +86,37 @@ def user_roles(request):
Each value will return a boolean True / False
"""
user = request.user
roles = {}
roles = {
}
for role in RuleSet.get_ruleset_models().keys():
permissions = {}
if user.is_superuser:
for ruleset in RuleSet.RULESET_MODELS.keys(): # pragma: no cover
roles[ruleset] = {
'view': True,
'add': True,
'change': True,
'delete': True,
}
else:
for group in user.groups.all():
for rule in group.rule_sets.all():
for perm in ['view', 'add', 'change', 'delete']:
permissions[perm] = user.is_superuser or check_user_role(user, role, perm)
# Ensure the role name is in the dict
if rule.name not in roles:
roles[rule.name] = {
'view': user.is_superuser,
'add': user.is_superuser,
'change': user.is_superuser,
'delete': user.is_superuser
}
roles[role] = permissions
# Roles are additive across groups
roles[rule.name]['view'] |= rule.can_view
roles[rule.name]['add'] |= rule.can_add
roles[rule.name]['change'] |= rule.can_change
roles[rule.name]['delete'] |= rule.can_delete
return {'roles': roles}
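For illustration, the context returned by user_roles() has roughly this shape; the role names come from RuleSet.get_ruleset_models(), and the permission values below are invented for a user who can only view and add parts:
context = {
    'roles': {
        'part': {'view': True, 'add': True, 'change': False, 'delete': False},
        'stock': {'view': True, 'add': False, 'change': False, 'delete': False},
        # ... one entry per ruleset, each with the four permission flags
    }
}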

View File

@@ -1,246 +0,0 @@
"""Helper functions for converting between units."""
import logging
import re
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
import pint
_unit_registry = None
logger = logging.getLogger('inventree')
def get_unit_registry():
"""Return a custom instance of the Pint UnitRegistry."""
global _unit_registry
# Cache the unit registry for speedier access
if _unit_registry is None:
return reload_unit_registry()
return _unit_registry
def reload_unit_registry():
"""Reload the unit registry from the database.
This function is called at startup, and whenever the database is updated.
"""
import time
t_start = time.time()
global _unit_registry
_unit_registry = None
reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
# Aliases for temperature units
reg.define('@alias degC = celsius = Celsius')
reg.define('@alias degF = fahrenheit = Fahrenheit')
reg.define('@alias degK = kelvin = Kelvin')
# Define some "standard" additional units
reg.define('piece = 1')
reg.define('each = 1 = ea')
reg.define('dozen = 12 = dz')
reg.define('hundred = 100')
reg.define('thousand = 1000')
# Allow for custom units to be defined in the database
try:
from common.models import CustomUnit
for cu in CustomUnit.objects.all():
try:
reg.define(cu.fmt_string())
except Exception as e:
logger.exception(
'Failed to load custom unit: %s - %s', cu.fmt_string(), e
)
# Once custom units are loaded, save registry
_unit_registry = reg
except Exception:
# Database is not ready, or CustomUnit model is not available
pass
dt = time.time() - t_start
logger.debug('Loaded unit registry in %.3f s', dt)
return reg
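For context, a small sketch of how definitions of the kind loaded above behave in a plain pint registry; the 'box' unit is a made-up example and not shipped with InvenTree:
# Requires the 'pint' package; 'box' is a hypothetical custom unit.
import pint
reg = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
reg.define('piece = 1')        # same style of definition as used above
reg.define('box = 24 piece')   # e.g. what a CustomUnit row might expand to
qty = reg.Quantity('3 box')
assert qty.to('piece').magnitude == 72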
def from_engineering_notation(value):
"""Convert a provided value to 'natural' representation from 'engineering' notation.
Ref: https://en.wikipedia.org/wiki/Engineering_notation
In "engineering notation", the unit (or SI prefix) is often combined with the value,
and replaces the decimal point.
Examples:
- 1K2 -> 1.2K
- 3n05 -> 3.05n
- 8R6 -> 8.6R
And, we should also take into account any provided trailing strings:
- 1K2 ohm -> 1.2K ohm
- 10n005F -> 10.005nF
"""
value = str(value).strip()
pattern = r'(\d+)([a-zA-Z]+)(\d+)(.*)'
if match := re.match(pattern, value):
left, prefix, right, suffix = match.groups()
return f'{left}.{right}{prefix}{suffix}'
return value
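A stand-alone check of the conversion described above, re-using the same regular expression so the docstring examples can be verified directly:
import re
def eng_to_natural(value: str) -> str:
    """Move an embedded SI prefix back to the end, restoring the decimal point."""
    match = re.match(r'(\d+)([a-zA-Z]+)(\d+)(.*)', str(value).strip())
    if match:
        left, prefix, right, suffix = match.groups()
        return f'{left}.{right}{prefix}{suffix}'
    return value
assert eng_to_natural('1K2') == '1.2K'
assert eng_to_natural('3n05') == '3.05n'
assert eng_to_natural('10n005F') == '10.005nF'
assert eng_to_natural('1K2 ohm') == '1.2K ohm'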
def convert_value(value, unit):
"""Attempt to convert a value to a specified unit.
Arguments:
value: The value to convert
unit: The target unit to convert to
Returns:
The converted value (ideally a pint.Quantity value)
Raises:
Exception if the value cannot be converted to the specified unit
"""
ureg = get_unit_registry()
# Convert the provided value to a pint.Quantity object
value = ureg.Quantity(value)
# Convert to the specified unit
if unit:
if is_dimensionless(value):
magnitude = value.to_base_units().magnitude
value = ureg.Quantity(magnitude, unit)
else:
value = value.to(unit)
return value
def convert_physical_value(value: str, unit: str = None, strip_units=True):
"""Validate that the provided value is a valid physical quantity.
Arguments:
value: Value to validate (str)
unit: Optional unit to convert to, and validate against
strip_units: If True, strip units from the returned value, and return only the dimension
Raises:
ValidationError: If the value is invalid or cannot be converted to the specified unit
Returns:
The converted quantity, in the specified units
"""
ureg = get_unit_registry()
# Check that the provided unit is available in the unit registry
if unit:
try:
valid = unit in ureg
except Exception as exc:
valid = False
if not valid:
raise ValidationError(_(f'Invalid unit provided ({unit})'))
original = str(value).strip()
# Ensure that the value is a string
value = str(value).strip() if value else ''
unit = str(unit).strip() if unit else ''
# Error on blank values
if not value:
raise ValidationError(_('No value provided'))
# Construct a list of values to "attempt" to convert
attempts = [value]
# Attempt to convert from engineering notation
eng = from_engineering_notation(value)
attempts.append(eng)
# Append the unit, if provided
# These are the "final" attempts to convert the value, and *must* appear after previous attempts
if unit:
attempts.append(f'{value}{unit}')
attempts.append(f'{eng}{unit}')
value = None
# Run through the available "attempts", take the first successful result
for attempt in attempts:
try:
value = convert_value(attempt, unit)
break
except Exception as exc:
value = None
pass
if value is None:
if unit:
raise ValidationError(_(f'Could not convert {original} to {unit}'))
else:
raise ValidationError(_('Invalid quantity supplied'))
# Calculate the "magnitude" of the value, as a float
# If the value is specified strangely (e.g. as a fraction or a dozen), this can cause issues
# So, we ensure that it is converted to a floating point value
# If we wish to return a "raw" value, some trickery is required
try:
if unit:
magnitude = ureg.Quantity(value.to(ureg.Unit(unit))).magnitude
else:
magnitude = ureg.Quantity(value.to_base_units()).magnitude
magnitude = float(ureg.Quantity(magnitude).to_base_units().magnitude)
except Exception as exc:
raise ValidationError(_(f'Invalid quantity supplied ({exc})'))
if strip_units:
return magnitude
elif unit or value.units:
return ureg.Quantity(magnitude, unit or value.units)
return ureg.Quantity(magnitude)
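A hedged usage sketch of convert_physical_value(), assuming a configured InvenTree environment where the unit registry is available; the values and units are illustrative:
from InvenTree.conversion import convert_physical_value
# Convert and return just the magnitude (strip_units defaults to True)
power = convert_physical_value('2.3 kW', 'W')       # -> 2300.0
# Engineering notation is handled via the fallback attempts described above
resistance = convert_physical_value('4k7', 'ohm')   # -> 4700.0
# Keep the units attached by passing strip_units=False
current = convert_physical_value('100 mA', 'A', strip_units=False)  # -> pint quantity of 0.1 ampere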
def is_dimensionless(value):
"""Determine if the provided value is 'dimensionless'.
A dimensionless value might look like:
0.1
1/2 dozen
three thousand
1.2 dozen
(etc)
"""
ureg = get_unit_registry()
# Ensure the provided value is in the right format
value = ureg.Quantity(value)
if value.units == ureg.dimensionless:
return True
if value.to_base_units().units == ureg.dimensionless:
return True
# At this point, the value is not dimensionless
return False

View File

@@ -1,89 +0,0 @@
"""Code for managing email functionality in InvenTree."""
import logging
from django.conf import settings
from django.core import mail as django_mail
import InvenTree.ready
import InvenTree.tasks
logger = logging.getLogger('inventree')
def is_email_configured():
"""Check if email backend is configured.
NOTE: This does not check if the configuration is valid!
"""
configured = True
testing = settings.TESTING
if InvenTree.ready.isInTestMode():
return False
if InvenTree.ready.isImportingData():
return False
if not settings.EMAIL_HOST:
configured = False
# Display warning unless in test mode
if not testing: # pragma: no cover
logger.debug('EMAIL_HOST is not configured')
# Display warning unless in test mode
if not settings.EMAIL_HOST_USER and not testing: # pragma: no cover
logger.debug('EMAIL_HOST_USER is not configured')
# Display warning unless in test mode
if not settings.EMAIL_HOST_PASSWORD and testing: # pragma: no cover
logger.debug('EMAIL_HOST_PASSWORD is not configured')
# Email sender must be configured
if not settings.DEFAULT_FROM_EMAIL:
configured = False
if not testing: # pragma: no cover
logger.debug('DEFAULT_FROM_EMAIL is not configured')
return configured
def send_email(subject, body, recipients, from_email=None, html_message=None):
"""Send an email with the specified subject and body, to the specified recipients list."""
if isinstance(recipients, str):
recipients = [recipients]
import InvenTree.ready
import InvenTree.status
if InvenTree.ready.isImportingData():
# If we are importing data, don't send emails
return
if not InvenTree.email.is_email_configured() and not settings.TESTING:
# Email is not configured / enabled
return
# If a *from_email* is not specified, ensure that the default is set
if not from_email:
from_email = settings.DEFAULT_FROM_EMAIL
# If we still don't have a valid from_email, then we can't send emails
if not from_email:
if settings.TESTING:
from_email = 'from@test.com'
else:
logger.error('send_email failed: DEFAULT_FROM_EMAIL not specified')
return
InvenTree.tasks.offload_task(
django_mail.send_mail,
subject,
body,
from_email,
recipients,
fail_silently=False,
html_message=html_message,
)
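A hedged usage sketch of send_email(), assuming a configured InvenTree environment with a working email backend; the addresses and message content are placeholders:
from InvenTree.email import send_email
send_email(
    subject='Stock level warning',
    body='Part ABC-123 has fallen below its minimum stock level.',
    recipients='warehouse@example.com',   # a single string is accepted as well as a list
    html_message='<p>Part <b>ABC-123</b> is low on stock.</p>',
)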

View File

@@ -1,16 +1,17 @@
"""Custom exception handling for the DRF API."""
"""
Custom exception handling for the DRF API
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import sys
import traceback
from django.conf import settings
from django.core.exceptions import ValidationError as DjangoValidationError
from django.db.utils import IntegrityError, OperationalError
from django.utils.translation import gettext_lazy as _
from django.views.debug import ExceptionReporter
import rest_framework.views as drfviews
from error_report.models import Error
@@ -18,76 +19,16 @@ from rest_framework import serializers
from rest_framework.exceptions import ValidationError as DRFValidationError
from rest_framework.response import Response
import InvenTree.sentry
logger = logging.getLogger('inventree')
def log_error(path, error_name=None, error_info=None, error_data=None):
"""Log an error to the database.
- Uses python exception handling to extract error details
Arguments:
path: The 'path' (most likely a URL) associated with this error (optional)
kwargs:
error_name: The name of the error (optional, overrides 'kind')
error_info: The error information (optional, overrides 'info')
error_data: The error data (optional, overrides 'data')
"""
kind, info, data = sys.exc_info()
# Check if the error is on the ignore list
if kind in settings.IGNORED_ERRORS:
return
if error_name:
kind = error_name
else:
kind = getattr(kind, '__name__', 'Unknown Error')
if error_info:
info = error_info
if error_data:
data = error_data
else:
try:
data = '\n'.join(traceback.format_exception(kind, info, data))
except AttributeError:
data = 'No traceback information available'
# Log error to stderr
logger.error(info)
# Ensure the error information does not exceed field size limits
path = path[:200]
kind = kind[:128]
try:
Error.objects.create(kind=kind, info=info or '', data=data or '', path=path)
except Exception:
# Not much we can do if logging the error throws a db exception
logger.exception('Failed to log exception to database')
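A short usage sketch of log_error(), assuming a configured InvenTree environment where the Error table is available; the path and error name are illustrative:
from InvenTree.exceptions import log_error
try:
    raise ValueError('something went wrong')
except Exception:
    # log_error() reads the active exception from sys.exc_info(), so it must be
    # called while the exception is still being handled
    log_error('/api/example/', error_name='ExampleError')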
def exception_handler(exc, context):
"""Custom exception handler for DRF framework.
Ref: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling
Catches any errors not natively handled by DRF, and re-throws as an error DRF can handle.
If sentry error reporting is enabled, we will also provide the original exception to sentry.io
"""
response = None
Custom exception handler for DRF framework.
Ref: https://www.django-rest-framework.org/api-guide/exceptions/#custom-exception-handling
# Pass exception to sentry.io handler
try:
InvenTree.sentry.report_exception(exc)
except Exception:
# If sentry.io fails, we don't want to crash the server!
pass
Catches any errors not natively handled by DRF, and re-throws as an error DRF can handle
"""
response = None
# Catch any django validation error, and re-throw a DRF validation error
if isinstance(exc, DjangoValidationError):
@@ -99,14 +40,10 @@ def exception_handler(exc, context):
if response is None:
# DRF handler did not provide a default response for this exception
if settings.TESTING:
# If in TESTING mode, re-throw the exception for traceback
raise exc
elif settings.DEBUG:
# If in DEBUG mode, provide error information in the response
if settings.DEBUG:
error_detail = str(exc)
else:
error_detail = _('Error details can be found in the admin panel')
error_detail = _("Error details can be found in the admin panel")
response_data = {
'error': type(exc).__name__,
@@ -118,7 +55,16 @@ def exception_handler(exc, context):
response = Response(response_data, status=500)
log_error(context['request'].path)
# Log the exception to the database, too
kind, info, data = sys.exc_info()
Error.objects.create(
kind=kind.__name__,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
path=context['request'].path,
html=ExceptionReporter(context['request'], kind, info, data).get_traceback_html(),
)
if response is not None:
# Convert errors returned under the label '__all__' to 'non_field_errors'

View File

@@ -1,116 +1,65 @@
"""Custom exchange backend which hooks into the InvenTree plugin system to fetch exchange rates from an external API."""
import ssl
from urllib.error import URLError
from urllib.request import urlopen
import logging
from django.db.transaction import atomic
from django.db.utils import OperationalError
import certifi
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
from djmoney.contrib.exchange.models import ExchangeBackend, Rate
from common.settings import currency_code_default, currency_codes
logger = logging.getLogger('inventree')
class InvenTreeExchange(SimpleExchangeBackend):
"""Backend for automatically updating currency exchange rates.
"""
Backend for automatically updating currency exchange rates.
Uses the plugin system to actually fetch the rates from an external API.
Uses the exchangerate.host service API
"""
name = 'InvenTreeExchange'
name = "InvenTreeExchange"
def get_rates(self, **kwargs) -> None:
"""Set the requested currency codes and get rates."""
from common.models import InvenTreeSetting
from plugin import registry
def __init__(self):
self.url = "https://api.exchangerate.host/latest"
base_currency = kwargs.get('base_currency', currency_code_default())
symbols = kwargs.get('symbols', currency_codes())
super().__init__()
# Find the selected exchange rate plugin
slug = InvenTreeSetting.get_setting('CURRENCY_UPDATE_PLUGIN', '', create=False)
def get_params(self):
# No API key is required
return {
}
if slug:
plugin = registry.get_plugin(slug)
else:
plugin = None
def get_response(self, **kwargs):
"""
Custom code to get response from server.
Note: Adds a 5-second timeout
"""
if not plugin:
# Find the first active currency exchange plugin
plugins = registry.with_mixin('currencyexchange', active=True)
url = self.get_url(**kwargs)
if len(plugins) > 0:
plugin = plugins[0]
if not plugin:
logger.warning(
'No active currency exchange plugins found - skipping update'
)
return {}
logger.info("Running exchange rate update using plugin '%s'", plugin.name)
# Plugin found - run the update task
try:
rates = plugin.update_exchange_rates(base_currency, symbols)
except Exception as exc:
logger.exception('Exchange rate update failed: %s', exc)
return {}
context = ssl.create_default_context(cafile=certifi.where())
response = urlopen(url, timeout=5, context=context)
return response.read()
except:
# Returning None here will raise an error upstream
return None
if not rates:
logger.warning(
'Exchange rate update failed - no data returned from plugin %s', slug
)
return {}
def update_rates(self, base_currency=currency_code_default()):
# Update exchange rates based on returned data
if type(rates) is not dict:
logger.warning(
'Invalid exchange rate data returned from plugin %s (type %s)',
slug,
type(rates),
)
return {}
symbols = ','.join(currency_codes())
# Ensure base currency is provided
rates[base_currency] = 1.00
return rates
@atomic
def update_rates(self, base_currency=None, **kwargs):
"""Call to update all exchange rates."""
backend, _ = ExchangeBackend.objects.update_or_create(
name=self.name, defaults={'base_currency': base_currency}
)
if base_currency is None:
base_currency = currency_code_default()
symbols = currency_codes()
logger.info(
'Updating exchange rates for %s (%s currencies)',
base_currency,
len(symbols),
)
# Fetch new rates from the backend
# If the backend fails, the existing rates will not be updated
rates = self.get_rates(base_currency=base_currency, symbols=symbols)
if rates:
# Clear out existing rates
backend.clear_rates()
Rate.objects.bulk_create([
Rate(currency=currency, value=amount, backend=backend)
for currency, amount in rates.items()
])
else:
logger.info(
'No exchange rates returned from backend - currencies not updated'
)
logger.info('Updated exchange rates for %s', base_currency)
try:
super().update_rates(base=base_currency, symbols=symbols)
# catch connection errors
except URLError:
print('Encountered connection error while updating')
except OperationalError as e:
if 'SerializationFailure' in e.__cause__.__class__.__name__:
print('Serialization Failure while updating exchange rates')
# We are just going to swallow this exception because the
# exchange rates will be updated later by the scheduled task
else:
# Other operational errors probably are still show stoppers
# so reraise them so that the log contains the stacktrace
raise

View File

@@ -1,148 +1,108 @@
"""Custom fields used in InvenTree."""
""" Custom fields used in InvenTree """
import sys
from decimal import Decimal
from django import forms
from django.db import models
from django.core import validators
from django.db import models as models
from django.forms.fields import URLField as FormURLField
from django.utils.translation import gettext_lazy as _
from djmoney.forms.fields import MoneyField
from djmoney.models.fields import MoneyField as ModelMoneyField
from djmoney.models.validators import MinMoneyValidator
from rest_framework.fields import URLField as RestURLField
from rest_framework.fields import empty
import InvenTree.helpers
from .validators import AllowedURLValidator, allowable_url_schemes
from .validators import allowable_url_schemes
class InvenTreeRestURLField(RestURLField):
"""Custom field for DRF with custom scheme validators."""
class InvenTreeURLFormField(FormURLField):
""" Custom URL form field with custom scheme validators """
def __init__(self, **kwargs):
"""Update schemes."""
# Enforce 'max length' parameter in form validation
if 'max_length' not in kwargs:
kwargs['max_length'] = 200
super().__init__(**kwargs)
self.validators[-1].schemes = allowable_url_schemes()
def run_validation(self, data=empty):
"""Override default validation behaviour for this field type."""
import common.models
strict_urls = common.models.InvenTreeSetting.get_setting(
'INVENTREE_STRICT_URLS', True, cache=False
)
if not strict_urls and data is not empty:
if '://' not in data:
# Validate as if there were a schema provided
data = 'http://' + data
return super().run_validation(data=data)
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
class InvenTreeURLField(models.URLField):
"""Custom URL field which has custom scheme validators."""
""" Custom URL field which has custom scheme validators """
default_validators = [AllowedURLValidator()]
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
def __init__(self, **kwargs):
"""Initialization method for InvenTreeURLField."""
# Max length for InvenTreeURLField is set to 200
kwargs['max_length'] = 200
super().__init__(**kwargs)
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': InvenTreeURLFormField
})
def money_kwargs(**kwargs):
"""Returns the database settings for MoneyFields."""
def money_kwargs():
""" returns the database settings for MoneyFields """
from common.settings import currency_code_default, currency_code_mappings
# Default values (if not specified)
if 'max_digits' not in kwargs:
kwargs['max_digits'] = 19
if 'decimal_places' not in kwargs:
kwargs['decimal_places'] = 6
if 'currency_choices' not in kwargs:
kwargs['currency_choices'] = currency_code_mappings()
if 'default_currency' not in kwargs:
kwargs['default_currency'] = currency_code_default()
kwargs = {}
kwargs['currency_choices'] = currency_code_mappings()
kwargs['default_currency'] = currency_code_default()
return kwargs
class InvenTreeModelMoneyField(ModelMoneyField):
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
"""
Custom MoneyField for clean migrations while using dynamic currency settings
"""
def __init__(self, **kwargs):
"""Overwrite default values and validators."""
# detect if creating migration
if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
# remove currency information for a clean migration
kwargs['default_currency'] = ''
kwargs['currency_choices'] = []
kwargs = money_kwargs(**kwargs)
else:
# set defaults
kwargs.update(money_kwargs())
# Set a minimum value validator
validators = kwargs.get('validators', [])
allow_negative = kwargs.pop('allow_negative', False)
# If no validators are provided, add some "standard" ones
if len(validators) == 0:
if not allow_negative:
validators.append(MinMoneyValidator(0))
validators.append(
MinMoneyValidator(0),
)
kwargs['validators'] = validators
super().__init__(**kwargs)
def formfield(self, **kwargs):
"""Override form class to use own function."""
""" override form class to use own function """
kwargs['form_class'] = InvenTreeMoneyField
return super().formfield(**kwargs)
def to_python(self, value):
"""Convert value to python type."""
value = super().to_python(value)
return round_decimal(value, self.decimal_places)
def prepare_value(self, value):
"""Override the 'prepare_value' method, to remove trailing zeros when displaying.
Why? It looks nice!
"""
return round_decimal(value, self.decimal_places, normalize=True)
class InvenTreeMoneyField(MoneyField):
"""Custom MoneyField for clean migrations while using dynamic currency settings."""
""" custom MoneyField for clean migrations while using dynamic currency settings """
def __init__(self, *args, **kwargs):
"""Override initial values with the real info from database."""
kwargs = money_kwargs(**kwargs)
# override initial values with the real info from database
kwargs.update(money_kwargs())
super().__init__(*args, **kwargs)
class DatePickerFormField(forms.DateField):
"""Custom date-picker field."""
"""
Custom date-picker field
"""
def __init__(self, **kwargs):
"""Set up custom values."""
help_text = kwargs.get('help_text', _('Enter date'))
label = kwargs.get('label', None)
required = kwargs.get('required', False)
initial = kwargs.get('initial', None)
widget = forms.DateInput(attrs={'type': 'date'})
widget = forms.DateInput(
attrs={
'type': 'date',
}
)
forms.DateField.__init__(
self,
@@ -150,64 +110,49 @@ class DatePickerFormField(forms.DateField):
initial=initial,
help_text=help_text,
widget=widget,
label=label,
label=label
)
def round_decimal(value, places, normalize=False):
"""Round value to the specified number of places."""
if type(value) in [Decimal, float]:
value = round(value, places)
if normalize:
# Remove any trailing zeroes
value = InvenTree.helpers.normalize(value)
def round_decimal(value, places):
"""
Round value to the specified number of places.
"""
if value is not None:
# see https://docs.python.org/2/library/decimal.html#decimal.Decimal.quantize for options
return value.quantize(Decimal(10) ** -places)
return value
class RoundingDecimalFormField(forms.DecimalField):
"""Custom FormField that automatically rounds inputs."""
def to_python(self, value):
"""Convert value to python type."""
value = super().to_python(value)
return round_decimal(value, self.decimal_places)
value = round_decimal(value, self.decimal_places)
return value
def prepare_value(self, value):
"""Override the 'prepare_value' method, to remove trailing zeros when displaying.
"""
Override the 'prepare_value' method, to remove trailing zeros when displaying.
Why? It looks nice!
"""
return round_decimal(value, self.decimal_places, normalize=True)
if type(value) == Decimal:
return InvenTree.helpers.normalize(value)
else:
return value
class RoundingDecimalField(models.DecimalField):
"""Custom Field that automatically rounds inputs."""
def to_python(self, value):
"""Convert value to python type."""
value = super().to_python(value)
return round_decimal(value, self.decimal_places)
def formfield(self, **kwargs):
"""Return a Field instance for this field."""
kwargs['form_class'] = RoundingDecimalFormField
defaults = {
'form_class': RoundingDecimalFormField
}
defaults.update(kwargs)
return super().formfield(**kwargs)
class InvenTreeNotesField(models.TextField):
"""Custom implementation of a 'notes' field."""
# Maximum character limit for the various 'notes' fields
NOTES_MAX_LENGTH = 50000
def __init__(self, **kwargs):
"""Configure default initial values for this field."""
kwargs['max_length'] = self.NOTES_MAX_LENGTH
kwargs['verbose_name'] = _('Notes')
kwargs['blank'] = True
kwargs['null'] = True
super().__init__(**kwargs)
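Aside: a standalone sketch of the rounding behaviour implemented by the new round_decimal() and the RoundingDecimal* fields above. Not from the diff; normalize() here is a stand-in for InvenTree.helpers.normalize, and only Decimal inputs are shown:

```py
from decimal import Decimal

def normalize(d: Decimal) -> Decimal:
    """Strip trailing zeros without switching to exponent notation."""
    return d.quantize(Decimal(1)) if d == d.to_integral() else d.normalize()

def round_decimal(value, places, normalize_result=False):
    """Round 'value' to 'places' decimal places, optionally trimming trailing zeros."""
    if isinstance(value, (Decimal, float)):
        value = round(value, places)
        if normalize_result:
            value = normalize(Decimal(value))
    return value

print(round_decimal(Decimal('1.2345678'), 6))        # 1.234568
print(round_decimal(Decimal('1.2000000'), 6, True))  # 1.2
```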

View File

@@ -1,89 +1,9 @@
"""General filters for InvenTree."""
from datetime import datetime
from django.conf import settings
from django.utils import timezone
from django.utils.timezone import make_aware
from django_filters import rest_framework as rest_filters
from rest_framework import filters
import InvenTree.helpers
from rest_framework.filters import OrderingFilter
class InvenTreeDateFilter(rest_filters.DateFilter):
"""Custom DateFilter class which handles timezones correctly."""
def filter(self, qs, value):
"""Override the filter method to handle timezones correctly."""
if settings.USE_TZ:
if value is not None:
tz = timezone.get_current_timezone()
value = datetime(value.year, value.month, value.day)
value = make_aware(value, tz, True)
return super().filter(qs, value)
class InvenTreeSearchFilter(filters.SearchFilter):
"""Custom search filter which allows adjusting of search terms dynamically."""
def get_search_fields(self, view, request):
"""Return a set of search fields for the request, adjusted based on request params.
The following query params are available to 'augment' the search (in decreasing order of priority)
- search_regex: If True, search is performed on 'regex' comparison
"""
regex = InvenTree.helpers.str2bool(
request.query_params.get('search_regex', False)
)
search_fields = super().get_search_fields(view, request)
fields = []
if search_fields:
for field in search_fields:
if regex:
field = '$' + field
fields.append(field)
return fields
def get_search_terms(self, request):
"""Return the search terms for this search request.
Depending on the request parameters, we may "augment" these somewhat
"""
whole = InvenTree.helpers.str2bool(
request.query_params.get('search_whole', False)
)
terms = []
search_terms = super().get_search_terms(request)
if search_terms:
for term in search_terms:
term = term.strip()
if not term:
# Ignore blank inputs
continue
if whole:
# Wrap the search term to enable word-boundary matching
term = r'\y' + term + r'\y'
terms.append(term)
return terms
class InvenTreeOrderingFilter(filters.OrderingFilter):
"""Custom OrderingFilter class which allows aliased filtering of related fields.
class InvenTreeOrderingFilter(OrderingFilter):
"""
Custom OrderingFilter class which allows aliased filtering of related fields.
To use, simply specify this filter in the "filter_backends" section.
@@ -100,19 +20,22 @@ class InvenTreeOrderingFilter(filters.OrderingFilter):
"""
def get_ordering(self, request, queryset, view):
"""Override ordering for supporting aliases."""
ordering = super().get_ordering(request, queryset, view)
aliases = getattr(view, 'ordering_field_aliases', None)
# Attempt to map ordering fields based on provided aliases
if ordering is not None and aliases is not None:
"""Ordering fields should be mapped to separate fields."""
"""
Ordering fields should be mapped to separate fields
"""
ordering_initial = ordering
ordering = []
for field in ordering_initial:
reverse = field.startswith('-')
if reverse:
@@ -152,18 +75,3 @@ class InvenTreeOrderingFilter(filters.OrderingFilter):
ordering.append(a)
return ordering
SEARCH_ORDER_FILTER = [
rest_filters.DjangoFilterBackend,
InvenTreeSearchFilter,
filters.OrderingFilter,
]
SEARCH_ORDER_FILTER_ALIAS = [
rest_filters.DjangoFilterBackend,
InvenTreeSearchFilter,
InvenTreeOrderingFilter,
]
ORDER_FILTER = [rest_filters.DjangoFilterBackend, filters.OrderingFilter]
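Aside: a hypothetical API view showing how the filter backend lists above are meant to be used, assuming the module path InvenTree.filters; the view, model and field names are invented:

```py
from rest_framework import generics

from InvenTree.filters import SEARCH_ORDER_FILTER_ALIAS


class ExamplePartList(generics.ListAPIView):
    """Hypothetical list endpoint with search, ordering and aliased ordering."""

    # queryset / serializer_class omitted for brevity (e.g. Part.objects.all())

    filter_backends = SEARCH_ORDER_FILTER_ALIAS

    # Plain text search via ?search=..., optionally ?search_regex=true / ?search_whole=true
    search_fields = ['name', 'description']

    # '?ordering=category' is transparently remapped to the related field name
    ordering_fields = ['name', 'category']
    ordering_field_aliases = {'category': 'category__name'}
```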

View File

@@ -1,219 +0,0 @@
"""Custom string formatting functions and helpers."""
import re
import string
from django.conf import settings
from django.utils import translation
from django.utils.translation import gettext_lazy as _
from babel import Locale
from babel.numbers import parse_pattern
from djmoney.money import Money
def parse_format_string(fmt_string: str) -> dict:
"""Extract formatting information from the provided format string.
Returns a dict object which contains structured information about the format groups
"""
groups = string.Formatter().parse(fmt_string)
info = {}
seen_groups = set()
for group in groups:
# Skip any group which does not have a named value
if not group[1]:
continue
name = group[1]
# Check for duplicate named groups
if name in seen_groups:
raise ValueError(f"Duplicate group '{name}'")
else:
seen_groups.add(name)
info[group[1]] = {'format': group[1], 'prefix': group[0]}
return info
def construct_format_regex(fmt_string: str) -> str:
r"""Construct a regular expression based on a provided format string.
This function turns a python format string into a regular expression,
which can be used for two purposes:
- Ensure that a particular string matches the specified format
- Extract named variables from a matching string
This function also provides support for wildcard characters:
- '?' provides single character matching; is converted to a '.' (period) for regex
- '#' provides single digit matching; is converted to '\d'
Args:
fmt_string: A typical format string e.g. "PO-???-{ref:04d}"
Returns:
str: A regular expression pattern e.g. ^PO\-...\-(?P<ref>.*)$
Raises:
ValueError: Format string is invalid
"""
pattern = '^'
for group in string.Formatter().parse(fmt_string):
prefix = group[0] # Prefix (literal text appearing before this group)
name = group[1] # Name of this format variable
format = group[2] # Format specifier e.g :04d
rep = [
'+',
'-',
'.',
'{',
'}',
'(',
')',
'^',
'$',
'~',
'!',
'@',
':',
';',
'|',
"'",
'"',
]
# Escape any special regex characters
for ch in rep:
prefix = prefix.replace(ch, '\\' + ch)
# Replace ? with single-character match
prefix = prefix.replace('?', '.')
# Replace # with single-digit match
prefix = prefix.replace('#', r'\d')
pattern += prefix
# Add a named capture group for the format entry
if name:
# Check if integer values are required
if format.endswith('d'):
chr = '\d'
else:
chr = '.'
# Specify width
# TODO: Introspect required width
w = '+'
pattern += f'(?P<{name}>{chr}{w})'
pattern += '$'
return pattern
def validate_string(value: str, fmt_string: str) -> str:
"""Validate that the provided string matches the specified format.
Args:
value: The string to be tested e.g. 'SO-1234-ABC',
fmt_string: The required format e.g. 'SO-{ref}-???',
Returns:
bool: True if the value matches the required format, else False
Raises:
ValueError: The provided format string is invalid
"""
pattern = construct_format_regex(fmt_string)
result = re.match(pattern, value)
return result is not None
def extract_named_group(name: str, value: str, fmt_string: str) -> str:
"""Extract a named value from the provided string, given the provided format string.
Args:
name: Name of group to extract e.g. 'ref'
value: Raw string e.g. 'PO-ABC-1234'
fmt_string: Format pattern e.g. 'PO-???-{ref}
Returns:
str: String value of the named group
Raises:
ValueError: format string is incorrectly specified, or provided value does not match format string
NameError: named value does not exist in the format string
IndexError: named value could not be found in the provided entry
"""
info = parse_format_string(fmt_string)
if name not in info.keys():
raise NameError(_(f"Value '{name}' does not appear in pattern format"))
# Construct a regular expression for matching against the provided format string
# Note: This will raise a ValueError if 'fmt_string' is incorrectly specified
pattern = construct_format_regex(fmt_string)
# Run the regex matcher against the raw string
result = re.match(pattern, value)
if not result:
raise ValueError(
_('Provided value does not match required pattern: ') + fmt_string
)
# And return the value we are interested in
# Note: This will raise an IndexError if the named group was not matched
return result.group(name)
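Aside: a standalone illustration (not from the diff) of the regex that construct_format_regex() produces for the format string "PO-???-{ref:04d}" described above, and how validate_string()/extract_named_group() then use it:

```py
import re

# '?' becomes '.', '#' would become r'\d', and '{ref:04d}' becomes a named
# digit-capturing group, yielding:
pattern = r'^PO\-...\-(?P<ref>\d+)$'

match = re.match(pattern, 'PO-ABC-1234')
print(match.group('ref'))                       # 1234
print(re.match(pattern, 'PO-AB-1234') is None)  # True (wrong wildcard count)
```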
def format_money(
money: Money,
decimal_places: int = None,
format: str = None,
include_symbol: bool = True,
) -> str:
"""Format money object according to the currently set local.
Args:
money (Money): The money object to format
decimal_places (int): Number of decimal places to use
format (str): Format pattern according to LDML / the babel format pattern syntax (https://babel.pocoo.org/en/latest/numbers.html)
Returns:
str: The formatted string
Raises:
ValueError: format string is incorrectly specified
"""
language = None and translation.get_language() or settings.LANGUAGE_CODE
locale = Locale.parse(translation.to_locale(language))
if format:
pattern = parse_pattern(format)
else:
pattern = locale.currency_formats['standard']
if decimal_places is not None:
pattern.frac_prec = (decimal_places, decimal_places)
result = pattern.apply(
money.amount,
locale,
currency=money.currency.code if include_symbol else '',
currency_digits=decimal_places is None,
decimal_quantization=decimal_places is not None,
)
return result

View File

@@ -1,4 +1,6 @@
"""Helper forms which subclass Django forms to provide additional functionality."""
"""
Helper forms which subclass Django forms to provide additional functionality
"""
import logging
from urllib.parse import urlencode
@@ -11,27 +13,25 @@ from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from allauth.account.adapter import DefaultAccountAdapter
from allauth.account.forms import LoginForm, SignupForm, set_form_field_order
from allauth.core.exceptions import ImmediateHttpResponse
from allauth.account.forms import SignupForm, set_form_field_order
from allauth.exceptions import ImmediateHttpResponse
from allauth.socialaccount.adapter import DefaultSocialAccountAdapter
from allauth_2fa.adapter import OTPAdapter
from allauth_2fa.forms import TOTPDeviceRemoveForm
from allauth_2fa.utils import user_has_valid_totp_device
from crispy_forms.bootstrap import AppendedText, PrependedAppendedText, PrependedText
from crispy_forms.bootstrap import (AppendedText, Div, PrependedAppendedText,
PrependedText, StrictButton)
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Field, Layout
from dj_rest_auth.registration.serializers import RegisterSerializer
from rest_framework import serializers
import InvenTree.helpers_model
import InvenTree.sso
from common.models import InvenTreeSetting
from InvenTree.exceptions import log_error
from part.models import PartCategory
logger = logging.getLogger('inventree')
class HelperForm(forms.ModelForm):
"""Provides simple integration of crispy_forms extension."""
""" Provides simple integration of crispy_forms extension. """
# Custom field decorations can be specified here, per form class
field_prefix = {}
@@ -39,7 +39,6 @@ class HelperForm(forms.ModelForm):
field_placeholder = {}
def __init__(self, *args, **kwargs):
"""Setup layout."""
super(forms.ModelForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
@@ -56,8 +55,14 @@ class HelperForm(forms.ModelForm):
self.rebuild_layout()
def is_valid(self):
valid = super().is_valid()
return valid
def rebuild_layout(self):
"""Build crispy layout out of current fields."""
layouts = []
for field in self.fields:
@@ -67,10 +72,10 @@ class HelperForm(forms.ModelForm):
# Look for font-awesome icons
if prefix and prefix.startswith('fa-'):
prefix = f"<i class='fas {prefix}'/>"
prefix = r"<i class='fas {fa}'/>".format(fa=prefix)
if suffix and suffix.startswith('fa-'):
suffix = f"<i class='fas {suffix}'/>"
suffix = r"<i class='fas {fa}'/>".format(fa=suffix)
if prefix and suffix:
layouts.append(
@@ -79,19 +84,31 @@ class HelperForm(forms.ModelForm):
field,
prepended_text=prefix,
appended_text=suffix,
placeholder=placeholder,
placeholder=placeholder
)
)
)
elif prefix:
layouts.append(
Field(PrependedText(field, prefix, placeholder=placeholder))
Field(
PrependedText(
field,
prefix,
placeholder=placeholder
)
)
)
elif suffix:
layouts.append(
Field(AppendedText(field, suffix, placeholder=placeholder))
Field(
AppendedText(
field,
suffix,
placeholder=placeholder
)
)
)
else:
@@ -100,239 +117,188 @@ class HelperForm(forms.ModelForm):
self.helper.layout = Layout(*layouts)
class EditUserForm(HelperForm):
"""Form for editing user information."""
class ConfirmForm(forms.Form):
""" Generic confirmation form """
confirm = forms.BooleanField(
required=False, initial=False,
help_text=_("Confirm")
)
class Meta:
"""Metaclass options."""
fields = [
'confirm'
]
class DeleteForm(forms.Form):
""" Generic deletion form which provides simple user confirmation
"""
confirm_delete = forms.BooleanField(
required=False,
initial=False,
label=_('Confirm delete'),
help_text=_('Confirm item deletion')
)
class Meta:
fields = [
'confirm_delete'
]
class EditUserForm(HelperForm):
"""
Form for editing user information
"""
class Meta:
model = User
fields = ['first_name', 'last_name']
fields = [
'first_name',
'last_name',
]
class SetPasswordForm(HelperForm):
"""Form for setting user password."""
""" Form for setting user password
"""
enter_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Enter password'),
help_text=_('Enter new password'))
confirm_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Confirm password'),
help_text=_('Confirm new password'))
class Meta:
"""Metaclass options."""
model = User
fields = ['enter_password', 'confirm_password', 'old_password']
fields = [
'enter_password',
'confirm_password'
]
enter_password = forms.CharField(
max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Enter password'),
help_text=_('Enter new password'),
)
confirm_password = forms.CharField(
max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Confirm password'),
help_text=_('Confirm new password'),
)
class SettingCategorySelectForm(forms.ModelForm):
""" Form for setting category settings """
old_password = forms.CharField(
label=_('Old password'),
strip=False,
required=False,
widget=forms.PasswordInput(
attrs={'autocomplete': 'current-password', 'autofocus': True}
),
)
category = forms.ModelChoiceField(queryset=PartCategory.objects.all())
class Meta:
model = PartCategory
fields = [
'category'
]
def __init__(self, *args, **kwargs):
super(SettingCategorySelectForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
# Form rendering
self.helper.form_show_labels = False
self.helper.layout = Layout(
Div(
Div(Field('category'),
css_class='col-sm-6',
style='width: 70%;'),
Div(StrictButton(_('Select Category'), css_class='btn btn-primary', type='submit'),
css_class='col-sm-6',
style='width: 30%; padding-left: 0;'),
css_class='row',
),
)
# override allauth
class CustomLoginForm(LoginForm):
"""Custom login form to override default allauth behaviour."""
def login(self, request, redirect_url=None):
"""Perform login action.
First check that:
- A valid user has been supplied
"""
if not self.user:
# No user supplied - redirect to the login page
return HttpResponseRedirect(reverse('account_login'))
# Now perform default login action
return super().login(request, redirect_url)
class CustomSignupForm(SignupForm):
"""Override to use dynamic settings."""
"""
Override to use dynamic settings
"""
def __init__(self, *args, **kwargs):
"""Check settings to influence which fields are needed."""
kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')
super().__init__(*args, **kwargs)
# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
self.fields['email2'] = forms.EmailField(
label=_('Email (again)'),
self.fields["email2"] = forms.EmailField(
label=_("Email (again)"),
widget=forms.TextInput(
attrs={
'type': 'email',
'placeholder': _('Email address confirmation'),
"type": "email",
"placeholder": _("Email address confirmation"),
}
),
)
# check for two password fields
if not InvenTreeSetting.get_setting('LOGIN_SIGNUP_PWD_TWICE'):
self.fields.pop('password2')
self.fields.pop("password2")
# reorder fields
set_form_field_order(
self, ['username', 'email', 'email2', 'password1', 'password2']
)
set_form_field_order(self, ["username", "email", "email2", "password1", "password2", ])
def clean(self):
"""Make sure the supplied emails match if enabled in settings."""
cleaned_data = super().clean()
# check for two mail fields
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_MAIL_TWICE'):
email = cleaned_data.get('email')
email2 = cleaned_data.get('email2')
email = cleaned_data.get("email")
email2 = cleaned_data.get("email2")
if (email and email2) and email != email2:
self.add_error('email2', _('You must type the same email each time.'))
self.add_error("email2", _("You must type the same email each time."))
return cleaned_data
def registration_enabled():
"""Determine whether user registration is enabled."""
if (
InvenTreeSetting.get_setting('LOGIN_ENABLE_REG')
or InvenTree.sso.registration_enabled()
):
if settings.EMAIL_HOST:
return True
else:
logger.error(
'Registration cannot be enabled, because EMAIL_HOST is not configured.'
)
return False
class RegistratonMixin:
"""Mixin to check if registration should be enabled."""
"""
Mixin to check if registration should be enabled
"""
def is_open_for_signup(self, request, *args, **kwargs):
"""Check if signup is enabled in settings.
Configure the class variable `REGISTRATION_SETTING` to set which setting should be used, default: `LOGIN_ENABLE_REG`.
"""
if registration_enabled():
if settings.EMAIL_HOST and InvenTreeSetting.get_setting('LOGIN_ENABLE_REG', True):
return super().is_open_for_signup(request, *args, **kwargs)
return False
def clean_email(self, email):
"""Check if the mail is valid to the pattern in LOGIN_SIGNUP_MAIL_RESTRICTION (if enabled in settings)."""
mail_restriction = InvenTreeSetting.get_setting(
'LOGIN_SIGNUP_MAIL_RESTRICTION', None
)
if not mail_restriction:
return super().clean_email(email)
split_email = email.split('@')
if len(split_email) != 2:
logger.error('The user %s has an invalid email address', email)
raise forms.ValidationError(
_('The provided primary email address is not valid.')
)
mailoptions = mail_restriction.split(',')
for option in mailoptions:
if not option.startswith('@'):
log_error('LOGIN_SIGNUP_MAIL_RESTRICTION is not configured correctly')
raise forms.ValidationError(
_('The provided primary email address is not valid.')
)
else:
if split_email[1] == option[1:]:
return super().clean_email(email)
logger.info('The provided email domain for %s is not approved', email)
raise forms.ValidationError(_('The provided email domain is not approved.'))
def save_user(self, request, user, form, commit=True):
"""Check if a default group is set in settings."""
# Create the user
user = super().save_user(request, user, form)
# Check if a default group is set in settings
start_group = InvenTreeSetting.get_setting('SIGNUP_GROUP')
if start_group:
try:
group = Group.objects.get(id=start_group)
user.groups.add(group)
except Group.DoesNotExist:
logger.exception(
'The setting `SIGNUP_GROUP` contains an non existent group',
start_group,
)
logger.error('The setting `SIGNUP_GROUP` contains an non existant group', start_group)
user.save()
return user
class CustomUrlMixin:
"""Mixin to set urls."""
def get_email_confirmation_url(self, request, emailconfirmation):
"""Custom email confirmation (activation) url."""
url = reverse('account_confirm_email', args=[emailconfirmation.key])
return InvenTree.helpers_model.construct_absolute_url(url)
class CustomAccountAdapter(
CustomUrlMixin, RegistratonMixin, OTPAdapter, DefaultAccountAdapter
):
"""Override of adapter to use dynamic settings."""
class CustomAccountAdapter(RegistratonMixin, OTPAdapter, DefaultAccountAdapter):
"""
Override of adapter to use dynamic settings
"""
def send_mail(self, template_prefix, email, context):
"""Only send mail if backend configured."""
"""only send mail if backend configured"""
if settings.EMAIL_HOST:
try:
result = super().send_mail(template_prefix, email, context)
except Exception:
# An exception occurred while attempting to send email
# Log it (for admin users) and return silently
log_error('account email')
result = False
return result
return super().send_mail(template_prefix, email, context)
return False
def get_email_confirmation_url(self, request, emailconfirmation):
"""Construct the email confirmation url."""
from InvenTree.helpers_model import construct_absolute_url
url = super().get_email_confirmation_url(request, emailconfirmation)
url = construct_absolute_url(url)
return url
class CustomSocialAccountAdapter(
CustomUrlMixin, RegistratonMixin, DefaultSocialAccountAdapter
):
"""Override of adapter to use dynamic settings."""
class CustomSocialAccountAdapter(RegistratonMixin, DefaultSocialAccountAdapter):
"""
Override of adapter to use dynamic settings
"""
def is_auto_signup_allowed(self, request, sociallogin):
"""Check if auto signup is enabled in settings."""
if InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO', True):
return super().is_auto_signup_allowed(request, sociallogin)
return False
@@ -343,7 +309,6 @@ class CustomSocialAccountAdapter(
return user_has_valid_totp_device(user)
def login(self, request, user):
"""Ensure user is send to 2FA before login if enabled."""
# Require two-factor authentication if it has been configured.
if self.has_2fa_enabled(user):
# Cast to string for the case when this is not a JSON serializable
@@ -355,41 +320,42 @@ class CustomSocialAccountAdapter(
if request.GET:
redirect_url += '?' + urlencode(request.GET)
raise ImmediateHttpResponse(response=HttpResponseRedirect(redirect_url))
raise ImmediateHttpResponse(
response=HttpResponseRedirect(redirect_url)
)
# Otherwise defer to the original allauth adapter.
return super().login(request, user)
def authentication_error(
self, request, provider_id, error=None, exception=None, extra_context=None
):
"""Callback method for authentication errors."""
if not error:
error = request.GET.get('error', None)
if not exception:
exception = request.GET.get('error_description', None)
# Temporary fix for django-allauth-2fa # TODO remove
# See https://github.com/inventree/InvenTree/security/advisories/GHSA-8j76-mm54-52xq
path = request.path or 'sso'
class CustomTOTPDeviceRemoveForm(TOTPDeviceRemoveForm):
"""Custom Form to ensure a token is provided before removing MFA"""
# User must input a valid token so 2FA can be removed
token = forms.CharField(
label=_('Token'),
)
# Log the error to the database
log_error(path, error_name=error, error_data=exception)
logger.error("SSO error for provider '%s' - check admin error log", provider_id)
def __init__(self, user, **kwargs):
"""Add token field."""
super().__init__(user, **kwargs)
self.fields['token'].widget.attrs.update(
{
'autofocus': 'autofocus',
'autocomplete': 'off',
}
)
def clean_token(self):
"""Ensure at least one valid token is provided."""
# Ensure that the user has provided a valid token
token = self.cleaned_data.get('token')
# override dj-rest-auth
class CustomRegisterSerializer(RegisterSerializer):
"""Override of serializer to use dynamic settings."""
# Verify that the user has provided a valid token
for device in self.user.totpdevice_set.filter(confirmed=True):
if device.verify_token(token):
return token
email = serializers.EmailField()
def __init__(self, instance=None, data=..., **kwargs):
"""Check settings to influence which fields are needed."""
kwargs['email_required'] = InvenTreeSetting.get_setting('LOGIN_MAIL_REQUIRED')
super().__init__(instance, data, **kwargs)
def save(self, request):
"""Override to check if registration is open."""
if registration_enabled():
return super().save(request)
raise forms.ValidationError(_('Registration is disabled.'))
raise forms.ValidationError(_("The entered token is not valid"))
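Aside: a standalone sketch of the email-domain check performed by RegistratonMixin.clean_email above, with the LOGIN_SIGNUP_MAIL_RESTRICTION value passed in directly. Not from the diff; the function name is invented, and where the real method raises ValidationError this sketch simply returns False:

```py
def email_domain_allowed(email: str, mail_restriction: str) -> bool:
    """Return True if 'email' matches one of the comma-separated '@domain' entries."""
    if not mail_restriction:
        return True  # no restriction configured

    parts = email.split('@')
    if len(parts) != 2:
        return False  # malformed address

    for option in mail_restriction.split(','):
        if not option.startswith('@'):
            return False  # mis-configured setting value
        if parts[1] == option[1:]:
            return True

    return False

print(email_domain_allowed('user@example.com', '@example.com,@example.org'))  # True
print(email_domain_allowed('user@gmail.com', '@example.com'))                 # False
```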

File diff suppressed because it is too large

View File

@@ -1,106 +0,0 @@
"""Provides helper mixins that are used throughout the InvenTree project."""
import inspect
from pathlib import Path
from django.conf import settings
from plugin import registry as plg_registry
class ClassValidationMixin:
"""Mixin to validate class attributes and overrides.
Class attributes:
required_attributes: List of class attributes that need to be defined
required_overrides: List of functions that need an override; a nested list means either one of them needs an override
Example:
```py
class Parent(ClassValidationMixin):
NAME: str
def test(self):
pass
required_attributes = ["NAME"]
required_overrides = [test]
class MyClass(Parent):
pass
myClass = MyClass()
myClass.validate() # raises NotImplementedError
```
"""
required_attributes = []
required_overrides = []
@classmethod
def validate(cls):
"""Validate the class against the required attributes/overrides."""
def attribute_missing(key):
"""Check if attribute is missing."""
return not hasattr(cls, key) or getattr(cls, key) == ''
def override_missing(base_implementation):
"""Check if override is missing."""
if isinstance(base_implementation, list):
return all(override_missing(x) for x in base_implementation)
return base_implementation == getattr(
cls, base_implementation.__name__, None
)
missing_attributes = list(filter(attribute_missing, cls.required_attributes))
missing_overrides = list(filter(override_missing, cls.required_overrides))
errors = []
if len(missing_attributes) > 0:
errors.append(
f"did not provide the following attributes: {', '.join(missing_attributes)}"
)
if len(missing_overrides) > 0:
missing_overrides_list = []
for base_implementation in missing_overrides:
if isinstance(base_implementation, list):
missing_overrides_list.append(
'one of '
+ ' or '.join(attr.__name__ for attr in base_implementation)
)
else:
missing_overrides_list.append(base_implementation.__name__)
errors.append(
f"did not override the required attributes: {', '.join(missing_overrides_list)}"
)
if len(errors) > 0:
raise NotImplementedError(f"'{cls}' " + ' and '.join(errors))
class ClassProviderMixin:
"""Mixin to get metadata about a class itself, e.g. the plugin that provided that class."""
@classmethod
def get_provider_file(cls):
"""File that contains the Class definition."""
return inspect.getfile(cls)
@classmethod
def get_provider_plugin(cls):
"""Plugin that contains the Class definition, otherwise None."""
for plg in plg_registry.plugins.values():
if plg.package_path == cls.__module__:
return plg
@classmethod
def get_is_builtin(cls):
"""Is this Class build in the Inventree source code?"""
try:
Path(cls.get_provider_file()).relative_to(settings.BASE_DIR)
return True
except ValueError:
# Path(...).relative_to throws a ValueError if it's not relative to the InvenTree source base dir
return False
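Aside: a minimal standalone illustration of the Path.relative_to() check that get_is_builtin() relies on. Not from the diff; the directory names are made up:

```py
from pathlib import Path

def is_inside(path: str, base_dir: str) -> bool:
    """Mirror get_is_builtin: relative_to() raises ValueError outside base_dir."""
    try:
        Path(path).relative_to(base_dir)
        return True
    except ValueError:
        return False

print(is_inside('/srv/inventree/InvenTree/part/models.py', '/srv/inventree'))  # True
print(is_inside('/home/user/plugins/my_plugin.py', '/srv/inventree'))          # False
```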

View File

@@ -1,364 +0,0 @@
"""Provides helper functions used throughout the InvenTree project that access the database."""
import io
import logging
from decimal import Decimal
from urllib.parse import urljoin
from django.conf import settings
from django.core.validators import URLValidator
from django.db.utils import OperationalError, ProgrammingError
from django.utils.translation import gettext_lazy as _
import requests
from djmoney.contrib.exchange.models import convert_money
from djmoney.money import Money
from PIL import Image
import common.models
import InvenTree
import InvenTree.helpers_model
import InvenTree.version
from common.notifications import (
InvenTreeNotificationBodies,
NotificationBody,
trigger_notification,
)
from InvenTree.format import format_money
logger = logging.getLogger('inventree')
def getSetting(key, backup_value=None):
"""Shortcut for reading a setting value from the database."""
return common.models.InvenTreeSetting.get_setting(key, backup_value=backup_value)
def get_base_url(request=None):
"""Return the base URL for the InvenTree server.
The base URL is determined in the following order of decreasing priority:
1. If a request object is provided, use the request URL
2. Multi-site is enabled, and the current site has a valid URL
3. If settings.SITE_URL is set (e.g. in the Django settings), use that
4. If the InvenTree setting INVENTREE_BASE_URL is set, use that
"""
# Check if a request is provided
if request:
return request.build_absolute_uri('/')
# Check if multi-site is enabled
try:
from django.contrib.sites.models import Site
return Site.objects.get_current().domain
except (ImportError, RuntimeError):
pass
# Check if a global site URL is provided
if site_url := getattr(settings, 'SITE_URL', None):
return site_url
# Check if a global InvenTree setting is provided
try:
if site_url := common.models.InvenTreeSetting.get_setting(
'INVENTREE_BASE_URL', create=False, cache=False
):
return site_url
except (ProgrammingError, OperationalError):
pass
# No base URL available
return ''
def construct_absolute_url(*arg, base_url=None, request=None):
"""Construct (or attempt to construct) an absolute URL from a relative URL.
Args:
*arg: The relative URL to construct
base_url: The base URL to use for the construction (if not provided, will attempt to determine from settings)
request: The request object to use for the construction (optional)
"""
relative_url = '/'.join(arg)
if not base_url:
base_url = get_base_url(request=request)
return urljoin(base_url, relative_url)
def download_image_from_url(remote_url, timeout=2.5):
"""Download an image file from a remote URL.
This is a potentially dangerous operation, so we must perform some checks:
- The remote URL is available
- The Content-Length is provided, and is not too large
- The file is a valid image file
Arguments:
remote_url: The remote URL to retrieve image
max_size: Maximum allowed image size (default = 1MB)
timeout: Connection timeout in seconds (default = 5)
Returns:
An in-memory PIL image file, if the download was successful
Raises:
requests.exceptions.ConnectionError: Connection could not be established
requests.exceptions.Timeout: Connection timed out
requests.exceptions.HTTPError: Server responded with invalid response code
ValueError: Server responded with invalid 'Content-Length' value
TypeError: Response is not a valid image
"""
# Check that the provided URL at least looks valid
validator = URLValidator()
validator(remote_url)
# Calculate maximum allowable image size (in bytes)
max_size = (
int(
common.models.InvenTreeSetting.get_setting(
'INVENTREE_DOWNLOAD_IMAGE_MAX_SIZE'
)
)
* 1024
* 1024
)
# Add user specified user-agent to request (if specified)
user_agent = common.models.InvenTreeSetting.get_setting(
'INVENTREE_DOWNLOAD_FROM_URL_USER_AGENT'
)
if user_agent:
headers = {'User-Agent': user_agent}
else:
headers = None
try:
response = requests.get(
remote_url,
timeout=timeout,
allow_redirects=True,
stream=True,
headers=headers,
)
# Throw an error if anything goes wrong
response.raise_for_status()
except requests.exceptions.ConnectionError as exc:
raise Exception(_('Connection error') + f': {str(exc)}')
except requests.exceptions.Timeout as exc:
raise exc
except requests.exceptions.HTTPError:
raise requests.exceptions.HTTPError(
_('Server responded with invalid status code') + f': {response.status_code}'
)
except Exception as exc:
raise Exception(_('Exception occurred') + f': {str(exc)}')
if response.status_code != 200:
raise Exception(
_('Server responded with invalid status code') + f': {response.status_code}'
)
try:
content_length = int(response.headers.get('Content-Length', 0))
except ValueError:
raise ValueError(_('Server responded with invalid Content-Length value'))
if content_length > max_size:
raise ValueError(_('Image size is too large'))
# Download the file, ensuring we do not exceed the reported size
file = io.BytesIO()
dl_size = 0
chunk_size = 64 * 1024
for chunk in response.iter_content(chunk_size=chunk_size):
dl_size += len(chunk)
if dl_size > max_size:
raise ValueError(_('Image download exceeded maximum size'))
file.write(chunk)
if dl_size == 0:
raise ValueError(_('Remote server returned empty response'))
# Now, attempt to convert the downloaded data to a valid image file
# img.verify() will throw an exception if the image is not valid
try:
img = Image.open(file).convert()
img.verify()
except Exception:
raise TypeError(_('Supplied URL is not a valid image file'))
return img
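Aside: a standalone sketch of the chunked size check above; the download is aborted as soon as the accumulated size exceeds the limit, regardless of what the Content-Length header advertised. Not from the diff; the helper name is invented:

```py
import io

def read_limited(chunks, max_size):
    """Accumulate an iterable of byte chunks, enforcing a hard size cap."""
    buffer = io.BytesIO()
    total = 0
    for chunk in chunks:
        total += len(chunk)
        if total > max_size:
            raise ValueError('Image download exceeded maximum size')
        buffer.write(chunk)
    if total == 0:
        raise ValueError('Remote server returned empty response')
    return buffer

buf = read_limited([b'a' * 1024] * 4, max_size=1024 * 1024)
print(buf.getbuffer().nbytes)  # 4096
```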
def render_currency(
money,
decimal_places=None,
currency=None,
min_decimal_places=None,
max_decimal_places=None,
include_symbol=True,
):
"""Render a currency / Money object to a formatted string (e.g. for reports).
Arguments:
money: The Money instance to be rendered
decimal_places: The number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES setting.
currency: Optionally convert to the specified currency
min_decimal_places: The minimum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES_MIN setting.
max_decimal_places: The maximum number of decimal places to render to. If unspecified, uses the PRICING_DECIMAL_PLACES setting.
include_symbol: If True, include the currency symbol in the output
"""
if money in [None, '']:
return '-'
if type(money) is not Money:
return '-'
if currency is not None:
# Attempt to convert to the provided currency
# If cannot be done, leave the original
try:
money = convert_money(money, currency)
except Exception:
pass
if decimal_places is None:
decimal_places = common.models.InvenTreeSetting.get_setting(
'PRICING_DECIMAL_PLACES', 6
)
if min_decimal_places is None:
min_decimal_places = common.models.InvenTreeSetting.get_setting(
'PRICING_DECIMAL_PLACES_MIN', 0
)
if max_decimal_places is None:
max_decimal_places = common.models.InvenTreeSetting.get_setting(
'PRICING_DECIMAL_PLACES', 6
)
value = Decimal(str(money.amount)).normalize()
value = str(value)
if '.' in value:
decimals = len(value.split('.')[-1])
decimals = max(decimals, min_decimal_places)
decimals = min(decimals, decimal_places)
decimal_places = decimals
else:
decimal_places = max(decimal_places, 2)
decimal_places = max(decimal_places, max_decimal_places)
return format_money(
money, decimal_places=decimal_places, include_symbol=include_symbol
)
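Aside: a standalone sketch of how render_currency() chooses the number of decimal places, with the three pricing settings passed in as plain arguments. Not from the diff; the defaults mirror the setting fallbacks above:

```py
from decimal import Decimal

def choose_decimal_places(amount, decimal_places=6, min_places=0, max_places=6):
    """Clamp the displayed decimal places between the configured min and max."""
    value = str(Decimal(str(amount)).normalize())
    if '.' in value:
        decimals = len(value.split('.')[-1])
        decimals = max(decimals, min_places)
        decimals = min(decimals, decimal_places)
        return decimals
    # No fractional part: fall back to the configured maximum (at least 2)
    return max(max(decimal_places, 2), max_places)

print(choose_decimal_places('1.5', min_places=2))  # 2
print(choose_decimal_places('1.23456789'))         # 6
print(choose_decimal_places('10'))                 # 6
```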
def getModelsWithMixin(mixin_class) -> list:
"""Return a list of models that inherit from the given mixin class.
Args:
mixin_class: The mixin class to search for
Returns:
List of models that inherit from the given mixin class
"""
from django.contrib.contenttypes.models import ContentType
try:
db_models = [
x.model_class() for x in ContentType.objects.all() if x is not None
]
except (OperationalError, ProgrammingError):
# Database is likely not yet ready
db_models = []
return [x for x in db_models if x is not None and issubclass(x, mixin_class)]
def notify_responsible(
instance,
sender,
content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
exclude=None,
):
"""Notify all responsible parties of a change in an instance.
Parses the supplied content with the provided instance and sender and sends a notification to all responsible users,
excluding the optional excluded list.
Args:
instance: The newly created instance
sender: Sender model reference
content (NotificationBody, optional): _description_. Defaults to InvenTreeNotificationBodies.NewOrder.
exclude (User, optional): User instance that should be excluded. Defaults to None.
"""
import InvenTree.ready
if InvenTree.ready.isImportingData() or InvenTree.ready.isRunningMigrations():
return
notify_users(
[instance.responsible], instance, sender, content=content, exclude=exclude
)
def notify_users(
users,
instance,
sender,
content: NotificationBody = InvenTreeNotificationBodies.NewOrder,
exclude=None,
):
"""Notify all passed users or groups.
Parses the supplied content with the provided instance and sender and sends a notification to all users,
excluding the optional excluded list.
Args:
users: List of users or groups to notify
instance: The newly created instance
sender: Sender model reference
content (NotificationBody, optional): _description_. Defaults to InvenTreeNotificationBodies.NewOrder.
exclude (User, optional): User instance that should be excluded. Defaults to None.
"""
# Setup context for notification parsing
content_context = {
'instance': str(instance),
'verbose_name': sender._meta.verbose_name,
'app_label': sender._meta.app_label,
'model_name': sender._meta.model_name,
}
# Setup notification context
context = {
'instance': instance,
'name': content.name.format(**content_context),
'message': content.message.format(**content_context),
'link': InvenTree.helpers_model.construct_absolute_url(
instance.get_absolute_url()
),
'template': {'subject': content.name.format(**content_context)},
}
if content.template:
context['template']['html'] = content.template.format(**content_context)
# Create notification
trigger_notification(
instance,
content.slug.format(**content_context),
targets=users,
target_exclude=[exclude],
context=context,
)
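Aside: a standalone sketch of how the notification name/message templates are filled in from the content context built above. Not from the diff; the template strings and values are invented:

```py
content_context = {
    'instance': 'PO-0001',
    'verbose_name': 'purchase order',
    'app_label': 'order',
    'model_name': 'purchaseorder',
}

name_template = 'New {verbose_name}'
message_template = 'A new {verbose_name} has been created: {instance}'

context = {
    'name': name_template.format(**content_context),
    'message': message_template.format(**content_context),
}

print(context['name'])     # New purchase order
print(context['message'])  # A new purchase order has been created: PO-0001
```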

View File

@@ -1,48 +0,0 @@
"""Support translation locales for InvenTree.
If a new language translation is supported, it must be added here
After adding a new language, run the following command:
python manage.py makemessages -l <language_code> -e html,js,py --no-wrap
where <language_code> is the code for the new language
Additionally, update the following files with the new locale code:
- /src/frontend/.linguirc file
- /src/frontend/src/context/LanguageContext.tsx
"""
from django.utils.translation import gettext_lazy as _
LOCALES = [
('bg', _('Bulgarian')),
('cs', _('Czech')),
('da', _('Danish')),
('de', _('German')),
('el', _('Greek')),
('en', _('English')),
('es', _('Spanish')),
('es-mx', _('Spanish (Mexican)')),
('fa', _('Farsi / Persian')),
('fi', _('Finnish')),
('fr', _('French')),
('he', _('Hebrew')),
('hi', _('Hindi')),
('hu', _('Hungarian')),
('it', _('Italian')),
('ja', _('Japanese')),
('ko', _('Korean')),
('nl', _('Dutch')),
('no', _('Norwegian')),
('pl', _('Polish')),
('pt', _('Portuguese')),
('pt-br', _('Portuguese (Brazilian)')),
('ru', _('Russian')),
('sk', _('Slovak')),
('sl', _('Slovenian')),
('sr', _('Serbian')),
('sv', _('Swedish')),
('th', _('Thai')),
('tr', _('Turkish')),
('vi', _('Vietnamese')),
('zh-hans', _('Chinese (Simplified)')),
('zh-hant', _('Chinese (Traditional)')),
]

View File

@@ -1,75 +0,0 @@
"""Functions for magic login."""
from django.conf import settings
from django.contrib.auth.models import User
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
import sesame.utils
from rest_framework import serializers
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
import InvenTree.version
def send_simple_login_email(user, link):
"""Send an email with the login link to this user."""
site_name = InvenTree.version.inventreeInstanceName()
context = {'username': user.username, 'site_name': site_name, 'link': link}
email_plaintext_message = render_to_string(
'InvenTree/user_simple_login.txt', context
)
send_mail(
_(f'[{site_name}] Log in to the app'),
email_plaintext_message,
settings.DEFAULT_FROM_EMAIL,
[user.email],
)
class GetSimpleLoginSerializer(serializers.Serializer):
"""Serializer for the simple login view."""
email = serializers.CharField(label=_('Email'))
class GetSimpleLoginView(GenericAPIView):
"""View to send a simple login link."""
permission_classes = ()
serializer_class = GetSimpleLoginSerializer
def post(self, request, *args, **kwargs):
"""Get the token for the current user or fail."""
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
self.email_submitted(email=serializer.data['email'])
return Response({'status': 'ok'})
def email_submitted(self, email):
"""Notify user about link."""
user = self.get_user(email)
if user is None:
print('user not found:', email)
return
link = self.create_link(user)
send_simple_login_email(user, link)
def get_user(self, email):
"""Find the user with this email address."""
try:
return User.objects.get(email=email)
except User.DoesNotExist:
return None
def create_link(self, user):
"""Create a login link for this user."""
link = reverse('sesame-login')
link = self.request.build_absolute_uri(link)
link += sesame.utils.get_query_string(user)
return link
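Aside: an illustration of the final magic-login URL that create_link() assembles. The host, path and token here are placeholder values only; the real path comes from reverse('sesame-login') and the real token from sesame.utils.get_query_string(user):

```py
base = 'https://inventree.example.com'      # placeholder host
login_path = '/sesame-login/'               # placeholder for the reversed URL
query_string = '?sesame=AbCdEf123456'       # placeholder token query string

link = base + login_path + query_string
print(link)  # https://inventree.example.com/sesame-login/?sesame=AbCdEf123456
```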

View File

@@ -1,19 +0,0 @@
"""Check if there are any pending database migrations, and run them."""
import logging
from django.core.management.base import BaseCommand
from InvenTree.tasks import check_for_migrations
logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Check if there are any pending database migrations, and run them."""
def handle(self, *args, **kwargs):
"""Check for any pending database migrations."""
logger.info('Checking for pending database migrations')
check_for_migrations(force=True, reload_registry=False)
logger.info('Database migrations complete')

View File

@@ -1,4 +1,6 @@
"""Custom management command to cleanup old settings that are not defined anymore."""
"""
Custom management command to cleanup old settings that are not defined anymore
"""
import logging
@@ -8,11 +10,13 @@ logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Cleanup old (undefined) settings in the database."""
"""
Cleanup old (undefined) settings in the database
"""
def handle(self, *args, **kwargs):
"""Cleanup old (undefined) settings in the database."""
logger.info('Collecting settings')
logger.info("Collecting settings")
from common.models import InvenTreeSetting, InvenTreeUserSetting
# general settings
@@ -23,7 +27,7 @@ class Command(BaseCommand):
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
logger.info("deleted setting '%s'", setting.key)
logger.info(f"deleted setting '{setting.key}'")
# user settings
db_settings = InvenTreeUserSetting.objects.all()
@@ -33,6 +37,6 @@ class Command(BaseCommand):
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
logger.info("deleted user setting '%s'", setting.key)
logger.info(f"deleted user setting '{setting.key}'")
logger.info('checked all settings')
logger.info("checked all settings")

View File

@@ -1,4 +1,6 @@
"""Custom management command to prerender files."""
"""
Custom management command to prerender files
"""
import os
@@ -11,25 +13,21 @@ from django.utils.translation import override as lang_over
def render_file(file_name, source, target, locales, ctx):
"""Renders a file into all provided locales."""
""" renders a file into all provided locales """
for locale in locales:
# Enforce lower-case for locale names
locale = locale.lower()
locale = locale.replace('_', '-')
target_file = os.path.join(target, locale + '.' + file_name)
with open(target_file, 'w') as localised_file:
with lang_over(locale):
rendered = render_to_string(os.path.join(source, file_name), ctx)
localised_file.write(rendered)
renderd = render_to_string(os.path.join(source, file_name), ctx)
localised_file.write(renderd)
class Command(BaseCommand):
"""Django command to prerender files."""
"""
django command to prerender files
"""
def handle(self, *args, **kwargs):
"""Django command to prerender files."""
# static directories
LC_DIR = settings.LOCALE_PATHS[0]
SOURCE_DIR = settings.STATICFILES_I18_SRC
@@ -49,18 +47,15 @@ class Command(BaseCommand):
# render!
request = HttpRequest()
ctx = {}
processors = tuple(
import_string(path) for path in settings.STATFILES_I18_PROCESSORS
)
processors = tuple(import_string(path) for path in settings.STATFILES_I18_PROCESSORS)
for processor in processors:
ctx.update(processor(request))
for file in os.listdir(SOURCE_DIR):
for file in os.listdir(SOURCE_DIR, ):
path = os.path.join(SOURCE_DIR, file)
if os.path.exists(path) and os.path.isfile(path):
print(f"render {file}")
render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
else:
raise NotImplementedError(
'Using multi-level directories is not implemented at this point'
) # TODO multilevel dir if needed
print(f'Rendered all files in {SOURCE_DIR}')
raise NotImplementedError('Using multi-level directories is not implemented at this point') # TODO multilevel dir if needed
print(f"rendered all files in {SOURCE_DIR}")

View File

@@ -1,75 +1,60 @@
"""Custom management command to rebuild all MPTT models.
"""
Custom management command to rebuild all MPTT models
- This is crucial after importing any fixtures, etc
"""
import logging
from django.core.management.base import BaseCommand
from maintenance_mode.core import maintenance_mode_on, set_maintenance_mode
logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Rebuild all database models which leverage the MPTT structure."""
"""
Rebuild all database models which leverage the MPTT structure.
"""
def handle(self, *args, **kwargs):
"""Rebuild all database models which leverage the MPTT structure."""
with maintenance_mode_on():
self.rebuild_models()
set_maintenance_mode(False)
def rebuild_models(self):
"""Rebuild all MPTT models in the database."""
# Part model
try:
logger.info('Rebuilding Part objects')
print("Rebuilding Part objects")
from part.models import Part
Part.objects.rebuild()
except Exception:
logger.info('Error rebuilding Part objects')
except:
print("Error rebuilding Part objects")
# Part category
try:
logger.info('Rebuilding PartCategory objects')
print("Rebuilding PartCategory objects")
from part.models import PartCategory
PartCategory.objects.rebuild()
except Exception:
logger.info('Error rebuilding PartCategory objects')
except:
print("Error rebuilding PartCategory objects")
# StockItem model
try:
logger.info('Rebuilding StockItem objects')
print("Rebuilding StockItem objects")
from stock.models import StockItem
StockItem.objects.rebuild()
except Exception:
logger.info('Error rebuilding StockItem objects')
except:
print("Error rebuilding StockItem objects")
# StockLocation model
try:
logger.info('Rebuilding StockLocation objects')
print("Rebuilding StockLocation objects")
from stock.models import StockLocation
StockLocation.objects.rebuild()
except Exception:
logger.info('Error rebuilding StockLocation objects')
except:
print("Error rebuilding StockLocation objects")
# Build model
try:
logger.info('Rebuilding Build objects')
print("Rebuilding Build objects")
from build.models import Build
Build.objects.rebuild()
except Exception:
logger.info('Error rebuilding Build objects')
except:
print("Error rebuilding Build objects")

View File

@@ -1,4 +1,5 @@
"""Custom management command to rebuild thumbnail images.
"""
Custom management command to rebuild thumbnail images
- May be required after importing a new dataset, for example
"""
@@ -6,6 +7,7 @@
import logging
import os
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db.utils import OperationalError, ProgrammingError
@@ -18,52 +20,48 @@ logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Rebuild all thumbnail images."""
"""
Rebuild all thumbnail images
"""
def rebuild_thumbnail(self, model):
"""Rebuild the thumbnail specified by the "image" field of the provided model."""
"""
Rebuild the thumbnail specified by the "image" field of the provided model
"""
if not model.image:
return
img = model.image
url = img.thumbnail.name
loc = os.path.join(settings.MEDIA_ROOT, url)
# Check for image paths
img_paths = []
if not os.path.exists(loc):
logger.info(f"Generating thumbnail image for '{img}'")
for x in [model.image, model.image.thumbnail, model.image.preview]:
if x and x.path:
img_paths.append(x.path)
if len(img_paths) > 0:
if all((os.path.exists(path) for path in img_paths)):
# All images exist - skip further work
return
logger.info("Generating thumbnail image for '%s'", img)
try:
model.image.render_variations(replace=False)
except FileNotFoundError:
logger.warning("Warning: Image file '%s' is missing", img)
except UnidentifiedImageError:
logger.warning("Warning: Image file '%s' is not a valid image", img)
try:
model.image.render_variations(replace=False)
except FileNotFoundError:
logger.warning(f"Warning: Image file '{img}' is missing")
except UnidentifiedImageError:
logger.warning(f"Warning: Image file '{img}' is not a valid image")
def handle(self, *args, **kwargs):
"""Rebuild all thumbnail images."""
logger.info('Rebuilding Part thumbnails')
logger.info("Rebuilding Part thumbnails")
for part in Part.objects.exclude(image=None):
try:
self.rebuild_thumbnail(part)
except (OperationalError, ProgrammingError):
logger.exception('ERROR: Database read error.')
logger.error("ERROR: Database read error.")
break
logger.info('Rebuilding Company thumbnails')
logger.info("Rebuilding Company thumbnails")
for company in Company.objects.exclude(image=None):
try:
self.rebuild_thumbnail(company)
except (OperationalError, ProgrammingError):
logger.exception('ERROR: Database read error.')
logger.error("ERROR: Database read error.")
break

View File

@@ -1,29 +1,27 @@
"""Custom management command to remove MFA for a user."""
"""
Custom management command to remove MFA for a user
"""
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""Remove MFA for a user."""
"""
Remove MFA for a user
"""
def add_arguments(self, parser):
"""Add the arguments."""
parser.add_argument('mail', type=str)
def handle(self, *args, **kwargs):
"""Remove MFA for the supplied user (by mail)."""
# general settings
mail = kwargs.get('mail')
if not mail:
raise KeyError('A mail is required')
user = get_user_model()
mfa_user = [
*set(
user.objects.filter(email=mail)
| user.objects.filter(emailaddress__email=mail)
)
]
mfa_user = [*set(user.objects.filter(email=mail) | user.objects.filter(emailaddress__email=mail))]
if len(mfa_user) == 0:
print('No user with this mail associated')

View File

@@ -1,19 +0,0 @@
"""Check if there are any pending database migrations, and run them."""
import logging
from django.core.management.base import BaseCommand
from InvenTree.tasks import check_for_migrations
logger = logging.getLogger('inventree')
class Command(BaseCommand):
"""Check if there are any pending database migrations, and run them."""
def handle(self, *args, **kwargs):
"""Check for any pending database migrations."""
logger.info('Checking for pending database migrations')
check_for_migrations(force=True, reload_registry=False)
logger.info('Database migrations complete')

View File

@@ -1,4 +1,6 @@
"""Custom management command, wait for the database to be ready!"""
"""
Custom management command, wait for the database to be ready!
"""
import time
@@ -8,16 +10,19 @@ from django.db.utils import ImproperlyConfigured, OperationalError
class Command(BaseCommand):
"""Django command to pause execution until the database is ready."""
"""
django command to pause execution until the database is ready
"""
def handle(self, *args, **kwargs):
"""Wait till the database is ready."""
self.stdout.write('Waiting for database...')
self.stdout.write("Waiting for database...")
connected = False
while not connected:
time.sleep(2)
time.sleep(5)
try:
connection.ensure_connection()
@@ -25,12 +30,12 @@ class Command(BaseCommand):
connected = True
except OperationalError as e:
self.stdout.write(f'Could not connect to database: {e}')
self.stdout.write(f"Could not connect to database: {e}")
except ImproperlyConfigured as e:
self.stdout.write(f'Improperly configured: {e}')
self.stdout.write(f"Improperly configured: {e}")
else:
if not connection.is_usable():
self.stdout.write('Database configuration is not usable')
self.stdout.write("Database configuration is not usable")
if connected:
self.stdout.write('Database connection successful!')
self.stdout.write("Database connection sucessful!")

View File

@@ -1,5 +1,3 @@
"""Custom metadata for DRF."""
import logging
from rest_framework import serializers
@@ -7,17 +5,15 @@ from rest_framework.fields import empty
from rest_framework.metadata import SimpleMetadata
from rest_framework.utils import model_meta
import common.models
import InvenTree.permissions
import users.models
from InvenTree.helpers import str2bool
from InvenTree.serializers import DependentField
logger = logging.getLogger('inventree')
class InvenTreeMetadata(SimpleMetadata):
"""Custom metadata class for the DRF API.
"""
Custom metadata class for the DRF API.
This custom metadata class limits the available "actions",
based on the user's role permissions.
@@ -27,10 +23,11 @@ class InvenTreeMetadata(SimpleMetadata):
Additionally, we include some extra information about database models,
so we can perform lookup for ForeignKey related fields.
"""
def determine_metadata(self, request, view):
"""Overwrite the metadata to adapt to the request user."""
self.request = request
self.view = view
@@ -38,7 +35,7 @@ class InvenTreeMetadata(SimpleMetadata):
"""
Custom context information to pass through to the OPTIONS endpoint,
if the "context=True" is supplied to the OPTIONS request
if the "context=True" is supplied to the OPTIONS requst
Serializer class can supply context data by defining a get_context_data() method (no arguments)
"""
@@ -46,9 +43,8 @@ class InvenTreeMetadata(SimpleMetadata):
context = {}
if str2bool(request.query_params.get('context', False)):
if hasattr(self, 'serializer') and hasattr(
self.serializer, 'get_context_data'
):
if hasattr(self.serializer, 'get_context_data'):
context = self.serializer.get_context_data()
metadata['context'] = context
@@ -62,7 +58,7 @@ class InvenTreeMetadata(SimpleMetadata):
try:
# Extract the model name associated with the view
self.model = InvenTree.permissions.get_model_for_view(view)
self.model = view.serializer_class.Meta.model
# Construct the 'table name' from the model
app_label = self.model._meta.app_label
@@ -70,43 +66,37 @@ class InvenTreeMetadata(SimpleMetadata):
metadata['model'] = tbl_label
table = f'{app_label}_{tbl_label}'
table = f"{app_label}_{tbl_label}"
actions = metadata.get('actions', None)
if actions is None:
actions = {}
if actions is not None:
check = users.models.RuleSet.check_table_permission
check = users.models.RuleSet.check_table_permission
# Map the request method to a permission type
rolemap = {
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
# Map the request method to a permission type
rolemap = {
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
# let the view define a custom rolemap
if hasattr(view, 'rolemap'):
rolemap.update(view.rolemap)
# Remove any HTTP methods that the user does not have permission for
for method, permission in rolemap.items():
# Remove any HTTP methods that the user does not have permission for
for method, permission in rolemap.items():
result = check(user, table, permission)
result = check(user, table, permission)
if method in actions and not result:
del actions[method]
if method in actions and not result:
del actions[method]
# Add a 'DELETE' action if we are allowed to delete
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
actions['DELETE'] = {}
# Add a 'DELETE' action if we are allowed to delete
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
actions['DELETE'] = True
# Add a 'VIEW' action if we are allowed to view
if 'GET' in view.allowed_methods and check(user, table, 'view'):
actions['GET'] = {}
metadata['actions'] = actions
# Add a 'VIEW' action if we are allowed to view
if 'GET' in view.allowed_methods and check(user, table, 'view'):
actions['GET'] = True
except AttributeError:
# We will assume that if the serializer class does *not* have a Meta
@@ -116,16 +106,17 @@ class InvenTreeMetadata(SimpleMetadata):
return metadata
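The determine_metadata() logic above strips HTTP methods out of the OPTIONS "actions" block when the user's role does not grant the matching table permission, and adds GET/DELETE entries when it does. A standalone sketch of that filtering step (the has_permission callable and all names here are the editor's illustration, not InvenTree's API):

def filter_actions(actions, allowed_methods, has_permission):
    """Remove actions the user may not perform; add GET/DELETE stubs if permitted."""
    rolemap = {'POST': 'add', 'PUT': 'change', 'PATCH': 'change', 'DELETE': 'delete'}
    for method, permission in rolemap.items():
        if method in actions and not has_permission(permission):
            del actions[method]
    if 'DELETE' in allowed_methods and has_permission('delete'):
        actions['DELETE'] = {}
    if 'GET' in allowed_methods and has_permission('view'):
        actions['GET'] = {}
    return actions

perms = {'view', 'change'}
print(filter_actions({'POST': {}, 'PUT': {}}, ['GET', 'PUT', 'DELETE'], perms.__contains__))
# -> {'PUT': {}, 'GET': {}}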
def get_serializer_info(self, serializer):
"""Override get_serializer_info so that we can add 'default' values to any fields whose Meta.model specifies a default value."""
"""
Override get_serializer_info so that we can add 'default' values
to any fields whose Meta.model specifies a default value
"""
self.serializer = serializer
serializer_info = super().get_serializer_info(serializer)
model_class = None
# Attributes to copy extra attributes from the model to the field (if they don't exist)
extra_attributes = ['help_text', 'max_length']
try:
model_class = serializer.Meta.model
@@ -140,14 +131,17 @@ class InvenTreeMetadata(SimpleMetadata):
# Iterate through simple fields
for name, field in model_fields.fields.items():
if name in serializer_info.keys():
if field.has_default():
default = field.default
if callable(default):
try:
default = default()
except Exception:
except:
continue
serializer_info[name]['default'] = default
@@ -155,13 +149,18 @@ class InvenTreeMetadata(SimpleMetadata):
elif name in model_default_values:
serializer_info[name]['default'] = model_default_values[name]
for attr in extra_attributes:
# Attributes to copy from the model to the field (if they don't exist)
attributes = ['help_text']
for attr in attributes:
if attr not in serializer_info[name]:
if hasattr(field, attr):
serializer_info[name][attr] = getattr(field, attr)
# Iterate through relations
for name, relation in model_fields.relations.items():
if name not in serializer_info.keys():
# Skip relation not defined in serializer
continue
@@ -172,17 +171,10 @@ class InvenTreeMetadata(SimpleMetadata):
# Extract and provide the "limit_choices_to" filters
# This is used to automatically filter AJAX requests
serializer_info[name]['filters'] = (
relation.model_field.get_limit_choices_to()
)
serializer_info[name]['filters'] = relation.model_field.get_limit_choices_to()
for attr in extra_attributes:
if attr not in serializer_info[name] and hasattr(
relation.model_field, attr
):
serializer_info[name][attr] = getattr(
relation.model_field, attr
)
if 'help_text' not in serializer_info[name] and hasattr(relation.model_field, 'help_text'):
serializer_info[name]['help_text'] = relation.model_field.help_text
if name in model_default_values:
serializer_info[name]['default'] = model_default_values[name]
@@ -209,22 +201,24 @@ class InvenTreeMetadata(SimpleMetadata):
pk = kwargs[field]
break
if issubclass(model_class, common.models.BaseInvenTreeSetting):
instance = model_class.get_setting_object(**kwargs, create=False)
elif pk is not None:
if pk is not None:
try:
instance = model_class.objects.get(pk=pk)
except (ValueError, model_class.DoesNotExist):
pass
if instance is not None:
"""If there is an instance associated with this API View, introspect that instance to find any specific API info."""
"""
If there is an instance associated with this API View,
introspect that instance to find any specific API info.
"""
if hasattr(instance, 'api_instance_filters'):
instance_filters = instance.api_instance_filters()
for field_name, field_filters in instance_filters.items():
if field_name not in serializer_info.keys():
# The field might be missing, but is added later on
# This function seems to get called multiple times?
@@ -239,13 +233,12 @@ class InvenTreeMetadata(SimpleMetadata):
return serializer_info
def get_field_info(self, field):
"""Given an instance of a serializer field, return a dictionary of metadata about it.
"""
Given an instance of a serializer field, return a dictionary
of metadata about it.
We take the regular DRF metadata and add our own unique flavor
"""
# Try to add the child property to the dependent field to be used by the super call
if self.label_lookup[field] == 'dependent field':
field.get_child(raise_exception=True)
# Run super method first
field_info = super().get_field_info(field)
@@ -256,20 +249,17 @@ class InvenTreeMetadata(SimpleMetadata):
# Force non-nullable fields to read as "required"
# (even if there is a default value!)
if not field.allow_null and not (
hasattr(field, 'allow_blank') and field.allow_blank
):
if not field.allow_null and not (hasattr(field, 'allow_blank') and field.allow_blank):
field_info['required'] = True
# Introspect writable related fields
if field_info['type'] == 'field' and not field_info['read_only']:
# If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
if isinstance(field, serializers.PrimaryKeyRelatedField):
model = field.queryset.model
else:
logger.debug(
'Could not extract model for: %s -> %s', field_info.get('label'), field
)
logger.debug("Could not extract model for:", field_info['label'], '->', field)
model = None
if model:
@@ -283,11 +273,4 @@ class InvenTreeMetadata(SimpleMetadata):
else:
field_info['api_url'] = model.get_api_url()
# Add more metadata about dependent fields
if field_info['type'] == 'dependent field':
field_info['depends_on'] = field.depends_on
return field_info
InvenTreeMetadata.label_lookup[DependentField] = 'dependent field'

View File

@@ -1,68 +1,28 @@
"""Middleware for InvenTree."""
# -*- coding: utf-8 -*-
import logging
import sys
from django.conf import settings
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
from django.http import HttpResponse
from django.shortcuts import redirect
from django.urls import Resolver404, include, path, resolve, reverse_lazy
from django.urls import Resolver404, include, re_path, reverse_lazy
from allauth_2fa.middleware import AllauthTwoFactorMiddleware, BaseRequire2FAMiddleware
from error_report.middleware import ExceptionProcessor
from allauth_2fa.middleware import (AllauthTwoFactorMiddleware,
BaseRequire2FAMiddleware)
from rest_framework.authtoken.models import Token
from common.models import InvenTreeSetting
from InvenTree.urls import frontendpatterns
from users.models import ApiToken
logger = logging.getLogger('inventree')
def get_token_from_request(request):
"""Extract token information from a request object."""
auth_keys = ['Authorization', 'authorization']
token_keys = ['token', 'bearer']
for k in auth_keys:
if auth_header := request.headers.get(k, None):
auth_header = auth_header.strip().lower().split()
if len(auth_header) > 1:
if auth_header[0].strip().lower().replace(':', '') in token_keys:
token = auth_header[1]
return token
return None
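get_token_from_request() above accepts headers such as "Authorization: Token <key>" or "Bearer <key>" and returns the raw token value. A standalone sketch of the same header parsing, taking a plain dict instead of a Django request (editor's illustration):

def token_from_headers(headers):
    """Return the token value from an Authorization-style header, or None."""
    token_keys = {'token', 'bearer'}
    for key in ('Authorization', 'authorization'):
        value = headers.get(key)
        if not value:
            continue
        parts = value.strip().split()
        if len(parts) > 1 and parts[0].lower().rstrip(':') in token_keys:
            return parts[1]
    return None

print(token_from_headers({'Authorization': 'Token abc123'}))    # -> abc123
print(token_from_headers({'authorization': 'Bearer xyz789'}))   # -> xyz789
print(token_from_headers({'Authorization': 'Basic dXNlcg=='}))  # -> None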
logger = logging.getLogger("inventree")
class AuthRequiredMiddleware(object):
"""Check for user to be authenticated."""
def __init__(self, get_response):
"""Save response object."""
self.get_response = get_response
def check_token(self, request) -> bool:
"""Check if the user is authenticated via token."""
if token := get_token_from_request(request):
# Does the provided token match a valid user?
try:
token = ApiToken.objects.get(key=token)
if token.active and token.user:
# Provide the user information to the request
request.user = token.user
return True
except ApiToken.DoesNotExist:
logger.warning('Access denied for unknown token %s', token)
return False
def __call__(self, request):
"""Check if user needs to be authenticated and is.
Redirects to login if not authenticated.
"""
# Code to be executed for each request before
# the view (and later middleware) are called.
@@ -72,15 +32,9 @@ class AuthRequiredMiddleware(object):
if request.path_info.startswith('/api/'):
return self.get_response(request)
# Is the function exempt from auth requirements?
path_func = resolve(request.path).func
if getattr(path_func, 'auth_exempt', False) is True:
return self.get_response(request)
if not request.user.is_authenticated:
"""
Normally, a web-based session would use csrftoken based authentication.
However when running an external application (e.g. the InvenTree app or Python library),
we must validate the user token manually.
"""
@@ -96,15 +50,22 @@ class AuthRequiredMiddleware(object):
elif request.path_info.startswith('/accounts/'):
authorized = True
elif (
request.path_info.startswith(f'/{settings.FRONTEND_URL_BASE}/')
or request.path_info.startswith('/assets/')
or request.path_info == f'/{settings.FRONTEND_URL_BASE}'
):
authorized = True
elif 'Authorization' in request.headers.keys() or 'authorization' in request.headers.keys():
auth = request.headers.get('Authorization', request.headers.get('authorization')).strip()
elif self.check_token(request):
authorized = True
if auth.lower().startswith('token') and len(auth.split()) == 2:
token_key = auth.split()[1]
# Does the provided token match a valid user?
try:
token = Token.objects.get(key=token_key)
# Provide the user information to the request
request.user = token.user
authorized = True
except Token.DoesNotExist:
logger.warning(f"Access denied for unknown token {token_key}")
# No authorization was found for the request
if not authorized:
@@ -119,34 +80,34 @@ class AuthRequiredMiddleware(object):
]
# Do not redirect requests to any of these paths
paths_ignore = ['/api/', '/js/', '/media/', '/static/']
paths_ignore = [
'/api/',
'/js/',
'/media/',
'/static/',
]
if path not in urls and not any(
path.startswith(p) for p in paths_ignore
):
if path not in urls and not any([path.startswith(p) for p in paths_ignore]):
# Save the 'next' parameter to pass through to the login view
return redirect(
f'{reverse_lazy("account_login")}?next={request.path}'
)
# Return a 401 (Unauthorized) response code for this request
return HttpResponse('Unauthorized', status=401)
return redirect('{}?next={}'.format(reverse_lazy('account_login'), request.path))
else:
# Return a 401 (Unauthorized) response code for this request
return HttpResponse('Unauthorized', status=401)
response = self.get_response(request)
return response
url_matcher = path('', include(frontendpatterns))
url_matcher = re_path('', include(frontendpatterns))
class Check2FAMiddleware(BaseRequire2FAMiddleware):
"""Check if user is required to have MFA enabled."""
"""check if user is required to have MFA enabled"""
def require_2fa(self, request):
"""Use setting to check if MFA should be enforced for frontend page."""
from common.models import InvenTreeSetting
# Superusers are required to have 2FA.
try:
if url_matcher.resolve(request.path[1:]):
return InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA')
@@ -156,10 +117,8 @@ class Check2FAMiddleware(BaseRequire2FAMiddleware):
class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
"""This function ensures only frontend code triggers the MFA auth cycle."""
"""This function ensures only frontend code triggers the MFA auth cycle"""
def process_request(self, request):
"""Check if requested url is forntend and enforce MFA check."""
try:
if not url_matcher.resolve(request.path[1:]):
super().process_request(request)
@@ -168,54 +127,13 @@ class CustomAllauthTwoFactorMiddleware(AllauthTwoFactorMiddleware):
class InvenTreeRemoteUserMiddleware(PersistentRemoteUserMiddleware):
"""Middleware to check if HTTP-header based auth is enabled and to set it up."""
"""
Middleware to check if HTTP-header based auth is enabled and to set it up
"""
header = settings.REMOTE_LOGIN_HEADER
def process_request(self, request):
"""Check if proxy login is enabled."""
if not settings.REMOTE_LOGIN:
return
return super().process_request(request)
class InvenTreeExceptionProcessor(ExceptionProcessor):
"""Custom exception processor that respects blocked errors."""
def process_exception(self, request, exception):
"""Check if kind is ignored before processing."""
kind, info, data = sys.exc_info()
# Check if the error is on the ignore list
if kind in settings.IGNORED_ERRORS:
return
import traceback
from django.views.debug import ExceptionReporter
from error_report.models import Error
from error_report.settings import ERROR_DETAIL_SETTINGS
# Error reporting is disabled
if not ERROR_DETAIL_SETTINGS.get('ERROR_DETAIL_ENABLE', True):
return
path = request.build_absolute_uri()
# Truncate the path to a reasonable length
# Otherwise we get a database error,
# because the path field is limited to 200 characters
if len(path) > 200:
path = path[:195] + '...'
error = Error.objects.create(
kind=kind.__name__,
html=ExceptionReporter(request, kind, info, data).get_traceback_html(),
path=path,
info=info,
data='\n'.join(traceback.format_exception(kind, info, data)),
)
error.save()

View File

@@ -1,12 +0,0 @@
# Generated by Django 3.2.15 on 2022-10-03 18:57
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
]

View File

@@ -1,188 +0,0 @@
"""Mixins for (API) views in the whole project."""
from django.core.exceptions import FieldDoesNotExist
from rest_framework import generics, mixins, status
from rest_framework.response import Response
from InvenTree.fields import InvenTreeNotesField
from InvenTree.helpers import remove_non_printable_characters, strip_html_tags
class CleanMixin:
"""Model mixin class which cleans inputs using the Mozilla bleach tools."""
# Define a list of field names which will *not* be cleaned
SAFE_FIELDS = []
def create(self, request, *args, **kwargs):
"""Override to clean data before processing it."""
serializer = self.get_serializer(data=self.clean_data(request.data))
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response(
serializer.data, status=status.HTTP_201_CREATED, headers=headers
)
def update(self, request, *args, **kwargs):
"""Override to clean data before processing it."""
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer = self.get_serializer(
instance, data=self.clean_data(request.data), partial=partial
)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
if getattr(instance, '_prefetched_objects_cache', None):
# If 'prefetch_related' has been applied to a queryset, we need to
# forcibly invalidate the prefetch cache on the instance.
instance._prefetched_objects_cache = {}
return Response(serializer.data)
def clean_string(self, field: str, data: str) -> str:
"""Clean / sanitize a single input string.
Note that this function will *allow* orphaned <>& characters,
which would normally be escaped by bleach.
Nominally, the only thing that will be "cleaned" will be HTML tags
Ref: https://github.com/mozilla/bleach/issues/192
"""
cleaned = strip_html_tags(data, field_name=field)
# By default, newline characters are removed
remove_newline = True
try:
if hasattr(self, 'serializer_class'):
model = self.serializer_class.Meta.model
field = model._meta.get_field(field)
# The following field types allow newline characters
allow_newline = [InvenTreeNotesField]
for field_type in allow_newline:
if issubclass(type(field), field_type):
remove_newline = False
break
except AttributeError:
pass
except FieldDoesNotExist:
pass
cleaned = remove_non_printable_characters(
cleaned, remove_newline=remove_newline
)
return cleaned
def clean_data(self, data: dict) -> dict:
"""Clean / sanitize data.
This uses Mozilla's bleach under the hood to disable certain HTML tags by
encoding them, so that script tags and similar content cannot execute.
The result can be longer than the input and may make some character combinations
look `ugly`. Prevents XSS at the server level.
Args:
data (dict): Data that should be sanitized.
Returns:
dict: The provided data, sanitized; still in the same order.
"""
clean_data = {}
for k, v in data.items():
if k in self.SAFE_FIELDS:
ret = v
elif isinstance(v, str):
ret = self.clean_string(k, v)
elif isinstance(v, dict):
ret = self.clean_data(v)
else:
ret = v
clean_data[k] = ret
return clean_data
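clean_data() above walks the payload recursively and passes every string through clean_string(), which uses bleach to strip HTML tags. A standalone sketch of the recursive walk, using html.escape from the standard library in place of bleach so it runs without extra dependencies (editor's illustration, not the actual CleanMixin behaviour):

import html

def clean_payload(data, safe_fields=()):
    """Recursively escape string values in a dict, skipping whitelisted keys."""
    cleaned = {}
    for key, value in data.items():
        if key in safe_fields:
            cleaned[key] = value
        elif isinstance(value, str):
            cleaned[key] = html.escape(value)
        elif isinstance(value, dict):
            cleaned[key] = clean_payload(value, safe_fields)
        else:
            cleaned[key] = value
    return cleaned

print(clean_payload({'name': '<script>alert(1)</script>', 'meta': {'note': 'a & b'}}))
# -> {'name': '&lt;script&gt;alert(1)&lt;/script&gt;', 'meta': {'note': 'a &amp; b'}}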
class ListAPI(generics.ListAPIView):
"""View for list API."""
class ListCreateAPI(CleanMixin, generics.ListCreateAPIView):
"""View for list and create API."""
class CreateAPI(CleanMixin, generics.CreateAPIView):
"""View for create API."""
class RetrieveAPI(generics.RetrieveAPIView):
"""View for retrieve API."""
pass
class RetrieveUpdateAPI(CleanMixin, generics.RetrieveUpdateAPIView):
"""View for retrieve and update API."""
pass
class CustomDestroyModelMixin:
"""This mixin was created pass the kwargs from the API to the models."""
def destroy(self, request, *args, **kwargs):
"""Custom destroy method to pass kwargs."""
instance = self.get_object()
self.perform_destroy(instance, **kwargs)
return Response(status=status.HTTP_204_NO_CONTENT)
def perform_destroy(self, instance, **kwargs):
"""Custom destroy method to pass kwargs."""
instance.delete(**kwargs)
class CustomRetrieveUpdateDestroyAPIView(
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
CustomDestroyModelMixin,
generics.GenericAPIView,
):
"""This APIView was created pass the kwargs from the API to the models."""
def get(self, request, *args, **kwargs):
"""Custom get method to pass kwargs."""
return self.retrieve(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
"""Custom put method to pass kwargs."""
return self.update(request, *args, **kwargs)
def patch(self, request, *args, **kwargs):
"""Custom patch method to pass kwargs."""
return self.partial_update(request, *args, **kwargs)
def delete(self, request, *args, **kwargs):
"""Custom delete method to pass kwargs."""
return self.destroy(request, *args, **kwargs)
class CustomRetrieveUpdateDestroyAPI(CleanMixin, CustomRetrieveUpdateDestroyAPIView):
"""This APIView was created pass the kwargs from the API to the models."""
class RetrieveUpdateDestroyAPI(CleanMixin, generics.RetrieveUpdateDestroyAPIView):
"""View for retrieve, update and destroy API."""
class UpdateAPI(CleanMixin, generics.UpdateAPIView):
"""View for update API."""

File diff suppressed because it is too large

View File

@@ -1,28 +1,12 @@
"""Permission set for InvenTree."""
from functools import wraps
from rest_framework import permissions
import users.models
def get_model_for_view(view, raise_error=True):
"""Attempt to introspect the 'model' type for an API view."""
if hasattr(view, 'get_permission_model'):
return view.get_permission_model()
if hasattr(view, 'serializer_class'):
return view.serializer_class.Meta.model
if hasattr(view, 'get_serializer_class'):
return view.get_serializer_class().Meta.model
raise AttributeError(f'Serializer class not specified for {view.__class__}')
class RolePermission(permissions.BasePermission):
"""Role mixin for API endpoints, allowing us to specify the user "role" which is required for certain operations.
"""
Role mixin for API endpoints, allowing us to specify the user "role"
which is required for certain operations.
Each endpoint can have one or more of the following actions:
- GET
@@ -41,10 +25,14 @@ class RolePermission(permissions.BasePermission):
to perform the specified action.
For example, a DELETE action will be rejected unless the user has the "part.remove" permission
"""
def has_permission(self, request, view):
"""Determine if the current user has the specified permissions."""
"""
Determine if the current user has the specified permissions
"""
user = request.user
# Superuser can do it all
@@ -61,61 +49,21 @@ class RolePermission(permissions.BasePermission):
'DELETE': 'delete',
}
# let the view define a custom rolemap
if hasattr(view, 'rolemap'):
rolemap.update(view.rolemap)
permission = rolemap[request.method]
# The required role may be defined for the view class
if role := getattr(view, 'role_required', None):
# If the role is specified as "role.permission", split it
if '.' in role:
role, permission = role.split('.')
return users.models.check_user_role(user, role, permission)
try:
# Extract the model name associated with this request
model = get_model_for_view(view)
model = view.serializer_class.Meta.model
app_label = model._meta.app_label
model_name = model._meta.model_name
table = f'{app_label}_{model_name}'
table = f"{app_label}_{model_name}"
except AttributeError:
# We will assume that if the serializer class does *not* have a Meta,
# then we don't need a permission
return True
return users.models.RuleSet.check_table_permission(user, table, permission)
result = users.models.RuleSet.check_table_permission(user, table, permission)
class IsSuperuser(permissions.IsAdminUser):
"""Allows access only to superuser users."""
def has_permission(self, request, view):
"""Check if the user is a superuser."""
return bool(request.user and request.user.is_superuser)
class IsStaffOrReadOnly(permissions.IsAdminUser):
"""Allows read-only access to any user, but write access is restricted to staff users."""
def has_permission(self, request, view):
"""Check if the user is a superuser."""
return bool(
request.user
and request.user.is_staff
or request.method in permissions.SAFE_METHODS
)
def auth_exempt(view_func):
"""Mark a view function as being exempt from auth requirements."""
def wrapped_view(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped_view.auth_exempt = True
return wraps(view_func)(wrapped_view)
return result
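RolePermission above maps each HTTP method to a role permission ('POST' -> 'add', and so on) and then asks the rule set whether the user's role grants that permission on the model's table. A standalone sketch of that lookup, with the permission store reduced to a plain dict rather than InvenTree's RuleSet (all names here are illustrative):

ROLEMAP = {'GET': 'view', 'OPTIONS': 'view', 'POST': 'add',
           'PUT': 'change', 'PATCH': 'change', 'DELETE': 'delete'}

def has_role_permission(user_perms, table, method, rolemap_override=None):
    """Check whether a user's table permissions allow the given HTTP method."""
    rolemap = dict(ROLEMAP)
    if rolemap_override:
        rolemap.update(rolemap_override)
    permission = rolemap[method]
    return permission in user_perms.get(table, set())

perms = {'part_part': {'view', 'add'}}
print(has_role_permission(perms, 'part_part', 'POST'))    # -> True
print(has_role_permission(perms, 'part_part', 'DELETE'))  # -> False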

View File

@@ -1,149 +1,57 @@
"""Functions to check if certain parts of InvenTree are ready."""
import os
import sys
def isInTestMode():
"""Returns True if the database is in testing mode."""
"""
Returns True if the database is in testing mode
"""
return 'test' in sys.argv
def isImportingData():
"""Returns True if the database is currently importing (or exporting) data, e.g. 'loaddata' command is performed."""
return any((x in sys.argv for x in ['flush', 'loaddata', 'dumpdata']))
def isRunningMigrations():
"""Return True if the database is currently running migrations."""
return any(
(
x in sys.argv
for x in ['migrate', 'makemigrations', 'showmigrations', 'runmigrations']
)
)
def isRebuildingData():
"""Return true if any of the rebuilding commands are being executed."""
return any(
(
x in sys.argv
for x in ['prerender', 'rebuild_models', 'rebuild_thumbnails', 'rebuild']
)
)
def isRunningBackup():
"""Return true if any of the backup commands are being executed."""
return any(
(
x in sys.argv
for x in [
'backup',
'restore',
'dbbackup',
'dbrestore',
'mediabackup',
'mediarestore',
]
)
)
def isInWorkerThread():
"""Returns True if the current thread is a background worker thread."""
return 'qcluster' in sys.argv
def isInServerThread():
"""Returns True if the current thread is a server thread."""
if isInWorkerThread():
return False
if 'runserver' in sys.argv:
return True
if 'gunicorn' in sys.argv[0]:
return True
return False
def isInMainThread():
"""Django runserver starts two processes, one for the actual dev server and the other to reload the application.
- The RUN_MAIN env is set in that case. However if --noreload is applied, this variable
is not set because there are no different threads.
"""
if 'runserver' in sys.argv and '--noreload' not in sys.argv:
return os.environ.get('RUN_MAIN', None) == 'true'
Returns True if the database is currently importing data,
e.g. 'loaddata' command is performed
"""
return not isInWorkerThread()
return 'loaddata' in sys.argv
def canAppAccessDatabase(
allow_test: bool = False, allow_plugins: bool = False, allow_shell: bool = False
):
"""Returns True if the apps.py file can access database records.
def canAppAccessDatabase(allow_test=False):
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# Prevent database access if we are running backups
if isRunningBackup():
return False
# Prevent database access if we are importing data
if isImportingData():
return False
# Prevent database access if we are rebuilding data
if isRebuildingData():
return False
# Prevent database access if we are running migrations
if not allow_plugins and isRunningMigrations():
return False
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemigrations',
'migrate',
'check',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'rebuild_models',
'rebuild_thumbnails',
'collectstatic',
'makemessages',
'compilemessages',
]
if not allow_shell:
excluded_commands.append('shell')
if not allow_test:
# Override for testing mode?
excluded_commands.append('test')
if not allow_plugins:
excluded_commands.extend(['collectstatic'])
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
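canAppAccessDatabase() above works by inspecting sys.argv for management commands that must not trigger "on load" database access. A standalone sketch of the same argv check, with an abbreviated and purely illustrative command list:

import sys

EXCLUDED_COMMANDS = {'makemigrations', 'migrate', 'collectstatic', 'shell', 'test'}

def can_touch_database(argv=None, allow_test=False):
    """Return False if any excluded management command appears on the command line."""
    argv = sys.argv if argv is None else argv
    excluded = set(EXCLUDED_COMMANDS)
    if allow_test:
        excluded.discard('test')
    return not any(cmd in argv for cmd in excluded)

print(can_touch_database(['manage.py', 'runserver']))              # -> True
print(can_touch_database(['manage.py', 'migrate']))                # -> False
print(can_touch_database(['manage.py', 'test'], allow_test=True))  # -> True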
def isPluginRegistryLoaded():
"""Ensures that the plugin registry is already loaded.
The plugin registry reloads all apps once after starting if there are AppMixin plugins,
so that the discovered AppConfigs are added to Django. This triggers the ready function of
AppConfig to execute twice. Add this check to prevent that code from running twice.
Note: All apps using this check need to be registered after the plugins app in settings.py
Returns: 'False' if the registry has not fully loaded the plugins yet.
"""
from plugin import registry
return registry.plugins_loaded

View File

@@ -1,218 +0,0 @@
"""Functions to sanitize user input files."""
from bleach import clean
from bleach.css_sanitizer import CSSSanitizer
ALLOWED_ELEMENTS_SVG = [
'a',
'animate',
'animateColor',
'animateMotion',
'animateTransform',
'circle',
'defs',
'desc',
'ellipse',
'font-face',
'font-face-name',
'font-face-src',
'g',
'glyph',
'hkern',
'linearGradient',
'line',
'marker',
'metadata',
'missing-glyph',
'mpath',
'path',
'polygon',
'polyline',
'radialGradient',
'rect',
'set',
'stop',
'svg',
'switch',
'text',
'title',
'tspan',
'use',
]
ALLOWED_ATTRIBUTES_SVG = [
'accent-height',
'accumulate',
'additive',
'alphabetic',
'arabic-form',
'ascent',
'attributeName',
'attributeType',
'baseProfile',
'bbox',
'begin',
'by',
'calcMode',
'cap-height',
'class',
'color',
'color-rendering',
'content',
'cx',
'cy',
'd',
'dx',
'dy',
'descent',
'display',
'dur',
'end',
'fill',
'fill-opacity',
'fill-rule',
'font-family',
'font-size',
'font-stretch',
'font-style',
'font-variant',
'font-weight',
'from',
'fx',
'fy',
'g1',
'g2',
'glyph-name',
'gradientUnits',
'hanging',
'height',
'horiz-adv-x',
'horiz-origin-x',
'id',
'ideographic',
'k',
'keyPoints',
'keySplines',
'keyTimes',
'lang',
'marker-end',
'marker-mid',
'marker-start',
'markerHeight',
'markerUnits',
'markerWidth',
'mathematical',
'max',
'min',
'name',
'offset',
'opacity',
'orient',
'origin',
'overline-position',
'overline-thickness',
'panose-1',
'path',
'pathLength',
'points',
'preserveAspectRatio',
'r',
'refX',
'refY',
'repeatCount',
'repeatDur',
'requiredExtensions',
'requiredFeatures',
'restart',
'rotate',
'rx',
'ry',
'slope',
'stemh',
'stemv',
'stop-color',
'stop-opacity',
'strikethrough-position',
'strikethrough-thickness',
'stroke',
'stroke-dasharray',
'stroke-dashoffset',
'stroke-linecap',
'stroke-linejoin',
'stroke-miterlimit',
'stroke-opacity',
'stroke-width',
'systemLanguage',
'target',
'text-anchor',
'to',
'transform',
'type',
'u1',
'u2',
'underline-position',
'underline-thickness',
'unicode',
'unicode-range',
'units-per-em',
'values',
'version',
'viewBox',
'visibility',
'width',
'widths',
'x',
'x-height',
'x1',
'x2',
'xlink:actuate',
'xlink:arcrole',
'xlink:href',
'xlink:role',
'xlink:show',
'xlink:title',
'xlink:type',
'xml:base',
'xml:lang',
'xml:space',
'xmlns',
'xmlns:xlink',
'y',
'y1',
'y2',
'zoomAndPan',
'style',
]
def sanitize_svg(
file_data,
strip: bool = True,
elements: list = ALLOWED_ELEMENTS_SVG,
attributes: list = ALLOWED_ATTRIBUTES_SVG,
) -> str:
"""Sanitize a SVG file.
Args:
file_data (str): SVG as string.
strip (bool, optional): Should invalid elements get removed. Defaults to True.
elements (list, optional): Allowed elements. Defaults to ALLOWED_ELEMENTS_SVG.
attributes (list, optional): Allowed attributes. Defaults to ALLOWED_ATTRIBUTES_SVG.
Returns:
str: Sanitized SVG file.
"""
# Handle byte-encoded data
if isinstance(file_data, bytes):
file_data = file_data.decode('utf-8')
cleaned = clean(
file_data,
tags=elements,
attributes=attributes,
strip=strip,
strip_comments=strip,
css_sanitizer=CSSSanitizer(),
)
return cleaned
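sanitize_svg() above is a thin wrapper around bleach.clean() with an SVG-specific whitelist. A hedged usage sketch: the import path is an assumption (adjust to wherever sanitize_svg actually lives), bleach with its CSS extra must be installed, and the exact output string is not asserted here:

# Assumed import path -- editor's illustration only.
from InvenTree.sanitizer import sanitize_svg

dirty = '<svg onload="alert(1)"><script>alert(2)</script><circle r="10"/></svg>'
clean_svg = sanitize_svg(dirty)

# The script element and the onload handler are not in the whitelists,
# so they are stripped, while the circle element and its 'r' attribute survive.
print(clean_svg)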

View File

@@ -1,73 +0,0 @@
"""Configuration for Sentry.io error reporting."""
import logging
from django.conf import settings
from django.core.exceptions import ValidationError
from django.http import Http404
import rest_framework.exceptions
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
import InvenTree.version
logger = logging.getLogger('inventree')
def default_sentry_dsn():
"""Return the default Sentry.io DSN for InvenTree."""
return 'https://3928ccdba1d34895abde28031fd00100@o378676.ingest.sentry.io/6494600'
def sentry_ignore_errors():
"""Return a list of error types to ignore.
These error types will *not* be reported to sentry.io.
"""
return [
Http404,
ValidationError,
rest_framework.exceptions.AuthenticationFailed,
rest_framework.exceptions.NotAuthenticated,
rest_framework.exceptions.PermissionDenied,
rest_framework.exceptions.ValidationError,
]
def init_sentry(dsn, sample_rate, tags):
"""Initialize sentry.io error reporting."""
logger.info('Initializing sentry.io integration')
sentry_sdk.init(
dsn=dsn,
integrations=[DjangoIntegration()],
traces_sample_rate=sample_rate,
send_default_pii=True,
ignore_errors=sentry_ignore_errors(),
release=InvenTree.version.INVENTREE_SW_VERSION,
environment='development'
if InvenTree.version.isInvenTreeDevelopmentVersion()
else 'production',
)
for key, val in tags.items():
sentry_sdk.set_tag(f'inventree_{key}', val)
sentry_sdk.set_tag('api', InvenTree.version.inventreeApiVersion())
sentry_sdk.set_tag('platform', InvenTree.version.inventreePlatform())
sentry_sdk.set_tag('git_branch', InvenTree.version.inventreeBranch())
sentry_sdk.set_tag('git_commit', InvenTree.version.inventreeCommitHash())
sentry_sdk.set_tag('git_date', InvenTree.version.inventreeCommitDate())
def report_exception(exc):
"""Report an exception to sentry.io."""
if settings.SENTRY_ENABLED and settings.SENTRY_DSN:
if not any(isinstance(exc, e) for e in sentry_ignore_errors()):
logger.info('Reporting exception to sentry.io: %s', exc)
try:
sentry_sdk.capture_exception(exc)
except Exception:
logger.warning('Failed to report exception to sentry.io')
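init_sentry() above wires up sentry_sdk with the Django integration and tags each event with InvenTree version information. A hedged usage sketch, assuming the module is importable as InvenTree.sentry and that error reporting is actually desired (both assumptions):

# Assumed import path -- editor's illustration only.
from InvenTree.sentry import default_sentry_dsn, init_sentry

init_sentry(
    dsn=default_sentry_dsn(),       # or your own project DSN
    sample_rate=0.1,                # forward 10% of traces
    tags={'deployment': 'docker'},  # becomes the 'inventree_deployment' tag
)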

View File

@@ -1,8 +1,9 @@
"""Serializers used in various InvenTree apps."""
"""
Serializers used in various InvenTree apps
"""
import os
from collections import OrderedDict
from copy import deepcopy
from decimal import Decimal
from django.conf import settings
@@ -16,37 +17,34 @@ from djmoney.contrib.django_rest_framework.fields import MoneyField
from djmoney.money import Money
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.exceptions import ValidationError
from rest_framework.fields import empty
from rest_framework.serializers import DecimalField
from rest_framework.utils import model_meta
from taggit.serializers import TaggitSerializer
import common.models as common_models
from common.settings import currency_code_default, currency_code_mappings
from InvenTree.fields import InvenTreeRestURLField, InvenTreeURLField
class EmptySerializer(serializers.Serializer):
"""Empty serializer for use in testing."""
from .models import extract_int
class InvenTreeMoneySerializer(MoneyField):
"""Custom serializer for 'MoneyField', which ensures that passed values are numerically valid.
"""
Custom serializer for 'MoneyField',
which ensures that passed values are numerically valid
Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
"""
def __init__(self, *args, **kwargs):
"""Override default values."""
kwargs['max_digits'] = kwargs.get('max_digits', 19)
self.decimal_places = kwargs['decimal_places'] = kwargs.get('decimal_places', 6)
kwargs['required'] = kwargs.get('required', False)
kwargs["max_digits"] = kwargs.get("max_digits", 19)
kwargs["decimal_places"] = kwargs.get("decimal_places", 4)
super().__init__(*args, **kwargs)
def get_value(self, data):
"""Test that the returned amount is a valid Decimal."""
"""
Test that the returned amount is a valid Decimal
"""
amount = super(DecimalField, self).get_value(data)
# Convert an empty string to None
@@ -55,167 +53,54 @@ class InvenTreeMoneySerializer(MoneyField):
try:
if amount is not None and amount is not empty:
# Convert to a Decimal instance, and round to maximum allowed decimal places
amount = Decimal(amount)
amount = round(amount, self.decimal_places)
except Exception:
raise ValidationError({self.field_name: [_('Must be a valid number')]})
except:
raise ValidationError({
self.field_name: [_("Must be a valid number")],
})
currency = data.get(
get_currency_field_name(self.field_name), self.default_currency
)
currency = data.get(get_currency_field_name(self.field_name), self.default_currency)
if (
currency
and amount is not None
and not isinstance(amount, MONEY_CLASSES)
and amount is not empty
):
if currency and amount is not None and not isinstance(amount, MONEY_CLASSES) and amount is not empty:
return Money(amount, currency)
return amount
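get_value() above coerces the incoming amount to a Decimal, rounds it to the configured number of decimal places and, if a currency is present, wraps it in a Money instance. A standalone sketch of the numeric part only (Decimal handling without djmoney; names are illustrative):

from decimal import Decimal

def normalise_amount(raw, decimal_places=6):
    """Convert a raw value to a Decimal rounded to `decimal_places`, or raise ValueError."""
    if raw in (None, ''):
        return None
    try:
        amount = Decimal(str(raw))
    except Exception as exc:
        raise ValueError('Must be a valid number') from exc
    return round(amount, decimal_places)

print(normalise_amount('19.991234567'))  # -> 19.991235
print(normalise_amount(''))              # -> None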
class InvenTreeCurrencySerializer(serializers.ChoiceField):
"""Custom serializers for selecting currency option."""
class UserSerializer(serializers.ModelSerializer):
""" Serializer for User - provides all fields """
def __init__(self, *args, **kwargs):
"""Initialize the currency serializer."""
choices = currency_code_mappings()
allow_blank = kwargs.get('allow_blank', False) or kwargs.get(
'allow_null', False
)
if allow_blank:
choices = [('', '---------')] + choices
kwargs['choices'] = choices
if 'default' not in kwargs and 'required' not in kwargs:
kwargs['default'] = '' if allow_blank else currency_code_default
if 'label' not in kwargs:
kwargs['label'] = _('Currency')
if 'help_text' not in kwargs:
kwargs['help_text'] = _('Select currency from available options')
super().__init__(*args, **kwargs)
class Meta:
model = User
fields = '__all__'
class DependentField(serializers.Field):
"""A dependent field can be used to dynamically return child fields based on the value of other fields."""
class UserSerializerBrief(serializers.ModelSerializer):
""" Serializer for User - provides limited information """
child = None
def __init__(self, *args, depends_on, field_serializer, **kwargs):
"""A dependent field can be used to dynamically return child fields based on the value of other fields.
Example:
This example adds two fields. If the client selects integer, an integer field will be shown, but if they
select char, a char field will be shown. For any other value, nothing will be shown.
class TestSerializer(serializers.Serializer):
select_type = serializers.ChoiceField(choices=[
("integer", "Integer"),
("char", "Char"),
])
my_field = DependentField(depends_on=["select_type"], field_serializer="get_my_field")
def get_my_field(self, fields):
if fields["select_type"] == "integer":
return serializers.IntegerField()
if fields["select_type"] == "char":
return serializers.CharField()
"""
super().__init__(*args, **kwargs)
self.depends_on = depends_on
self.field_serializer = field_serializer
def get_child(self, raise_exception=False):
"""This method tries to extract the child based on the provided data in the request by the client."""
data = deepcopy(self.context['request'].data)
def visit_parent(node):
"""Recursively extract the data for the parent field/serializer in reverse."""
nonlocal data
if node.parent:
visit_parent(node.parent)
# only do for composite fields and stop right before the current field
if hasattr(node, 'child') and node is not self and isinstance(data, dict):
data = data.get(node.field_name, None)
visit_parent(self)
# ensure that data is a dictionary and that a parent exists
if not isinstance(data, dict) or self.parent is None:
return
# check if the request data contains the dependent fields, otherwise skip getting the child
for f in self.depends_on:
if data.get(f, None) is None:
if (
self.parent
and (v := getattr(self.parent.fields[f], 'default', None))
is not None
):
data[f] = v
else:
return
# partially validate the data for options requests that set raise_exception while calling .get_child(...)
if raise_exception:
validation_data = {k: v for k, v in data.items() if k in self.depends_on}
serializer = self.parent.__class__(
context=self.context, data=validation_data, partial=True
)
serializer.is_valid(raise_exception=raise_exception)
# try to get the field serializer
field_serializer = getattr(self.parent, self.field_serializer)
child = field_serializer(data)
if not child:
return
self.child = child
self.child.bind(field_name='', parent=self)
def to_internal_value(self, data):
"""This method tries to convert the data to an internal representation based on the defined to_internal_value method on the child."""
self.get_child()
if self.child:
return self.child.to_internal_value(data)
return None
def to_representation(self, value):
"""This method tries to convert the data to representation based on the defined to_representation method on the child."""
self.get_child()
if self.child:
return self.child.to_representation(value)
return None
class Meta:
model = User
fields = [
'pk',
'username',
]
class InvenTreeModelSerializer(serializers.ModelSerializer):
"""Inherits the standard Django ModelSerializer class, but also ensures that the underlying model class data are checked on validation."""
# Switch out URLField mapping
serializer_field_mapping = {
**serializers.ModelSerializer.serializer_field_mapping,
models.URLField: InvenTreeRestURLField,
InvenTreeURLField: InvenTreeRestURLField,
}
"""
Inherits the standard Django ModelSerializer class,
but also ensures that the underlying model class data are checked on validation.
"""
def __init__(self, instance=None, data=empty, **kwargs):
"""Custom __init__ routine to ensure that *default* values (as specified in the ORM) are used by the DRF serializers, *if* the values are not provided by the user."""
"""
Custom __init__ routine to ensure that *default* values (as specified in the ORM)
are used by the DRF serializers, *if* the values are not provided by the user.
"""
# If instance is None, we are creating a new instance
if instance is None and data is not empty:
if data is None:
data = OrderedDict()
else:
@@ -230,20 +115,21 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
fields = model_meta.get_field_info(ModelClass)
for field_name, field in fields.fields.items():
"""
Update the field IF (and ONLY IF):
- The field has a specified default value
- The field does not already have a value set
"""
if field.has_default() and field_name not in data:
value = field.default
# Account for callable functions
if callable(value):
try:
value = value()
except Exception:
except:
continue
data[field_name] = value
@@ -251,10 +137,11 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
super().__init__(instance, data, **kwargs)
def get_initial(self):
"""Construct initial data for the serializer.
"""
Construct initial data for the serializer.
Use the 'default' values specified by the django model definition
"""
initials = super().get_initial().copy()
# Are we creating a new instance?
@@ -264,32 +151,28 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
fields = model_meta.get_field_info(ModelClass)
for field_name, field in fields.fields.items():
if field.has_default() and field_name not in initials:
value = field.default
# Account for callable functions
if callable(value):
try:
value = value()
except Exception:
except:
continue
initials[field_name] = value
return initials
def skip_create_fields(self):
"""Return a list of 'fields' which should be skipped for model creation.
This is used to 'bypass' a shortcoming of the DRF framework,
which does not allow us to have writeable serializer fields which do not exist on the model.
Default implementation returns an empty list
"""
return []
def save(self, **kwargs):
"""Catch any django ValidationError thrown at the moment `save` is called, and re-throw as a DRF ValidationError."""
"""
Catch any django ValidationError thrown at the moment save() is called,
and re-throw as a DRF ValidationError
"""
try:
super().save(**kwargs)
except (ValidationError, DjangoValidationError) as exc:
@@ -297,18 +180,11 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
return self.instance
def create(self, validated_data):
"""Custom create method which supports field adjustment."""
initial_data = validated_data.copy()
# Remove any fields which do not exist on the model
for field in self.skip_create_fields():
initial_data.pop(field, None)
return super().create(initial_data)
def update(self, instance, validated_data):
"""Catch any django ValidationError, and re-throw as a DRF ValidationError."""
"""
Catch any django ValidationError, and re-throw as a DRF ValidationError
"""
try:
instance = super().update(instance, validated_data)
except (ValidationError, DjangoValidationError) as exc:
@@ -317,25 +193,20 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
return instance
def run_validation(self, data=empty):
"""Perform serializer validation.
"""
Perform serializer validation.
In addition to running validators on the serializer fields,
this class ensures that the underlying model is also validated.
"""
# Run any native validation checks first (may raise a ValidationError)
data = super().run_validation(data)
# Now ensure the underlying model is correct
if not hasattr(self, 'instance') or self.instance is None:
# No instance exists (we are creating a new one)
initial_data = data.copy()
for field in self.skip_create_fields():
# Remove any fields we do not wish to provide to the model
initial_data.pop(field, None)
# Create a (RAM only) instance for extra testing
instance = self.Meta.model(**initial_data)
instance = self.Meta.model(**data)
else:
# Instance already exists (we are updating!)
instance = self.instance
@@ -352,6 +223,7 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
try:
instance.full_clean()
except (ValidationError, DjangoValidationError) as exc:
data = exc.message_dict
# Change '__all__' key (django style) to 'non_field_errors' (DRF style)
@@ -364,122 +236,21 @@ class InvenTreeModelSerializer(serializers.ModelSerializer):
return data
class InvenTreeTaggitSerializer(TaggitSerializer):
"""Updated from https://github.com/glemmaPaul/django-taggit-serializer."""
def update(self, instance, validated_data):
"""Overridden update method to re-add the tagmanager."""
to_be_tagged, validated_data = self._pop_tags(validated_data)
tag_object = super().update(instance, validated_data)
for key in to_be_tagged.keys():
# re-add the tagmanager
new_tagobject = tag_object.__class__.objects.get(id=tag_object.id)
setattr(tag_object, key, getattr(new_tagobject, key))
return self._save_tags(tag_object, to_be_tagged)
class InvenTreeTagModelSerializer(InvenTreeTaggitSerializer, InvenTreeModelSerializer):
"""Combination of InvenTreeTaggitSerializer and InvenTreeModelSerializer."""
pass
class UserSerializer(InvenTreeModelSerializer):
"""Serializer for a User."""
class Meta:
"""Metaclass defines serializer fields."""
model = User
fields = ['pk', 'username', 'first_name', 'last_name', 'email']
read_only_fields = ['username']
class ExendedUserSerializer(UserSerializer):
"""Serializer for a User with a bit more info."""
from users.serializers import GroupSerializer
groups = GroupSerializer(read_only=True, many=True)
class Meta(UserSerializer.Meta):
"""Metaclass defines serializer fields."""
fields = UserSerializer.Meta.fields + [
'groups',
'is_staff',
'is_superuser',
'is_active',
]
read_only_fields = UserSerializer.Meta.read_only_fields + ['groups']
def validate(self, attrs):
"""Expanded validation for changing user role."""
# Check if is_staff or is_superuser is in attrs
role_change = 'is_staff' in attrs or 'is_superuser' in attrs
request_user = self.context['request'].user
if role_change:
if request_user.is_superuser:
# Superusers can change any role
pass
elif request_user.is_staff and 'is_superuser' not in attrs:
# Staff can change any role except is_superuser
pass
else:
raise PermissionDenied(
_('You do not have permission to change this user role.')
)
return super().validate(attrs)
class UserCreateSerializer(ExendedUserSerializer):
"""Serializer for creating a new User."""
def validate(self, attrs):
"""Expanded valiadation for auth."""
# Check that the user trying to create a new user is a superuser
if not self.context['request'].user.is_superuser:
raise serializers.ValidationError(_('Only superusers can create new users'))
# Generate a random password
password = User.objects.make_random_password(length=14)
attrs.update({'password': password})
return super().validate(attrs)
def create(self, validated_data):
"""Send an e email to the user after creation."""
from InvenTree.helpers_model import get_base_url
base_url = get_base_url()
instance = super().create(validated_data)
# Make sure the user cannot login until they have set a password
instance.set_unusable_password()
message = (
_('Your account has been created.')
+ '\n\n'
+ _('Please use the password reset function to login')
)
if base_url:
message += f'\n\nURL: {base_url}'
# Send the user an onboarding email (from current site)
instance.email_user(subject=_('Welcome to InvenTree'), message=message)
return instance
class ReferenceIndexingSerializerMixin():
"""
This serializer mixin ensures that the reference is not too big / small
for the BigIntegerField
"""
def validate_reference(self, value):
if extract_int(value) > models.BigIntegerField.MAX_BIGINT:
raise serializers.ValidationError('reference is too big')
return value
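The mixin above rejects reference values whose numeric part would overflow the 64-bit integer column used for reference sorting. A standalone sketch of that bounds check; MAX_BIGINT and the digit extraction below are simplified stand-ins for Django's BigIntegerField limit and InvenTree's extract_int helper:

import re

MAX_BIGINT = 2**63 - 1  # upper bound of a signed 64-bit database integer

def extract_int_sketch(reference):
    """Pull the trailing digit group out of a reference string, e.g. 'PO-0123' -> 123."""
    match = re.search(r'(\d+)$', reference)
    return int(match.group(1)) if match else 0

def validate_reference(reference):
    if extract_int_sketch(reference) > MAX_BIGINT:
        raise ValueError('reference is too big')
    return reference

print(validate_reference('PO-0123'))                   # -> PO-0123
# validate_reference('PO-' + '9' * 25) would raise ValueError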
class InvenTreeAttachmentSerializerField(serializers.FileField):
"""Override the DRF native FileField serializer, to remove the leading server path.
"""
Override the DRF native FileField serializer,
to remove the leading server path.
For example, the FileField might supply something like:
@@ -489,14 +260,16 @@ class InvenTreeAttachmentSerializerField(serializers.FileField):
/media/foo/bar.jpg
If the server process is serving the data at 127.0.0.1,
Why? You can't handle the why!
Actually, if the server process is serving the data at 127.0.0.1,
but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
then an attachment which prefixes the "address" of the internal server
will not be accessible from the outside world.
"""
def to_representation(self, value):
"""To json-serializable type."""
if not value:
return None
@@ -504,50 +277,34 @@ class InvenTreeAttachmentSerializerField(serializers.FileField):
class InvenTreeAttachmentSerializer(InvenTreeModelSerializer):
"""Special case of an InvenTreeModelSerializer, which handles an "attachment" model.
"""
Special case of an InvenTreeModelSerializer, which handles an "attachment" model.
The only real addition here is that we support "renaming" of the attachment file.
"""
@staticmethod
def attachment_fields(extra_fields=None):
"""Default set of fields for an attachment serializer."""
fields = [
'pk',
'attachment',
'filename',
'link',
'comment',
'upload_date',
'user',
'user_detail',
]
if extra_fields:
fields += extra_fields
return fields
user_detail = UserSerializer(source='user', read_only=True, many=False)
attachment = InvenTreeAttachmentSerializerField(required=False, allow_null=False)
attachment = InvenTreeAttachmentSerializerField(
required=False,
allow_null=False,
)
# The 'filename' field must be present in the serializer
filename = serializers.CharField(
label=_('Filename'), required=False, source='basename', allow_blank=False
label=_('Filename'),
required=False,
source='basename',
allow_blank=False,
)
upload_date = serializers.DateField(read_only=True)
class InvenTreeImageSerializerField(serializers.ImageField):
"""Custom image serializer.
"""
Custom image serializer.
On upload, validate that the file is a valid image file
"""
def to_representation(self, value):
"""To json-serializable type."""
if not value:
return None
@@ -555,24 +312,25 @@ class InvenTreeImageSerializerField(serializers.ImageField):
class InvenTreeDecimalField(serializers.FloatField):
"""Custom serializer for decimal fields.
"""
Custom serializer for decimal fields. Solves the following issues:
Solves the following issues:
- The normal DRF DecimalField renders values with trailing zeros
- Using a FloatField can result in rounding issues: https://code.djangoproject.com/ticket/30290
"""
def to_internal_value(self, data):
"""Convert to python type."""
# Convert the value to a string, and then a decimal
try:
return Decimal(str(data))
except Exception:
raise serializers.ValidationError(_('Invalid value'))
except:
raise serializers.ValidationError(_("Invalid value"))
class DataFileUploadSerializer(serializers.Serializer):
"""Generic serializer for uploading a data file, and extracting a dataset.
"""
Generic serializer for uploading a data file, and extracting a dataset.
- Validates uploaded file
- Extracts column names
@@ -583,19 +341,22 @@ class DataFileUploadSerializer(serializers.Serializer):
TARGET_MODEL = None
class Meta:
"""Metaclass options."""
fields = ['data_file']
fields = [
'data_file',
]
data_file = serializers.FileField(
label=_('Data File'),
help_text=_('Select data file for upload'),
label=_("Data File"),
help_text=_("Select data file for upload"),
required=True,
allow_empty_file=False,
)
def validate_data_file(self, data_file):
"""Perform validation checks on the uploaded data file."""
"""
Perform validation checks on the uploaded data file.
"""
self.filename = data_file.name
name, ext = os.path.splitext(data_file.name)
@@ -603,16 +364,20 @@ class DataFileUploadSerializer(serializers.Serializer):
# Remove the leading . from the extension
ext = ext[1:]
accepted_file_types = ['xls', 'xlsx', 'csv', 'tsv', 'xml']
accepted_file_types = [
'xls', 'xlsx',
'csv', 'tsv',
'xml',
]
if ext not in accepted_file_types:
raise serializers.ValidationError(_('Unsupported file type'))
raise serializers.ValidationError(_("Unsupported file type"))
# Impose a 50MB limit on uploaded BOM files
max_upload_file_size = 50 * 1024 * 1024
if data_file.size > max_upload_file_size:
raise serializers.ValidationError(_('File is too large'))
raise serializers.ValidationError(_("File is too large"))
# Read file data into memory (bytes object)
try:
@@ -633,21 +398,23 @@ class DataFileUploadSerializer(serializers.Serializer):
raise serializers.ValidationError(str(e))
if len(self.dataset.headers) == 0:
raise serializers.ValidationError(_('No columns found in file'))
raise serializers.ValidationError(_("No columns found in file"))
if len(self.dataset) == 0:
raise serializers.ValidationError(_('No data rows found in file'))
raise serializers.ValidationError(_("No data rows found in file"))
return data_file
def match_column(self, column_name, field_names, exact=False):
"""Attempt to match a column name (from the file) to a field (defined in the model).
"""
Attempt to match a column name (from the file) to a field (defined in the model)
Order of matching is:
- Direct match
- Case insensitive match
- Fuzzy match
"""
if not column_name:
return None
@@ -672,7 +439,10 @@ class DataFileUploadSerializer(serializers.Serializer):
return None
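match_column() above tries a direct match, then a case-insensitive match, then a fuzzy match against the model's import field names. A standalone sketch of that cascade using difflib for the fuzzy step (the cutoff value is an illustrative choice, not the serializer's actual tuning):

import difflib

def match_column_sketch(column_name, field_names):
    """Return the best-matching field name for a file column, or None."""
    if not column_name:
        return None
    if column_name in field_names:          # direct match
        return column_name
    lowered = {name.lower(): name for name in field_names}
    if column_name.lower() in lowered:      # case-insensitive match
        return lowered[column_name.lower()]
    fuzzy = difflib.get_close_matches(column_name.lower(), list(lowered), n=1, cutoff=0.8)
    return lowered[fuzzy[0]] if fuzzy else None

fields = ['part', 'quantity', 'reference']
print(match_column_sketch('Quantity', fields))  # -> quantity
print(match_column_sketch('Quantty', fields))   # -> quantity (fuzzy)
print(match_column_sketch('colour', fields))    # -> None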
def extract_data(self):
"""Returns dataset extracted from the file."""
"""
Returns dataset extracted from the file
"""
# Provide a dict of available import fields for the model
model_fields = {}
@@ -682,11 +452,11 @@ class DataFileUploadSerializer(serializers.Serializer):
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
except:
pass
# Extract a list of valid model field names
model_field_names = list(model_fields.keys())
model_field_names = [key for key in model_fields.keys()]
# Provide a dict of available columns from the dataset
file_columns = {}
@@ -713,12 +483,12 @@ class DataFileUploadSerializer(serializers.Serializer):
}
def save(self):
"""Empty overwrite for save."""
...
class DataFileExtractSerializer(serializers.Serializer):
"""Generic serializer for extracting data from an imported dataset.
"""
Generic serializer for extracting data from an imported dataset.
- User provides an array of matched headers
- User provides an array of raw data rows
@@ -728,31 +498,39 @@ class DataFileExtractSerializer(serializers.Serializer):
TARGET_MODEL = None
class Meta:
"""Metaclass options."""
fields = ['columns', 'rows']
fields = [
'columns',
'rows',
]
# Mapping of columns
columns = serializers.ListField(child=serializers.CharField(allow_blank=True))
columns = serializers.ListField(
child=serializers.CharField(
allow_blank=True,
),
)
rows = serializers.ListField(
child=serializers.ListField(
child=serializers.CharField(allow_blank=True, allow_null=True)
child=serializers.CharField(
allow_blank=True,
allow_null=True,
),
)
)
def validate(self, data):
"""Clean data."""
data = super().validate(data)
self.columns = data.get('columns', [])
self.rows = data.get('rows', [])
if len(self.rows) == 0:
raise serializers.ValidationError(_('No data rows provided'))
raise serializers.ValidationError(_("No data rows provided"))
if len(self.columns) == 0:
raise serializers.ValidationError(_('No data columns supplied'))
raise serializers.ValidationError(_("No data columns supplied"))
self.validate_extracted_columns()
@@ -760,40 +538,57 @@ class DataFileExtractSerializer(serializers.Serializer):
@property
def data(self):
"""Returns current data."""
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
except:
model_fields = {}
rows = []
for row in self.rows:
"""Optionally pre-process each row, before sending back to the client."""
"""
Optionally pre-process each row, before sending back to the client
"""
processed_row = self.process_row(self.row_to_dict(row))
if processed_row:
rows.append({'original': row, 'data': processed_row})
rows.append({
"original": row,
"data": processed_row,
})
return {'fields': model_fields, 'columns': self.columns, 'rows': rows}
return {
'fields': model_fields,
'columns': self.columns,
'rows': rows,
}
def process_row(self, row):
"""Process a 'row' of data, which is a mapped column:value dict.
"""
Process a 'row' of data, which is a mapped column:value dict
Returns either a mapped column:value dict, or None.
If the function returns None, the row is ignored!
"""
# Default implementation simply returns the original row data
return row
def row_to_dict(self, row):
"""Convert a "row" to a named data dict."""
row_dict = {'errors': {}}
"""
Convert a "row" to a named data dict
"""
row_dict = {
'errors': {},
}
for idx, value in enumerate(row):
if idx < len(self.columns):
col = self.columns[idx]
@@ -803,26 +598,29 @@ class DataFileExtractSerializer(serializers.Serializer):
return row_dict
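To make the row mapping concrete, this is roughly what row_to_dict produces for an invented column list and row (the elided part of the hunk may add further handling, e.g. for unmatched columns):

columns = ['Part', 'Quantity', 'Reference']
row = ['R_10K_0805', '100', 'R1']

row_dict = {'errors': {}}
for idx, value in enumerate(row):
    if idx < len(columns):
        row_dict[columns[idx]] = value

# row_dict == {'errors': {}, 'Part': 'R_10K_0805', 'Quantity': '100', 'Reference': 'R1'}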
def validate_extracted_columns(self):
"""Perform custom validation of header mapping."""
"""
Perform custom validation of header mapping.
"""
if self.TARGET_MODEL:
try:
model_fields = self.TARGET_MODEL.get_import_fields()
except Exception:
except:
model_fields = {}
cols_seen = set()
for name, field in model_fields.items():
required = field.get('required', False)
# Check for missing required columns
if required:
if name not in self.columns:
raise serializers.ValidationError(
_(f"Missing required column: '{name}'")
)
raise serializers.ValidationError(_(f"Missing required column: '{name}'"))
for col in self.columns:
if not col:
continue
@@ -833,50 +631,7 @@ class DataFileExtractSerializer(serializers.Serializer):
cols_seen.add(col)
def save(self):
"""No "save" action for this serializer."""
pass
class RemoteImageMixin(metaclass=serializers.SerializerMetaclass):
"""Mixin class which allows downloading an 'image' from a remote URL.
Adds the optional, write-only `remote_image` field to the serializer
"""
def skip_create_fields(self):
"""Ensure the 'remote_image' field is skipped when creating a new instance."""
return ['remote_image']
remote_image = serializers.URLField(
required=False,
allow_blank=False,
write_only=True,
label=_('Remote Image'),
help_text=_('URL of remote image file'),
)
def validate_remote_image(self, url):
"""Perform custom validation for the remote image URL.
- Attempt to download the image and store it against this object instance
- Catches and re-throws any errors
"""
from InvenTree.helpers_model import download_image_from_url
if not url:
return
if not common_models.InvenTreeSetting.get_setting(
'INVENTREE_DOWNLOAD_FROM_URL'
):
raise ValidationError(
_('Downloading images from remote URL is not enabled')
)
try:
self.remote_image_file = download_image_from_url(url)
except Exception as exc:
self.remote_image_file = None
raise ValidationError(str(exc))
return url
No "save" action for this serializer
"""
...
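Regarding the RemoteImageMixin removed above: validate_remote_image follows a 'download, store, re-raise on failure' pattern. A standalone sketch of that pattern, using requests directly rather than the download_image_from_url helper (which is not shown in this diff):

import requests

def fetch_remote_image(url: str, timeout: float = 10.0) -> bytes:
    """Illustrative only: download an image, collapsing any failure into a single error."""
    try:
        response = requests.get(url, timeout=timeout)
        response.raise_for_status()
        return response.content
    except Exception as exc:
        raise ValueError(f'Failed to download image: {exc}') from exc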

File diff suppressed because it is too large


@@ -1,269 +0,0 @@
"""API endpoints for social authentication with allauth."""
import logging
from importlib import import_module
from django.urls import NoReverseMatch, include, path, reverse
from allauth.account.models import EmailAddress
from allauth.socialaccount import providers
from allauth.socialaccount.providers.oauth2.views import OAuth2Adapter, OAuth2LoginView
from drf_spectacular.utils import OpenApiResponse, extend_schema
from rest_framework import serializers
from rest_framework.exceptions import NotFound
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
import InvenTree.sso
from common.models import InvenTreeSetting
from InvenTree.mixins import CreateAPI, ListAPI, ListCreateAPI
from InvenTree.serializers import EmptySerializer, InvenTreeModelSerializer
logger = logging.getLogger('inventree')
class GenericOAuth2ApiLoginView(OAuth2LoginView):
"""Api view to login a user with a social account."""
def dispatch(self, request, *args, **kwargs):
"""Dispatch the regular login view directly."""
return self.login(request, *args, **kwargs)
class GenericOAuth2ApiConnectView(GenericOAuth2ApiLoginView):
"""Api view to connect a social account to the current user."""
def dispatch(self, request, *args, **kwargs):
"""Dispatch the connect request directly."""
# Override the request method to be in connection mode
request.GET = request.GET.copy()
request.GET['process'] = 'connect'
# Resume the dispatch
return super().dispatch(request, *args, **kwargs)
def handle_oauth2(adapter: OAuth2Adapter):
"""Define urls for oauth2 endpoints."""
return [
path(
'login/',
GenericOAuth2ApiLoginView.adapter_view(adapter),
name=f'{provider.id}_api_login',
),
path(
'connect/',
GenericOAuth2ApiConnectView.adapter_view(adapter),
name=f'{provider.id}_api_connect',
),
]
legacy = {
'twitter': 'twitter_oauth2',
'bitbucket': 'bitbucket_oauth2',
'linkedin': 'linkedin_oauth2',
'vimeo': 'vimeo_oauth2',
'openid': 'openid_connect',
} # legacy connectors
# Collect urls for all loaded providers
social_auth_urlpatterns = []
provider_urlpatterns = []
for name, provider in providers.registry.provider_map.items():
try:
prov_mod = import_module(provider.get_package() + '.views')
except ImportError:
logger.exception('Could not import authentication provider %s', name)
continue
# Try to extract the adapter class
adapters = [
cls
for cls in prov_mod.__dict__.values()
if isinstance(cls, type)
and not cls == OAuth2Adapter
and issubclass(cls, OAuth2Adapter)
]
# Get urls
urls = []
if len(adapters) == 1:
urls = handle_oauth2(adapter=adapters[0])
else:
if provider.id in legacy:
logger.warning(
'`%s` is not supported on platform UI. Use `%s` instead.',
provider.id,
legacy[provider.id],
)
continue
else:
logger.error(
'Found handler that is not yet ready for platform UI: `%s`. Open a feature request on GitHub if you need it implemented.',
provider.id,
)
continue
provider_urlpatterns += [path(f'{provider.id}/', include(urls))]
social_auth_urlpatterns += provider_urlpatterns
class SocialProviderListResponseSerializer(serializers.Serializer):
"""Serializer for the SocialProviderListView."""
class SocialProvider(serializers.Serializer):
"""Serializer for the SocialProviderListResponseSerializer."""
id = serializers.CharField()
name = serializers.CharField()
configured = serializers.BooleanField()
login = serializers.URLField()
connect = serializers.URLField()
display_name = serializers.CharField()
sso_enabled = serializers.BooleanField()
sso_registration = serializers.BooleanField()
mfa_required = serializers.BooleanField()
providers = SocialProvider(many=True)
registration_enabled = serializers.BooleanField()
password_forgotten_enabled = serializers.BooleanField()
class SocialProviderListView(ListAPI):
"""List of available social providers."""
permission_classes = (AllowAny,)
serializer_class = EmptySerializer
@extend_schema(
responses={200: OpenApiResponse(response=SocialProviderListResponseSerializer)}
)
def get(self, request, *args, **kwargs):
"""Get the list of providers."""
provider_list = []
for provider in providers.registry.provider_map.values():
provider_data = {
'id': provider.id,
'name': provider.name,
'configured': False,
}
try:
provider_data['login'] = request.build_absolute_uri(
reverse(f'{provider.id}_api_login')
)
except NoReverseMatch:
provider_data['login'] = None
try:
provider_data['connect'] = request.build_absolute_uri(
reverse(f'{provider.id}_api_connect')
)
except NoReverseMatch:
provider_data['connect'] = None
provider_data['configured'] = InvenTree.sso.check_provider(provider)
provider_data['display_name'] = InvenTree.sso.provider_display_name(
provider
)
provider_list.append(provider_data)
data = {
'sso_enabled': InvenTree.sso.login_enabled(),
'sso_registration': InvenTree.sso.registration_enabled(),
'mfa_required': InvenTreeSetting.get_setting('LOGIN_ENFORCE_MFA'),
'providers': provider_list,
'registration_enabled': InvenTreeSetting.get_setting('LOGIN_ENABLE_REG'),
'password_forgotten_enabled': InvenTreeSetting.get_setting(
'LOGIN_ENABLE_PWD_FORGOT'
),
}
return Response(data)
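For reference, the payload assembled above has roughly the following shape; the provider entry and URLs are invented for illustration:

example_response = {
    'sso_enabled': True,
    'sso_registration': False,
    'mfa_required': False,
    'providers': [
        {
            'id': 'github',
            'name': 'GitHub',
            'configured': True,
            'login': 'https://example.com/api/auth/social/github/login/',
            'connect': 'https://example.com/api/auth/social/github/connect/',
            'display_name': 'GitHub',
        },
    ],
    'registration_enabled': False,
    'password_forgotten_enabled': True,
}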
class EmailAddressSerializer(InvenTreeModelSerializer):
"""Serializer for the EmailAddress model."""
class Meta:
"""Meta options for EmailAddressSerializer."""
model = EmailAddress
fields = '__all__'
class EmptyEmailAddressSerializer(InvenTreeModelSerializer):
"""Empty Serializer for the EmailAddress model."""
class Meta:
"""Meta options for EmailAddressSerializer."""
model = EmailAddress
fields = []
class EmailListView(ListCreateAPI):
"""List of registered email addresses for current users."""
permission_classes = (IsAuthenticated,)
serializer_class = EmailAddressSerializer
def get_queryset(self):
"""Only return data for current user."""
return EmailAddress.objects.filter(user=self.request.user)
class EmailActionMixin(CreateAPI):
"""Mixin to modify email addresses for current users."""
serializer_class = EmptyEmailAddressSerializer
permission_classes = (IsAuthenticated,)
def get_queryset(self):
"""Filter queryset for current user."""
return EmailAddress.objects.filter(
user=self.request.user, pk=self.kwargs['pk']
).first()
@extend_schema(responses={200: OpenApiResponse(response=EmailAddressSerializer)})
def post(self, request, *args, **kwargs):
"""Filter item, run action and return data."""
email = self.get_queryset()
if not email:
raise NotFound
self.special_action(email, request, *args, **kwargs)
return Response(EmailAddressSerializer(email).data)
class EmailVerifyView(EmailActionMixin):
"""Re-verify an email for a currently logged in user."""
def special_action(self, email, request, *args, **kwargs):
"""Send confirmation."""
if email.verified:
return
email.send_confirmation(request)
class EmailPrimaryView(EmailActionMixin):
"""Make an email for a currently logged in user primary."""
def special_action(self, email, *args, **kwargs):
"""Mark email as primary."""
if email.primary:
return
email.set_as_primary()
class EmailRemoveView(EmailActionMixin):
"""Remove an email for a currently logged in user."""
def special_action(self, email, *args, **kwargs):
"""Delete email."""
email.delete()


@@ -1,77 +0,0 @@
"""Helper functions for Single Sign On functionality."""
import logging
from common.models import InvenTreeSetting
from InvenTree.helpers import str2bool
logger = logging.getLogger('inventree')
def get_provider_app(provider):
"""Return the SocialApp object for the given provider."""
from allauth.socialaccount.models import SocialApp
try:
apps = SocialApp.objects.filter(provider__iexact=provider.id)
except SocialApp.DoesNotExist:
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
return None
if apps.count() > 1:
logger.warning("Multiple SocialApps found for provider '%s'", provider.id)
if apps.count() == 0:
logger.warning("SSO SocialApp not found for provider '%s'", provider.id)
return apps.first()
def check_provider(provider, raise_error=False):
"""Check if the given provider is correctly configured.
To be correctly configured, the following must be true:
- Provider must have a registered SocialApp
- Must have at least one site enabled
"""
import allauth.app_settings
# First, check that the provider is enabled
app = get_provider_app(provider)
if not app:
return False
if allauth.app_settings.SITES_ENABLED:
# At least one matching site must be specified
if not app.sites.exists():
logger.error('SocialApp %s has no sites configured', app)
return False
# At this point, we assume that the provider is correctly configured
return True
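A small usage sketch of check_provider, iterating allauth's provider registry in the same way as the API module above (illustrative only):

from allauth.socialaccount import providers

import InvenTree.sso

for provider in providers.registry.provider_map.values():
    status = 'configured' if InvenTree.sso.check_provider(provider) else 'not configured'
    print(f'{provider.id}: {status}')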
def provider_display_name(provider):
"""Return the 'display name' for the given provider."""
if app := get_provider_app(provider):
return app.name
# Fallback value if app not found
return provider.name
def login_enabled() -> bool:
"""Return True if SSO login is enabled."""
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO'))
def registration_enabled() -> bool:
"""Return True if SSO registration is enabled."""
return str2bool(InvenTreeSetting.get_setting('LOGIN_ENABLE_SSO_REG'))
def auto_registration_enabled() -> bool:
"""Return True if SSO auto-registration is enabled."""
return str2bool(InvenTreeSetting.get_setting('LOGIN_SIGNUP_SSO_AUTO'))

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -12,18 +12,15 @@
--label-yellow: #fdc82a;
--bs-body-color: #68686a;
--scanner-upscale: 5;
--scanner-margin: 10%;
}
main {
overflow-x: auto;
overflow-x: clip;
}
.login-screen {
background: url(/static/img/paper_splash.jpg) no-repeat center fixed;
background-size: cover;
background-position: center;
height: 100vh;
font-family: 'Numans', sans-serif;
color: #eee;
@@ -66,10 +63,30 @@ main {
background-color: #EEEEF5;
}
.markdownx .row {
margin: 5px;
padding: 5px;
border: 1px solid #cce;
border-radius: 4px;
}
.markdownx-editor {
width: 100%;
border: 1px solid #cce;
border-radius: 3px;
padding: 10px;
}
.panel-content {
padding: 10px;
}
.markdownx-preview {
border: 1px solid #cce;
border-radius: 3px;
padding: 10px;
}
/* Progress bars */
.progress {
@@ -105,10 +122,6 @@ main {
font-size: 110%;
}
.bg-qr-code {
background-color: #FFF !important;
}
.qr-code {
max-width: 400px;
max-height: 400px;
@@ -223,7 +236,8 @@ main {
}
.sub-table {
margin-left: 60px;
margin-left: 45px;
margin-right: 45px;
}
.detail-icon .glyphicon {
@@ -269,6 +283,10 @@ main {
}
/* Styles for table buttons and filtering */
.button-toolbar .btn {
margin-left: 1px;
margin-right: 1px;
}
.filter-list {
display: inline-block;
@@ -300,24 +318,29 @@ main {
.filter-tag {
display: inline-block;
*display: inline;
margin: 5px;
padding: 5px;
padding-top: 1px;
padding-bottom: 1px;
color: var(--bs-body-color);
border: 1px solid var(--border-color);
border-radius: 10px;
background: var(--secondary-color);
zoom: 1;
padding-top: 3px;
padding-left: 3px;
padding-right: 3px;
border: 1px solid #aaa;
border-radius: 3px;
background: #eee;
margin: 1px;
margin-left: 5px;
margin-right: 5px;
white-space: nowrap;
}
.filter-button {
padding: 6px;
padding: 2px;
padding-left: 4px;
padding-right: 4px;
}
.filter-input {
display: inline-block;
*display: inline;
zoom: 1;
}
.filter-tag:hover {
@@ -447,15 +470,15 @@ main {
}
.part-thumb {
width: 256px;
height: 256px;
width: 200px;
height: 200px;
margin: 2px;
padding: 3px;
object-fit: contain;
}
.part-thumb-container:hover .part-thumb-overlay {
opacity: 0.75;
opacity: 1;
}
.part-thumb-overlay {
@@ -464,6 +487,8 @@ main {
left: 0;
opacity: 0;
transition: .25s ease;
padding-top: 10px;
padding-left: 25px;
margin: 5px;
}
@@ -674,10 +699,6 @@ main {
color: #A94442;
}
.form-error-message {
display: block;
}
.modal input {
width: 100%;
}
@@ -708,33 +729,6 @@ input[type="submit"] {
margin-bottom: 10px;
}
/* Make barcode scanner responsive and performant */
#barcode_scan_video video {
width: calc(100% * var(--scanner-upscale)) !important;
transform: scale(calc(1 / var(--scanner-upscale)));
margin: calc(-100% * (var(--scanner-upscale) - 1) / 2);
display: inline-block !important;
}
@-moz-document url-prefix() {
#barcode_scan_video video {
margin-top: calc(-100% * (var(--scanner-upscale) - 1) / 2 + 50%);
margin-bottom: calc(-100% * (var(--scanner-upscale) - 1) / 2 + 50%);
}
@media (pointer:coarse) {
#barcode_scan_video video {
margin-top: calc(-100% * (var(--scanner-upscale)) / 2 - 20%);
margin-bottom: calc(-100% * (var(--scanner-upscale)) / 2 - 20%);
}
}
}
#barcode_scan_video #qr-shaded-region {
border: none !important;
margin: var(--scanner-margin);
}
.sidebar-list-group-item {
background-color: var(--secondary-color);
color: var(--bs-body-color);
@@ -780,12 +774,6 @@ input[type="submit"] {
.alert-block {
display: block;
padding: 0.75rem;
}
.alert-small {
padding: 0.35rem;
font-size: 75%;
}
.navbar .btn {
@@ -871,11 +859,6 @@ input[type="submit"] {
padding: 10px;
}
.card-thumb {
max-width: 64px;
max-height: 64px;
}
.float-right {
float: right;
}
@@ -1065,39 +1048,3 @@ a {
margin-top: 3px;
overflow: hidden;
}
.treeview .node-icon {
margin-left: 0.25rem;
margin-right: 0.25rem;
}
.sso-header {
text-align: center;
width: 100%;
padding-bottom: 10px;
}
.sso-provider-list {
width: 100%;
list-style-type: none;
padding-left: 0px;
}
.sso-provider-link {
width: 100%;
}
.sso-provider-link a {
width: 100%;
text-align: left;
}
.flex-cell {
display: flex;
align-items: center;
justify-content: space-between;
}
.large-treeview-icon {
font-size: 1em;
}


@@ -222,29 +222,6 @@
};
var l10 = {
code: 'bn',
week: {
dow: 0, // Sunday is the first day of the week.
doy: 6, // The week that contains Jan 1st is the first week of the year.
},
buttonText: {
prev: 'পেছনে',
next: 'সামনে',
today: 'আজ',
month: 'মাস',
week: 'সপ্তাহ',
day: 'দিন',
list: 'তালিকা',
},
weekText: 'সপ্তাহ',
allDayText: 'সারাদিন',
moreLinkText: function(n) {
return '+অন্যান্য ' + n
},
noEventsText: 'কোনো ইভেন্ট নেই',
};
var l11 = {
code: 'bs',
week: {
dow: 1, // Monday is the first day of the week.
@@ -267,7 +244,7 @@
noEventsText: 'Nema događaja za prikazivanje',
};
var l12 = {
var l11 = {
code: 'ca',
week: {
dow: 1, // Monday is the first day of the week.
@@ -288,7 +265,7 @@
noEventsText: 'No hi ha esdeveniments per mostrar',
};
var l13 = {
var l12 = {
code: 'cs',
week: {
dow: 1, // Monday is the first day of the week.
@@ -311,7 +288,7 @@
noEventsText: 'Žádné akce k zobrazení',
};
var l14 = {
var l13 = {
code: 'cy',
week: {
dow: 1, // Monday is the first day of the week.
@@ -333,7 +310,7 @@
noEventsText: 'Dim digwyddiadau',
};
var l15 = {
var l14 = {
code: 'da',
week: {
dow: 1, // Monday is the first day of the week.
@@ -354,12 +331,7 @@
noEventsText: 'Ingen arrangementer at vise',
};
function affix$1(buttonText) {
return (buttonText === 'Tag' || buttonText === 'Monat') ? 'r' :
buttonText === 'Jahr' ? 's' : ''
}
var l16 = {
var l15 = {
code: 'de-at',
week: {
dow: 1, // Monday is the first day of the week.
@@ -376,49 +348,14 @@
list: 'Terminübersicht',
},
weekText: 'KW',
weekTextLong: 'Woche',
allDayText: 'Ganztägig',
moreLinkText: function(n) {
return '+ weitere ' + n
},
noEventsText: 'Keine Ereignisse anzuzeigen',
buttonHints: {
prev(buttonText) {
return `Vorherige${affix$1(buttonText)} ${buttonText}`
},
next(buttonText) {
return `Nächste${affix$1(buttonText)} ${buttonText}`
},
today(buttonText) {
// → Heute, Diese Woche, Dieser Monat, Dieses Jahr
if (buttonText === 'Tag') {
return 'Heute'
}
return `Diese${affix$1(buttonText)} ${buttonText}`
},
},
viewHint(buttonText) {
// → Tagesansicht, Wochenansicht, Monatsansicht, Jahresansicht
const glue = buttonText === 'Woche' ? 'n' : buttonText === 'Monat' ? 's' : 'es';
return buttonText + glue + 'ansicht'
},
navLinkHint: 'Gehe zu $0',
moreLinkHint(eventCnt) {
return 'Zeige ' + (eventCnt === 1 ?
'ein weiteres Ereignis' :
eventCnt + ' weitere Ereignisse')
},
closeHint: 'Schließen',
timeHint: 'Uhrzeit',
eventHint: 'Ereignis',
};
function affix(buttonText) {
return (buttonText === 'Tag' || buttonText === 'Monat') ? 'r' :
buttonText === 'Jahr' ? 's' : ''
}
var l17 = {
var l16 = {
code: 'de',
week: {
dow: 1, // Monday is the first day of the week.
@@ -435,44 +372,14 @@
list: 'Terminübersicht',
},
weekText: 'KW',
weekTextLong: 'Woche',
allDayText: 'Ganztägig',
moreLinkText: function(n) {
return '+ weitere ' + n
},
noEventsText: 'Keine Ereignisse anzuzeigen',
buttonHints: {
prev(buttonText) {
return `Vorherige${affix(buttonText)} ${buttonText}`
},
next(buttonText) {
return `Nächste${affix(buttonText)} ${buttonText}`
},
today(buttonText) {
// → Heute, Diese Woche, Dieser Monat, Dieses Jahr
if (buttonText === 'Tag') {
return 'Heute'
}
return `Diese${affix(buttonText)} ${buttonText}`
},
},
viewHint(buttonText) {
// → Tagesansicht, Wochenansicht, Monatsansicht, Jahresansicht
const glue = buttonText === 'Woche' ? 'n' : buttonText === 'Monat' ? 's' : 'es';
return buttonText + glue + 'ansicht'
},
navLinkHint: 'Gehe zu $0',
moreLinkHint(eventCnt) {
return 'Zeige ' + (eventCnt === 1 ?
'ein weiteres Ereignis' :
eventCnt + ' weitere Ereignisse')
},
closeHint: 'Schließen',
timeHint: 'Uhrzeit',
eventHint: 'Ereignis',
};
var l18 = {
var l17 = {
code: 'el',
week: {
dow: 1, // Monday is the first day of the week.
@@ -493,61 +400,31 @@
noEventsText: 'Δεν υπάρχουν γεγονότα προς εμφάνιση',
};
var l19 = {
var l18 = {
code: 'en-au',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
var l20 = {
var l19 = {
code: 'en-gb',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
var l21 = {
var l20 = {
code: 'en-nz',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
var l22 = {
var l21 = {
code: 'eo',
week: {
dow: 1, // Monday is the first day of the week.
@@ -568,7 +445,7 @@
noEventsText: 'Neniuj eventoj por montri',
};
var l23 = {
var l22 = {
code: 'es',
week: {
dow: 0, // Sunday is the first day of the week.
@@ -589,7 +466,7 @@
noEventsText: 'No hay eventos para mostrar',
};
var l24 = {
var l23 = {
code: 'es',
week: {
dow: 1, // Monday is the first day of the week.
@@ -604,32 +481,13 @@
day: 'Día',
list: 'Agenda',
},
buttonHints: {
prev: '$0 antes',
next: '$0 siguiente',
today(buttonText) {
return (buttonText === 'Día') ? 'Hoy' :
((buttonText === 'Semana') ? 'Esta' : 'Este') + ' ' + buttonText.toLocaleLowerCase()
},
},
viewHint(buttonText) {
return 'Vista ' + (buttonText === 'Semana' ? 'de la' : 'del') + ' ' + buttonText.toLocaleLowerCase()
},
weekText: 'Sm',
weekTextLong: 'Semana',
allDayText: 'Todo el día',
moreLinkText: 'más',
moreLinkHint(eventCnt) {
return `Mostrar ${eventCnt} eventos más`
},
noEventsText: 'No hay eventos para mostrar',
navLinkHint: 'Ir al $0',
closeHint: 'Cerrar',
timeHint: 'La hora',
eventHint: 'Evento',
};
var l25 = {
var l24 = {
code: 'et',
week: {
dow: 1, // Monday is the first day of the week.
@@ -652,7 +510,7 @@
noEventsText: 'Kuvamiseks puuduvad sündmused',
};
var l26 = {
var l25 = {
code: 'eu',
week: {
dow: 1, // Monday is the first day of the week.
@@ -673,7 +531,7 @@
noEventsText: 'Ez dago ekitaldirik erakusteko',
};
var l27 = {
var l26 = {
code: 'fa',
week: {
dow: 6, // Saturday is the first day of the week.
@@ -697,7 +555,7 @@
noEventsText: 'هیچ رویدادی به نمایش',
};
var l28 = {
var l27 = {
code: 'fi',
week: {
dow: 1, // Monday is the first day of the week.
@@ -718,7 +576,7 @@
noEventsText: 'Ei näytettäviä tapahtumia',
};
var l29 = {
var l28 = {
code: 'fr',
buttonText: {
prev: 'Précédent',
@@ -736,7 +594,7 @@
noEventsText: 'Aucun événement à afficher',
};
var l30 = {
var l29 = {
code: 'fr-ch',
week: {
dow: 1, // Monday is the first day of the week.
@@ -758,7 +616,7 @@
noEventsText: 'Aucun événement à afficher',
};
var l31 = {
var l30 = {
code: 'fr',
week: {
dow: 1, // Monday is the first day of the week.
@@ -780,7 +638,7 @@
noEventsText: 'Aucun événement à afficher',
};
var l32 = {
var l31 = {
code: 'gl',
week: {
dow: 1, // Monday is the first day of the week.
@@ -801,7 +659,7 @@
noEventsText: 'Non hai eventos para amosar',
};
var l33 = {
var l32 = {
code: 'he',
direction: 'rtl',
buttonText: {
@@ -819,7 +677,7 @@
weekText: 'שבוע',
};
var l34 = {
var l33 = {
code: 'hi',
week: {
dow: 0, // Sunday is the first day of the week.
@@ -842,7 +700,7 @@
noEventsText: 'कोई घटनाओं को प्रदर्शित करने के लिए',
};
var l35 = {
var l34 = {
code: 'hr',
week: {
dow: 1, // Monday is the first day of the week.
@@ -865,7 +723,7 @@
noEventsText: 'Nema događaja za prikaz',
};
var l36 = {
var l35 = {
code: 'hu',
week: {
dow: 1, // Monday is the first day of the week.
@@ -878,7 +736,7 @@
month: 'Hónap',
week: 'Hét',
day: 'Nap',
list: 'Lista',
list: 'Napló',
},
weekText: 'Hét',
allDayText: 'Egész nap',
@@ -886,7 +744,7 @@
noEventsText: 'Nincs megjeleníthető esemény',
};
var l37 = {
var l36 = {
code: 'hy-am',
week: {
dow: 1, // Monday is the first day of the week.
@@ -909,7 +767,7 @@
noEventsText: 'Բացակայում է իրադարձությունը ցուցադրելու',
};
var l38 = {
var l37 = {
code: 'id',
week: {
dow: 1, // Monday is the first day of the week.
@@ -930,7 +788,7 @@
noEventsText: 'Tidak ada acara untuk ditampilkan',
};
var l39 = {
var l38 = {
code: 'is',
week: {
dow: 1, // Monday is the first day of the week.
@@ -951,7 +809,7 @@
noEventsText: 'Engir viðburðir til að sýna',
};
var l40 = {
var l39 = {
code: 'it',
week: {
dow: 1, // Monday is the first day of the week.
@@ -974,7 +832,7 @@
noEventsText: 'Non ci sono eventi da visualizzare',
};
var l41 = {
var l40 = {
code: 'ja',
buttonText: {
prev: '前',
@@ -993,7 +851,7 @@
noEventsText: '表示する予定はありません',
};
var l42 = {
var l41 = {
code: 'ka',
week: {
dow: 1,
@@ -1016,7 +874,7 @@
noEventsText: 'ღონისძიებები არ არის',
};
var l43 = {
var l42 = {
code: 'kk',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1039,29 +897,7 @@
noEventsText: 'Көрсету үшін оқиғалар жоқ',
};
var l44 = {
code: 'km',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'មុន',
next: 'បន្ទាប់',
today: 'ថ្ងៃនេះ',
year: 'ឆ្នាំ',
month: 'ខែ',
week: 'សប្តាហ៍',
day: 'ថ្ងៃ',
list: 'បញ្ជី',
},
weekText: 'សប្តាហ៍',
allDayText: 'ពេញមួយថ្ងៃ',
moreLinkText: 'ច្រើនទៀត',
noEventsText: 'គ្មានព្រឹត្តិការណ៍ត្រូវបង្ហាញ',
};
var l45 = {
var l43 = {
code: 'ko',
buttonText: {
prev: '이전달',
@@ -1078,29 +914,7 @@
noEventsText: '일정이 없습니다',
};
var l46 = {
code: 'ku',
week: {
dow: 6, // Saturday is the first day of the week.
doy: 12, // The week that contains Jan 1st is the first week of the year.
},
direction: 'rtl',
buttonText: {
prev: 'پێشتر',
next: 'دواتر',
today: 'ئەمڕو',
month: 'مانگ',
week: 'هەفتە',
day: 'ڕۆژ',
list: 'بەرنامە',
},
weekText: 'هەفتە',
allDayText: 'هەموو ڕۆژەکە',
moreLinkText: 'زیاتر',
noEventsText: 'هیچ ڕووداوێك نیە',
};
var l47 = {
var l44 = {
code: 'lb',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1121,7 +935,7 @@
noEventsText: 'Nee Evenementer ze affichéieren',
};
var l48 = {
var l45 = {
code: 'lt',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1142,7 +956,7 @@
noEventsText: 'Nėra įvykių rodyti',
};
var l49 = {
var l46 = {
code: 'lv',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1165,7 +979,7 @@
noEventsText: 'Nav notikumu',
};
var l50 = {
var l47 = {
code: 'mk',
buttonText: {
prev: 'претходно',
@@ -1184,7 +998,7 @@
noEventsText: 'Нема настани за прикажување',
};
var l51 = {
var l48 = {
code: 'ms',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1207,7 +1021,7 @@
noEventsText: 'Tiada peristiwa untuk dipaparkan',
};
var l52 = {
var l49 = {
code: 'nb',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1223,23 +1037,12 @@
list: 'Agenda',
},
weekText: 'Uke',
weekTextLong: 'Uke',
allDayText: 'Hele dagen',
moreLinkText: 'til',
noEventsText: 'Ingen hendelser å vise',
buttonHints: {
prev: 'Forrige $0',
next: 'Neste $0',
today: 'Nåværende $0',
},
viewHint: '$0 visning',
navLinkHint: 'Gå til $0',
moreLinkHint(eventCnt) {
return `Vis ${eventCnt} flere hendelse${eventCnt === 1 ? '' : 'r'}`
},
};
var l53 = {
var l50 = {
code: 'ne', // code for nepal
week: {
dow: 7, // Sunday is the first day of the week.
@@ -1260,7 +1063,7 @@
noEventsText: 'देखाउनको लागि कुनै घटनाहरू छैनन्',
};
var l54 = {
var l51 = {
code: 'nl',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1281,7 +1084,7 @@
noEventsText: 'Geen evenementen om te laten zien',
};
var l55 = {
var l52 = {
code: 'nn',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1302,7 +1105,7 @@
noEventsText: 'Ingen hendelser å vise',
};
var l56 = {
var l53 = {
code: 'pl',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1323,7 +1126,7 @@
noEventsText: 'Brak wydarzeń do wyświetlenia',
};
var l57 = {
var l54 = {
code: 'pt-br',
buttonText: {
prev: 'Anterior',
@@ -1342,7 +1145,7 @@
noEventsText: 'Não há eventos para mostrar',
};
var l58 = {
var l55 = {
code: 'pt',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1363,7 +1166,7 @@
noEventsText: 'Não há eventos para mostrar',
};
var l59 = {
var l56 = {
code: 'ro',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1386,7 +1189,7 @@
noEventsText: 'Nu există evenimente de afișat',
};
var l60 = {
var l57 = {
code: 'ru',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1409,28 +1212,7 @@
noEventsText: 'Нет событий для отображения',
};
var l61 = {
code: 'si-lk',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'පෙර',
next: 'පසු',
today: 'අද',
month: 'මාසය',
week: 'සතිය',
day: 'දවස',
list: 'ලැයිස්තුව',
},
weekText: 'සති',
allDayText: 'සියලු',
moreLinkText: 'තවත්',
noEventsText: 'මුකුත් නැත',
};
var l62 = {
var l58 = {
code: 'sk',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1453,7 +1235,7 @@
noEventsText: 'Žiadne akcie na zobrazenie',
};
var l63 = {
var l59 = {
code: 'sl',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1474,24 +1256,7 @@
noEventsText: 'Ni dogodkov za prikaz',
};
var l64 = {
code: 'sm',
buttonText: {
prev: 'Talu ai',
next: 'Mulimuli atu',
today: 'Aso nei',
month: 'Masina',
week: 'Vaiaso',
day: 'Aso',
list: 'Faasologa',
},
weekText: 'Vaiaso',
allDayText: 'Aso atoa',
moreLinkText: 'sili atu',
noEventsText: 'Leai ni mea na tutupu',
};
var l65 = {
var l60 = {
code: 'sq',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1514,7 +1279,7 @@
noEventsText: 'Nuk ka evente për të shfaqur',
};
var l66 = {
var l61 = {
code: 'sr-cyrl',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1537,7 +1302,7 @@
noEventsText: 'Нема догађаја за приказ',
};
var l67 = {
var l62 = {
code: 'sr',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1560,7 +1325,7 @@
noEventsText: 'Nеma događaja za prikaz',
};
var l68 = {
var l63 = {
code: 'sv',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1575,56 +1340,13 @@
day: 'Dag',
list: 'Program',
},
buttonHints: {
prev(buttonText) {
return `Föregående ${buttonText.toLocaleLowerCase()}`
},
next(buttonText) {
return `Nästa ${buttonText.toLocaleLowerCase()}`
},
today(buttonText) {
return (buttonText === 'Program' ? 'Detta' : 'Denna') + ' ' + buttonText.toLocaleLowerCase()
},
},
viewHint: '$0 vy',
navLinkHint: 'Gå till $0',
moreLinkHint(eventCnt) {
return `Visa ytterligare ${eventCnt} händelse${eventCnt === 1 ? '' : 'r'}`
},
weekText: 'v.',
weekTextLong: 'Vecka',
allDayText: 'Heldag',
moreLinkText: 'till',
noEventsText: 'Inga händelser att visa',
closeHint: 'Stäng',
timeHint: 'Klockan',
eventHint: 'Händelse',
};
var l69 = {
code: 'ta-in',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'முந்தைய',
next: 'அடுத்தது',
today: 'இன்று',
month: 'மாதம்',
week: 'வாரம்',
day: 'நாள்',
list: 'தினசரி அட்டவணை',
},
weekText: 'வாரம்',
allDayText: 'நாள் முழுவதும்',
moreLinkText: function(n) {
return '+ மேலும் ' + n
},
noEventsText: 'காண்பிக்க நிகழ்வுகள் இல்லை',
};
var l70 = {
var l64 = {
code: 'th',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1648,7 +1370,7 @@
noEventsText: 'ไม่มีกิจกรรมที่จะแสดง',
};
var l71 = {
var l65 = {
code: 'tr',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1669,7 +1391,7 @@
noEventsText: 'Gösterilecek etkinlik yok',
};
var l72 = {
var l66 = {
code: 'ug',
buttonText: {
month: 'ئاي',
@@ -1680,7 +1402,7 @@
allDayText: 'پۈتۈن كۈن',
};
var l73 = {
var l67 = {
code: 'uk',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1703,7 +1425,7 @@
noEventsText: 'Немає подій для відображення',
};
var l74 = {
var l68 = {
code: 'uz',
buttonText: {
month: 'Oy',
@@ -1718,7 +1440,7 @@
noEventsText: "Ko'rsatish uchun voqealar yo'q",
};
var l75 = {
var l69 = {
code: 'vi',
week: {
dow: 1, // Monday is the first day of the week.
@@ -1741,7 +1463,7 @@
noEventsText: 'Không có sự kiện để hiển thị',
};
var l76 = {
var l70 = {
code: 'zh-cn',
week: {
// GB/T 7408-1994 'Data elements and interchange formats - Information interchange - Representation of dates and times' is equivalent to ISO 8601:1988
@@ -1765,7 +1487,7 @@
noEventsText: '没有事件显示',
};
var l77 = {
var l71 = {
code: 'zh-tw',
buttonText: {
prev: '上月',
@@ -1785,7 +1507,7 @@
/* eslint max-len: off */
var localesAll = [
l0, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, l12, l13, l14, l15, l16, l17, l18, l19, l20, l21, l22, l23, l24, l25, l26, l27, l28, l29, l30, l31, l32, l33, l34, l35, l36, l37, l38, l39, l40, l41, l42, l43, l44, l45, l46, l47, l48, l49, l50, l51, l52, l53, l54, l55, l56, l57, l58, l59, l60, l61, l62, l63, l64, l65, l66, l67, l68, l69, l70, l71, l72, l73, l74, l75, l76, l77,
l0, l1, l2, l3, l4, l5, l6, l7, l8, l9, l10, l11, l12, l13, l14, l15, l16, l17, l18, l19, l20, l21, l22, l23, l24, l25, l26, l27, l28, l29, l30, l31, l32, l33, l34, l35, l36, l37, l38, l39, l40, l41, l42, l43, l44, l45, l46, l47, l48, l49, l50, l51, l52, l53, l54, l55, l56, l57, l58, l59, l60, l61, l62, l63, l64, l65, l66, l67, l68, l69, l70, l71,
];
return localesAll;

File diff suppressed because one or more lines are too long


@@ -1,29 +0,0 @@
FullCalendar.globalLocales.push(function () {
'use strict';
var bn = {
code: 'bn',
week: {
dow: 0, // Sunday is the first day of the week.
doy: 6, // The week that contains Jan 1st is the first week of the year.
},
buttonText: {
prev: 'পেছনে',
next: 'সামনে',
today: 'আজ',
month: 'মাস',
week: 'সপ্তাহ',
day: 'দিন',
list: 'তালিকা',
},
weekText: 'সপ্তাহ',
allDayText: 'সারাদিন',
moreLinkText: function(n) {
return '+অন্যান্য ' + n
},
noEventsText: 'কোনো ইভেন্ট নেই',
};
return bn;
}());


@@ -1,11 +1,6 @@
FullCalendar.globalLocales.push(function () {
'use strict';
function affix(buttonText) {
return (buttonText === 'Tag' || buttonText === 'Monat') ? 'r' :
buttonText === 'Jahr' ? 's' : ''
}
var deAt = {
code: 'de-at',
week: {
@@ -23,41 +18,11 @@ FullCalendar.globalLocales.push(function () {
list: 'Terminübersicht',
},
weekText: 'KW',
weekTextLong: 'Woche',
allDayText: 'Ganztägig',
moreLinkText: function(n) {
return '+ weitere ' + n
},
noEventsText: 'Keine Ereignisse anzuzeigen',
buttonHints: {
prev(buttonText) {
return `Vorherige${affix(buttonText)} ${buttonText}`
},
next(buttonText) {
return `Nächste${affix(buttonText)} ${buttonText}`
},
today(buttonText) {
// → Heute, Diese Woche, Dieser Monat, Dieses Jahr
if (buttonText === 'Tag') {
return 'Heute'
}
return `Diese${affix(buttonText)} ${buttonText}`
},
},
viewHint(buttonText) {
// → Tagesansicht, Wochenansicht, Monatsansicht, Jahresansicht
const glue = buttonText === 'Woche' ? 'n' : buttonText === 'Monat' ? 's' : 'es';
return buttonText + glue + 'ansicht'
},
navLinkHint: 'Gehe zu $0',
moreLinkHint(eventCnt) {
return 'Zeige ' + (eventCnt === 1 ?
'ein weiteres Ereignis' :
eventCnt + ' weitere Ereignisse')
},
closeHint: 'Schließen',
timeHint: 'Uhrzeit',
eventHint: 'Ereignis',
};
return deAt;


@@ -1,11 +1,6 @@
FullCalendar.globalLocales.push(function () {
'use strict';
function affix(buttonText) {
return (buttonText === 'Tag' || buttonText === 'Monat') ? 'r' :
buttonText === 'Jahr' ? 's' : ''
}
var de = {
code: 'de',
week: {
@@ -23,41 +18,11 @@ FullCalendar.globalLocales.push(function () {
list: 'Terminübersicht',
},
weekText: 'KW',
weekTextLong: 'Woche',
allDayText: 'Ganztägig',
moreLinkText: function(n) {
return '+ weitere ' + n
},
noEventsText: 'Keine Ereignisse anzuzeigen',
buttonHints: {
prev(buttonText) {
return `Vorherige${affix(buttonText)} ${buttonText}`
},
next(buttonText) {
return `Nächste${affix(buttonText)} ${buttonText}`
},
today(buttonText) {
// → Heute, Diese Woche, Dieser Monat, Dieses Jahr
if (buttonText === 'Tag') {
return 'Heute'
}
return `Diese${affix(buttonText)} ${buttonText}`
},
},
viewHint(buttonText) {
// → Tagesansicht, Wochenansicht, Monatsansicht, Jahresansicht
const glue = buttonText === 'Woche' ? 'n' : buttonText === 'Monat' ? 's' : 'es';
return buttonText + glue + 'ansicht'
},
navLinkHint: 'Gehe zu $0',
moreLinkHint(eventCnt) {
return 'Zeige ' + (eventCnt === 1 ?
'ein weiteres Ereignis' :
eventCnt + ' weitere Ereignisse')
},
closeHint: 'Schließen',
timeHint: 'Uhrzeit',
eventHint: 'Ereignis',
};
return de;


@@ -7,16 +7,6 @@ FullCalendar.globalLocales.push(function () {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
return enAu;


@@ -7,16 +7,6 @@ FullCalendar.globalLocales.push(function () {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
return enGb;


@@ -7,16 +7,6 @@ FullCalendar.globalLocales.push(function () {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonHints: {
prev: 'Previous $0',
next: 'Next $0',
today: 'This $0',
},
viewHint: '$0 view',
navLinkHint: 'Go to $0',
moreLinkHint(eventCnt) {
return `Show ${eventCnt} more event${eventCnt === 1 ? '' : 's'}`
},
};
return enNz;


@@ -16,29 +16,10 @@ FullCalendar.globalLocales.push(function () {
day: 'Día',
list: 'Agenda',
},
buttonHints: {
prev: '$0 antes',
next: '$0 siguiente',
today(buttonText) {
return (buttonText === 'Día') ? 'Hoy' :
((buttonText === 'Semana') ? 'Esta' : 'Este') + ' ' + buttonText.toLocaleLowerCase()
},
},
viewHint(buttonText) {
return 'Vista ' + (buttonText === 'Semana' ? 'de la' : 'del') + ' ' + buttonText.toLocaleLowerCase()
},
weekText: 'Sm',
weekTextLong: 'Semana',
allDayText: 'Todo el día',
moreLinkText: 'más',
moreLinkHint(eventCnt) {
return `Mostrar ${eventCnt} eventos más`
},
noEventsText: 'No hay eventos para mostrar',
navLinkHint: 'Ir al $0',
closeHint: 'Cerrar',
timeHint: 'La hora',
eventHint: 'Evento',
};
return es;


@@ -14,7 +14,7 @@ FullCalendar.globalLocales.push(function () {
month: 'Hónap',
week: 'Hét',
day: 'Nap',
list: 'Lista',
list: 'Napló',
},
weekText: 'Hét',
allDayText: 'Egész nap',


@@ -1,28 +0,0 @@
FullCalendar.globalLocales.push(function () {
'use strict';
var km = {
code: 'km',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'មុន',
next: 'បន្ទាប់',
today: 'ថ្ងៃនេះ',
year: 'ឆ្នាំ',
month: 'ខែ',
week: 'សប្តាហ៍',
day: 'ថ្ងៃ',
list: 'បញ្ជី',
},
weekText: 'សប្តាហ៍',
allDayText: 'ពេញមួយថ្ងៃ',
moreLinkText: 'ច្រើនទៀត',
noEventsText: 'គ្មានព្រឹត្តិការណ៍ត្រូវបង្ហាញ',
};
return km;
}());


@@ -1,28 +0,0 @@
FullCalendar.globalLocales.push(function () {
'use strict';
var ku = {
code: 'ku',
week: {
dow: 6, // Saturday is the first day of the week.
doy: 12, // The week that contains Jan 1st is the first week of the year.
},
direction: 'rtl',
buttonText: {
prev: 'پێشتر',
next: 'دواتر',
today: 'ئەمڕو',
month: 'مانگ',
week: 'هەفتە',
day: 'ڕۆژ',
list: 'بەرنامە',
},
weekText: 'هەفتە',
allDayText: 'هەموو ڕۆژەکە',
moreLinkText: 'زیاتر',
noEventsText: 'هیچ ڕووداوێك نیە',
};
return ku;
}());


@@ -17,20 +17,9 @@ FullCalendar.globalLocales.push(function () {
list: 'Agenda',
},
weekText: 'Uke',
weekTextLong: 'Uke',
allDayText: 'Hele dagen',
moreLinkText: 'til',
noEventsText: 'Ingen hendelser å vise',
buttonHints: {
prev: 'Forrige $0',
next: 'Neste $0',
today: 'Nåværende $0',
},
viewHint: '$0 visning',
navLinkHint: 'Gå til $0',
moreLinkHint(eventCnt) {
return `Vis ${eventCnt} flere hendelse${eventCnt === 1 ? '' : 'r'}`
},
};
return nb;


@@ -1,27 +0,0 @@
FullCalendar.globalLocales.push(function () {
'use strict';
var siLk = {
code: 'si-lk',
week: {
dow: 1, // Monday is the first day of the week.
doy: 4, // The week that contains Jan 4th is the first week of the year.
},
buttonText: {
prev: 'පෙර',
next: 'පසු',
today: 'අද',
month: 'මාසය',
week: 'සතිය',
day: 'දවස',
list: 'ලැයිස්තුව',
},
weekText: 'සති',
allDayText: 'සියලු',
moreLinkText: 'තවත්',
noEventsText: 'මුකුත් නැත',
};
return siLk;
}());

Some files were not shown because too many files have changed in this diff