Compare commits

..

6 Commits

Author | SHA1 | Message | Date
Oliver | b18f360daf | 0.4.2 | 2021-08-02 08:43:04 +10:00
Oliver | 20cc952982 | Merge pull request #1887 from matmair/settings-safety: settings fixes (cherry picked from commit d154ca08ea) | 2021-08-02 08:42:34 +10:00
Oliver | cd39fd1dc2 | Merge pull request #1890 from matmair/fix-for-1888: catch connection errors in exchange update (cherry picked from commit db57e9516b) | 2021-08-02 08:42:26 +10:00
Oliver | 0e59c15773 | 0.4.1 | 2021-07-30 11:26:53 +10:00
Oliver | 0a73032950 | Merge pull request #1877 from eeintech/fix_search_js: Fixed missing comma propagating to translated JS files (cherry picked from commit 2009773d9d) | 2021-07-29 08:27:49 +10:00
Oliver | a7229b5b0b | Merge pull request #1874 from SchrodingersGat/docker-dev-fix: Copy static files when starting dev server (cherry picked from commit 50eb70f538) | 2021-07-28 22:50:31 +10:00
4860 changed files with 258144 additions and 1232497 deletions

9
.coveragerc Normal file
View File

@@ -0,0 +1,9 @@
[run]
source = ./InvenTree
omit =
InvenTree/manage.py
InvenTree/setup.py
InvenTree/InvenTree/middleware.py
InvenTree/InvenTree/utils.py
InvenTree/InvenTree/wsgi.py
InvenTree/users/apps.py
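
This configuration is picked up automatically by coverage.py when commands are run from the directory containing .coveragerc. A minimal local sketch (assuming coverage.py is installed; paths may need adjusting) that mirrors what the CI pipelines below do:

# honours the [run] source/omit settings above
coverage run ./InvenTree/manage.py test
coverage report -m
coverage xml -i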

View File

@@ -1,70 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.241.1/containers/python-3
{
"name": "InvenTree devcontainer",
"dockerComposeFile": "docker-compose.yml",
"service": "inventree",
"overrideCommand": true,
"workspaceFolder": "/home/inventree/",
// Configure tool-specific properties.
"customizations": {
// Configure properties specific to VS Code.
"vscode": {
// Set *default* container specific settings.json values on container create.
"settings": {
"python.defaultInterpreterPath": "${containerWorkspaceFolder}/dev/venv/bin/python",
"python.linting.enabled": true,
"python.linting.pylintEnabled": false,
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"batisteo.vscode-django",
"eamodio.gitlens"
]
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [5173, 8000, 8080],
"portsAttributes": {
"5173": {
"label": "Vite Server"
},
"8000": {
"label": "InvenTree Server"
},
"8080": {
"label": "mkdocs server"
}
},
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": ".devcontainer/postCreateCommand.sh",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
"containerUser": "vscode",
"remoteEnv": {
// Python config
"PIP_USER": "no",
// used to load the venv into the PATH and activate it
// Ref: https://stackoverflow.com/a/56286534
"VIRTUAL_ENV": "${containerWorkspaceFolder}/dev/venv",
"PATH": "${containerWorkspaceFolder}/dev/venv/bin:${containerEnv:PATH}"
}
}
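
The remoteEnv block above activates the virtual environment without an explicit activate step: prepending the venv's bin directory to PATH is sufficient. A rough shell equivalent of what those two variables achieve:

export VIRTUAL_ENV=/home/inventree/dev/venv
export PATH="$VIRTUAL_ENV/bin:$PATH"
command -v python   # now resolves to /home/inventree/dev/venv/bin/python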

View File

@@ -1,49 +0,0 @@
services:
db:
image: postgres:13
restart: unless-stopped
expose:
- 5432/tcp
volumes:
- inventreedatabase:/var/lib/postgresql/data:z
environment:
POSTGRES_DB: inventree
POSTGRES_USER: inventree_user
POSTGRES_PASSWORD: inventree_password
redis:
image: redis:7.0
restart: always
expose:
- 6379
inventree:
build:
context: ..
dockerfile: ../InvenTree/contrib/container/Dockerfile
target: dev
args:
base_image: "mcr.microsoft.com/vscode/devcontainers/base:alpine-3.18"
data_dir: "dev"
volumes:
- ../:/home/inventree:z
environment:
INVENTREE_DEBUG: True
INVENTREE_DB_ENGINE: postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_HOST: db
INVENTREE_DB_USER: inventree_user
INVENTREE_DB_PASSWORD: inventree_password
INVENTREE_CACHE_HOST: redis
INVENTREE_CACHE_PORT: 6379
INVENTREE_PLUGINS_ENABLED: True
INVENTREE_SITE_URL: http://localhost:8000
INVENTREE_CORS_ORIGIN_ALLOW_ALL: True
INVENTREE_PY_ENV: /home/inventree/dev/venv
depends_on:
- db
volumes:
inventreedatabase:
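
Outside of the devcontainer, the same stack can be exercised with docker compose directly. A sketch, assuming the file sits next to devcontainer.json as .devcontainer/docker-compose.yml:

# start only the backing services (PostgreSQL and Redis)
docker compose -f .devcontainer/docker-compose.yml up -d db redis
# build and start the inventree dev service as well
docker compose -f .devcontainer/docker-compose.yml up -d --build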

View File

@@ -1,22 +0,0 @@
#!/bin/bash
# Avoiding Dubious Ownership in Dev Containers for setup commands that use git
git config --global --add safe.directory /home/inventree
# create venv
python3 -m venv /home/inventree/dev/venv --system-site-packages --upgrade-deps
. /home/inventree/dev/venv/bin/activate
# Run initial InvenTree server setup
invoke update -s
# Configure dev environment
invoke setup-dev
# Install required frontend packages
invoke frontend-install
# remove the gitconfig created by the "Avoiding Dubious Ownership" step above,
# so that your global git config gets copied from the host into the container
rm -f /home/vscode/.gitconfig

View File

@@ -1,71 +0,0 @@
# Python Django
# Test a Django project on multiple versions of Python.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
- master
pool:
vmImage: ubuntu-latest
strategy:
matrix:
Python39:
PYTHON_VERSION: '3.9'
maxParallel: 3
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(PYTHON_VERSION)'
architecture: 'x64'
- task: PythonScript@0
displayName: 'Export project path'
inputs:
scriptSource: 'inline'
script: |
"""Search all subdirectories for `manage.py`."""
from glob import iglob
from os import path
# Python >= 3.5
manage_py = next(iglob(path.join('**', 'manage.py'), recursive=True), None)
if not manage_py:
raise SystemExit('Could not find a Django project')
project_location = path.dirname(path.abspath(manage_py))
print('Found Django project in', project_location)
print('##vso[task.setvariable variable=projectRoot]{}'.format(project_location))
- script: |
python -m pip install --upgrade pip setuptools wheel
pip install --require-hashes -r requirements.txt
pip install --require-hashes -r requirements-dev.txt
pip install unittest-xml-reporting coverage invoke
sudo apt-get install poppler-utils
sudo apt-get install libpoppler-dev
displayName: 'Install prerequisites'
- script: |
pushd '$(projectRoot)'
invoke update
coverage run manage.py test --testrunner xmlrunner.extra.djangotestrunner.XMLTestRunner --no-input
coverage xml -i
displayName: 'Run tests'
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: inventree
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
INVENTREE_PLUGINS_ENABLED: true
- task: PublishTestResults@2
inputs:
testResultsFiles: "**/TEST-*.xml"
testRunTitle: 'Python $(PYTHON_VERSION)'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'

View File

@@ -1,3 +0,0 @@
# .git-blame-ignore-revs
# Code Structure refactor https://github.com/inventree/InvenTree/pull/5582
0bace3f3afaa213c63b5dcc70103f0d232637a9a
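
For git blame to honour this file it must be passed explicitly or configured once per clone; both options are standard git:

git blame --ignore-revs-file .git-blame-ignore-revs path/to/file   # path/to/file is a placeholder
git config blame.ignoreRevsFile .git-blame-ignore-revs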

2
.gitattributes vendored
View File

@@ -8,4 +8,4 @@
*.yaml text
*.conf text
*.sh text eol=lf
*.js text
*.js text

13
.github/CODEOWNERS vendored
View File

@@ -1,13 +0,0 @@
# General owner is the maintainers team
* @SchrodingersGat
# plugins are co-owned
/src/backend/InvenTree/plugin/ @SchrodingersGat @matmair
/src/backend/InvenTree/plugins/ @SchrodingersGat @matmair
# Installer functions
.pkgr.yml @matmair
Procfile @matmair
runtime.txt @matmair
/contrib/installer @matmair
/contrib/packager.io @matmair

5
.github/FUNDING.yml vendored
View File

@@ -1,5 +0,0 @@
github: inventree
ko_fi: inventree
patreon: inventree
polar: inventree
custom: [paypal.me/inventree]

View File

@@ -1,71 +0,0 @@
name: "Bug"
description: "Create a bug report to help us improve InvenTree!"
labels: ["bug", "question", "triage:not-checked"]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "Please verify that this bug has NOT been raised before."
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=) and read the [Frequently Asked Questions](https://docs.inventree.org/en/latest/faq/)!"
options:
- label: "I checked and didn't find a similar issue"
required: true
- type: textarea
id: description
validations:
required: true
attributes:
label: "Describe the bug*"
description: "A clear and concise description of what the bug is."
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "Steps to Reproduce"
description: "Steps to reproduce the behaviour, please make it detailed"
placeholder: |
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See the error
- type: textarea
id: expected-behavior
validations:
required: true
attributes:
label: "Expected behaviour"
description: "A clear and concise description of what you expected to happen."
placeholder: "..."
- type: checkboxes
id: deployment
attributes:
label: "Deployment Method"
options:
- label: "Docker"
- label: "Package"
- label: "Bare metal"
- label: "Other - added info in Steps to Reproduce"
- type: textarea
id: version-info
validations:
required: true
attributes:
label: "Version Information"
description: "The version info block."
placeholder: "You can get this by going to the `About InvenTree` section in the upper right corner and clicking on the `copy version information` button"
- type: checkboxes
id: can-reproduce
attributes:
label: "Please verify if you can reproduce this bug on the demo site."
description: "You can sign in at [InvenTree Demo](https://demo.inventree.org) with admin:inventree. Note that this instance runs on the latest dev version, so your bug may be fixed there."
options:
- label: "I can reproduce this bug on the demo site."
- type: textarea
id: logs
attributes:
label: "Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: false

View File

@@ -1 +0,0 @@
blank_issues_enabled: false

View File

@@ -1,15 +0,0 @@
name: "Documentation"
description: "Create an issue to improve the documentation"
labels: ["documentation", "triage:not-checked"]
body:
- type: markdown
attributes:
value: |
Create a new issue regarding the InvenTree documentation
- type: textarea
id: repro
attributes:
label: Body of the issue
description: Please provide one distinct thing to fix or a clearly defined enhancement
validations:
required: true

View File

@@ -1,53 +0,0 @@
name: Feature Request
description: Suggest an idea for this project
title: "[FR] title"
labels: ["enhancement", "triage:not-checked"]
body:
- type: checkboxes
id: no-duplicate-issues
attributes:
label: "Please verify that this feature request has NOT been suggested before."
description: "Search in the issues sections by clicking [HERE](https://github.com/inventree/inventree/issues?q=)"
options:
- label: "I checked and didn't find a similar feature request"
required: true
- type: textarea
id: problem
validations:
required: true
attributes:
label: "Problem statement"
description: "A clear and concise description of the problem or missing feature."
placeholder: "I am always struggling with ..."
- type: textarea
id: solution
validations:
required: true
attributes:
label: "Suggested solution"
description: "A clear and concise description of what you want to happen to solve the problem statement."
placeholder: "In my use-case, ..."
- type: textarea
id: alternatives
validations:
required: true
attributes:
label: "Describe alternatives you've considered"
description: "A clear and concise description of any alternative solutions or features you've considered."
placeholder: "This could also be done by doing ..."
- type: textarea
id: examples
validations:
required: false
attributes:
label: "Examples of other systems"
description: "Show how other software handles your FR if you have examples."
placeholder: "I software xxx this is done in the following way..."
- type: checkboxes
id: self-develop
attributes:
label: "Do you want to develop this?"
description: "This is not required, and you do not need to be a pro - this is just as information for us."
options:
- label: "I want to develop this."
required: false

View File

@@ -1,46 +0,0 @@
name: "Install problems"
description: "If you have problems deploying InvenTree"
labels: ["question", "triage:not-checked", "setup"]
body:
- type: checkboxes
id: deployment
validations:
required: true
attributes:
label: "Deployment Method"
options:
- label: "Installer"
- label: "Docker Development"
- label: "Docker Production"
- label: "Bare metal Development"
- label: "Bare metal Production"
- label: "Digital Ocean image"
- label: "Other (please provide a link `Steps to Reproduce`"
- type: textarea
id: description
validations:
required: true
attributes:
label: "Describe the problem*"
description: "A clear and concise description of what is failing."
- type: textarea
id: steps-to-reproduce
validations:
required: true
attributes:
label: "Steps to Reproduce"
description: "Steps to reproduce the behaviour, please make it detailed"
placeholder: |
0. Link to all docs you used
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See the error
- type: textarea
id: logs
attributes:
label: "Relevant log output"
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
render: bash
validations:
required: false

View File

@@ -1,17 +0,0 @@
name: 'Migration test'
description: 'Run migration test sequence'
author: 'InvenTree'
runs:
using: 'composite'
steps:
- name: Data Import Export
shell: bash
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
python3 ./src/backend/InvenTree/manage.py flush --noinput
invoke migrate
invoke import-records -c -f data.json
invoke import-records -c -f data.json

View File

@@ -1,101 +0,0 @@
name: 'Setup Environment'
description: 'Setup the environment for general InvenTree tests'
author: 'InvenTree'
inputs:
python:
required: false
description: 'Install python.'
default: 'true'
npm:
required: false
description: 'Install npm.'
default: 'false'
install:
required: false
description: 'Install the InvenTree requirements?'
default: 'false'
dev-install:
required: false
description: 'Install the InvenTree development requirements?'
default: 'false'
update:
required: false
description: 'Should a full update cycle be run?'
default: 'false'
apt-dependency:
required: false
description: 'Extra APT package for install.'
pip-dependency:
required: false
description: 'Extra python package for install.'
runs:
using: 'composite'
steps:
- name: Checkout Code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
# Python installs
- name: Set up Python ${{ env.python_version }}
if: ${{ inputs.python == 'true' }}
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # pin@v5.0.0
with:
python-version: ${{ env.python_version }}
cache: pip
cache-dependency-path: |
src/backend/requirements.txt
src/backend/requirements-dev.txt
contrib/container/requirements.txt
contrib/dev_reqs/requirements.txt
- name: Install Base Python Dependencies
if: ${{ inputs.python == 'true' }}
shell: bash
run: |
python3 -m pip install -U pip
pip3 install -U invoke wheel
pip3 install 'uv<0.3.0'
- name: Allow uv to use the system Python by default
run: echo "UV_SYSTEM_PYTHON=1" >> $GITHUB_ENV
shell: bash
- name: Install Specific Python Dependencies
if: ${{ inputs.pip-dependency }}
shell: bash
run: uv pip install ${{ inputs.pip-dependency }}
# NPM installs
- name: Install node.js ${{ env.node_version }}
if: ${{ inputs.npm == 'true' }}
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 # pin to v3.8.2
with:
node-version: ${{ env.node_version }}
cache: 'npm'
cache-dependency-path: src/backend/package-lock.json
- name: Install npm packages
if: ${{ inputs.npm == 'true' }}
shell: bash
run: cd src/backend && npm install
# OS installs
- name: Install OS Dependencies
if: ${{ inputs.apt-dependency }}
shell: bash
run: |
sudo apt-get update
sudo apt-get install ${{ inputs.apt-dependency }}
# Invoke commands
- name: Install dev requirements
if: ${{ inputs.dev-install == 'true' || inputs.install == 'true' }}
shell: bash
run: uv pip install --require-hashes -r src/backend/requirements-dev.txt
- name: Run invoke install
if: ${{ inputs.install == 'true' }}
shell: bash
run: invoke install --uv
- name: Run invoke update
if: ${{ inputs.update == 'true' }}
shell: bash
run: invoke update --uv --skip-backup --skip-static

View File

@@ -1,39 +0,0 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: weekly
groups:
dependencies:
patterns:
- "*" # Include all dependencies
- package-ecosystem: docker
directory: /contrib/container
schedule:
interval: weekly
- package-ecosystem: pip
directories:
- /contrib/container
- /docs
- /contrib/dev_reqs
- /src/backend
schedule:
interval: weekly
groups:
dependencies:
patterns:
- "*" # Include all dependencies
- package-ecosystem: npm
directories:
- /src/frontend
- /src/backend
schedule:
interval: weekly
groups:
dependencies:
patterns:
- "*" # Include all dependencies

39
.github/release.yml vendored
View File

@@ -1,39 +0,0 @@
# .github/release.yml
changelog:
exclude:
labels:
- translation
- documentation
categories:
- title: Breaking Changes
labels:
- Semver-Major
- breaking
- title: Security Patches
labels:
- security
- title: New Features
labels:
- Semver-Minor
- feature
- enhancement
- title: Experimental Features
labels:
- experimental
- title: Bug Fixes
labels:
- Semver-Patch
- bug
- title: Devops / Setup Changes
labels:
- docker
- setup
- demo
- CI
- title: Dependencies
labels:
- dependency
- title: Other Changes
labels:
- "*"

View File

@@ -1,103 +0,0 @@
"""Test that the "translated" javascript files to not contain template tags which need to be determined at "run time".
This is because the "translated" javascript files are compiled into the "static" directory.
They should only contain template tags that render static information.
"""
import os
import pathlib
import re
import sys
here = os.path.abspath(os.path.dirname(__file__))
template_dir = os.path.abspath(os.path.join(here, '..', 'InvenTree', 'templates'))
# We only care about the 'translated' files
js_i18n_dir = os.path.join(template_dir, 'js', 'translated')
js_dynamic_dir = os.path.join(template_dir, 'js', 'dynamic')
errors = 0
print('=================================')
print('Checking static javascript files:')
print('=================================')
def check_invalid_tag(data):
"""Check for invalid tags."""
pattern = r'{%(\w+)'
err_count = 0
for idx, line in enumerate(data):
results = re.findall(pattern, line)
for result in results:
err_count += 1
print(f' - Error on line {idx + 1}: {{%{result}')
return err_count
def check_prohibited_tags(data):
"""Check for prohibited tags."""
allowed_tags = [
'if',
'elif',
'else',
'endif',
'for',
'endfor',
'trans',
'load',
'include',
'url',
]
pattern = r'{% (\w+)\s'
err_count = 0
for idx, line in enumerate(data):
for tag in re.findall(pattern, line):
if tag not in allowed_tags:
print(f" > Line {idx + 1} contains prohibited template tag '{tag}'")
err_count += 1
return err_count
for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
print(f"Checking file 'translated/{os.path.basename(filename)}':")
with open(filename, 'r') as js_file:
data = js_file.readlines()
errors += check_invalid_tag(data)
errors += check_prohibited_tags(data)
for filename in pathlib.Path(js_dynamic_dir).rglob('*.js'):
print(f"Checking file 'dynamic/{os.path.basename(filename)}':")
# Check that the 'dynamic' files do not contain any translated strings
with open(filename, 'r') as js_file:
data = js_file.readlines()
invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']
err_count = 0
for idx, line in enumerate(data):
for tag in invalid_tags:
tag = '{% ' + tag
if tag in line:
err_count += 1
print(f" > Error on line {idx + 1}: Prohibited tag '{tag}' found")
if errors > 0:
print(f'Found {errors} incorrect template tags')
sys.exit(errors)

View File

@@ -1,28 +0,0 @@
"""Check that there are no database migration files which have not been committed."""
import subprocess
import sys
print('Checking for unstaged migration files...')
cmd = ['git', 'ls-files', '--exclude-standard', '--others']
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
migrations = []
for line in str(out.decode()).split('\n'):
if '/migrations/' in line:
migrations.append(line)
if len(migrations) == 0:
sys.exit(0)
print('There are {n} unstaged migration files:'.format(n=len(migrations)))
for m in migrations:
print(' - {m}'.format(m=m))
sys.exit(len(migrations))
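
The check above is roughly equivalent to this shell one-liner, which lists untracked files and keeps only those under a migrations directory:

git ls-files --exclude-standard --others | grep '/migrations/'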

View File

@@ -1,206 +0,0 @@
"""Ensure that the release tag matches the InvenTree version number.
Behaviour:
master / main branch:
- version number must end with 'dev'
tagged branch:
- version number must match tag being built
- version number cannot already exist as a release tag
"""
import json
import os
import re
import sys
from pathlib import Path
import requests
REPO = os.getenv('GITHUB_REPOSITORY', 'inventree/inventree')
GITHUB_API_URL = os.getenv('GITHUB_API_URL', 'https://api.github.com')
def get_existing_release_tags():
"""Request information on existing releases via the GitHub API."""
# Check for github token
token = os.getenv('GITHUB_TOKEN', None)
headers = None
if token:
headers = {'Authorization': f'Bearer {token}'}
response = requests.get(f'{GITHUB_API_URL}/repos/{REPO}/releases', headers=headers)
if response.status_code != 200:
raise ValueError(
f'Unexpected status code from GitHub API: {response.status_code}'
)
data = json.loads(response.text)
# Return a list of all tags
tags = []
for release in data:
tag = release['tag_name'].strip()
match = re.match(r'^.*(\d+)\.(\d+)\.(\d+).*$', tag)
if not match or len(match.groups()) != 3:
print(f"Version '{tag}' did not match expected pattern")
continue
tags.append([int(x) for x in match.groups()])
return tags
def check_version_number(version_string, allow_duplicate=False):
"""Check the provided version number.
Returns True if the provided version is the 'newest' InvenTree release
"""
print(f"Checking version '{version_string}'")
# Check that the version string matches the required format
match = re.match(r'^(\d+)\.(\d+)\.(\d+)(?: dev)?$', version_string)
if not match or len(match.groups()) != 3:
raise ValueError(
f"Version string '{version_string}' did not match required pattern"
)
version_tuple = [int(x) for x in match.groups()]
# Look through the existing releases
existing = get_existing_release_tags()
# Assume that this is the highest release, unless told otherwise
highest_release = True
for release in existing:
if release == version_tuple and not allow_duplicate:
raise ValueError(f"Duplicate release '{version_string}' exists!")
if release > version_tuple:
highest_release = False
print(f'Found newer release: {str(release)}')
return highest_release
if __name__ == '__main__':
# Ensure that we are running in GH Actions
if os.environ.get('GITHUB_ACTIONS', '') != 'true':
print('This script is intended to be run within a GitHub Action!')
sys.exit(1)
if 'only_version' in sys.argv:
here = Path(__file__).parent.absolute()
version_file = here.joinpath(
'..', '..', 'src', 'backend', 'InvenTree', 'InvenTree', 'api_version.py'
)
text = version_file.read_text()
results = re.findall(r"""INVENTREE_API_VERSION = (.*)""", text)
# If the second argument is 'true', lower the version number by 1
if len(sys.argv) > 2 and sys.argv[2] == 'true':
results[0] = str(int(results[0]) - 1)
print(results[0])
exit(0)
# GITHUB_REF_TYPE may be either 'branch' or 'tag'
GITHUB_REF_TYPE = os.environ['GITHUB_REF_TYPE']
# GITHUB_REF may be either 'refs/heads/<branch>' or 'refs/tags/<tag>'
GITHUB_REF = os.environ['GITHUB_REF']
GITHUB_REF_NAME = os.environ['GITHUB_REF_NAME']
GITHUB_BASE_REF = os.environ['GITHUB_BASE_REF']
# Print out version information, makes debugging actions *much* easier!
print(f'GITHUB_REF: {GITHUB_REF}')
print(f'GITHUB_REF_NAME: {GITHUB_REF_NAME}')
print(f'GITHUB_REF_TYPE: {GITHUB_REF_TYPE}')
print(f'GITHUB_BASE_REF: {GITHUB_BASE_REF}')
here = Path(__file__).parent.absolute()
version_file = here.joinpath(
'..', '..', 'src', 'backend', 'InvenTree', 'InvenTree', 'version.py'
)
version = None
with open(version_file, 'r') as f:
text = f.read()
# Extract the InvenTree software version
results = re.findall(r"""INVENTREE_SW_VERSION = '(.*)'""", text)
if len(results) != 1:
print(f'Could not find INVENTREE_SW_VERSION in {version_file}')
sys.exit(1)
version = results[0]
print(f"InvenTree Version: '{version}'")
# Check version number and look for existing versions
# If a release is found which matches the current tag, throw an error
allow_duplicate = False
# Note: on a 'tag' (release) we *must* allow duplicate versions, as this *is* the version that has just been released
if GITHUB_REF_TYPE == 'tag':
allow_duplicate = True
# Note: on a push to 'stable' branch we also allow duplicates
if GITHUB_BASE_REF == 'stable':
allow_duplicate = True
highest_release = check_version_number(version, allow_duplicate=allow_duplicate)
# Determine which docker tag we are going to use
docker_tags = None
if GITHUB_REF_TYPE == 'tag':
# GITHUB_REF should be of the form refs/tags/<tag>
version_tag = GITHUB_REF.split('/')[-1]
print(f"Checking requirements for tagged release - '{version_tag}':")
if version_tag != version:
print(f"Version number '{version}' does not match tag '{version_tag}'")
sys.exit(1)
if highest_release:
docker_tags = [version_tag, 'stable']
else:
docker_tags = [version_tag]
elif GITHUB_REF_TYPE == 'branch':
# Otherwise we know we are targeting the 'master' branch
docker_tags = ['latest']
else:
print('Unsupported branch / version combination:')
print(f'InvenTree Version: {version}')
print('GITHUB_REF_TYPE:', GITHUB_REF_TYPE)
print('GITHUB_BASE_REF:', GITHUB_BASE_REF)
print('GITHUB_REF:', GITHUB_REF)
sys.exit(1)
if docker_tags is None:
print('Docker tags could not be determined')
sys.exit(1)
print(f"Version check passed for '{version}'!")
print(f"Docker tags: '{docker_tags}'")
# Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
# Construct tag string
tags = ','.join([f'{REPO.lower()}:{tag}' for tag in docker_tags])
env_file.write(f'docker_tags={tags}\n')
if GITHUB_REF_TYPE == 'tag' and highest_release:
env_file.write('stable_release=true\n')
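
For a hypothetical tagged release 1.2.3 that is also the highest existing version, the lines appended to $GITHUB_ENV would look like the following (illustrative values only, not real output):

# illustrative only
docker_tags=inventree/inventree:1.2.3,inventree/inventree:stable
stable_release=true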

View File

@@ -1,39 +0,0 @@
# Backport tagged issues to a stable branch.
#
# To enable backporting for a pull request, add the label "backport" to the PR.
# Additionally, add a label with the prefix "backport-to-" followed by the target branch.
name: Backport
on:
pull_request_target:
types: ["labeled", "closed"]
jobs:
backport:
name: Backport PR
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
if: |
github.event.pull_request.merged == true
&& contains(github.event.pull_request.labels.*.name, 'backport')
&& (
(github.event.action == 'labeled' && github.event.label.name == 'backport')
|| (github.event.action == 'closed')
)
steps:
- name: Backport Action
uses: sqren/backport-github-action@f54e19901f2a57f8b82360f2490d47ee82ec82c6 # pin@v9.2.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
auto_backport_label_prefix: backport-to-
- name: Info log
if: ${{ success() }}
run: cat ~/.backport/backport.info.log
- name: Debug log
if: ${{ failure() }}
run: cat ~/.backport/backport.debug.log

View File

@@ -1,42 +0,0 @@
name: Check Translations
on:
push:
branches:
- l10
pull_request:
branches:
- l10
env:
python_version: 3.9
permissions:
contents: read
jobs:
check:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: "./test_db.sqlite"
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
INVENTREE_SITE_URL: http://localhost:8000
steps:
- name: Checkout Code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
apt-dependency: gettext
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 .github/scripts/check_migration_files.py

60
.github/workflows/coverage.yaml vendored Normal file
View File

@@ -0,0 +1,60 @@
# Perform CI checks, and calculate code coverage
name: SQLite
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
# Run tests on SQLite database
# These tests are used for code coverage analysis
coverage:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install gettext
pip3 install invoke
invoke install
invoke static
- name: Coverage Tests
run: |
invoke coverage
- name: Data Import Export
run: |
invoke migrate
invoke import-fixtures
invoke export-records -f data.json
rm test_db.sqlite
invoke migrate
invoke import-records -f data.json
invoke import-records -f data.json
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 ci/check_migration_files.py
- name: Upload Coverage Report
run: coveralls

View File

@@ -1,181 +0,0 @@
# Build, test and push InvenTree docker image
# This workflow runs under any of the following conditions:
#
# - Push to the master branch
# - Publish release
#
# The following actions are performed:
#
# - Check that the version number matches the current branch or tag
# - Build the InvenTree docker image
# - Run suite of unit tests against the build image
# - Push the compiled, tested image to dockerhub
name: Docker
on:
release:
types: [published]
push:
branches:
- "master"
pull_request:
branches:
- "master"
permissions:
contents: read
jobs:
paths-filter:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Filter
runs-on: ubuntu-latest
outputs:
docker: ${{ steps.filter.outputs.docker }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
id: filter
with:
filters: |
docker:
- .github/workflows/docker.yaml
- contrib/container/**
- src/backend/InvenTree/InvenTree/settings.py
- src/backend/requirements.txt
- tasks.py
# Build the docker image
build:
needs: paths-filter
if: needs.paths-filter.outputs.docker == 'true' || github.event_name == 'release' || github.event_name == 'push'
permissions:
contents: read
packages: write
id-token: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
python_version: "3.11"
runs-on: ubuntu-latest # in the future we can try to use alternative runners here
steps:
- name: Check out repo
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Set Up Python ${{ env.python_version }}
uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
with:
python-version: ${{ env.python_version }}
- name: Version Check
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
python3 .github/scripts/version_check.py
echo "git_commit_hash=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
echo "git_commit_date=$(git show -s --format=%ci)" >> $GITHUB_ENV
- name: Test Docker Image
id: test-docker
run: |
docker build . --target production --tag inventree-test -f contrib/container/Dockerfile
docker run --rm inventree-test invoke --version
docker run --rm inventree-test invoke --list
docker run --rm inventree-test gunicorn --version
docker run --rm inventree-test pg_dump --version
docker run --rm inventree-test test -f /home/inventree/init.sh
docker run --rm inventree-test test -f /home/inventree/tasks.py
docker run --rm inventree-test test -f /home/inventree/gunicorn.conf.py
docker run --rm inventree-test test -f /home/inventree/src/backend/requirements.txt
docker run --rm inventree-test test -f /home/inventree/src/backend/InvenTree/manage.py
- name: Build Docker Image
# Build the development docker image (using docker-compose.yml)
run: docker compose --project-directory . -f contrib/container/dev-docker-compose.yml build --no-cache
- name: Update Docker Image
run: |
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke install
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke update
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke setup-dev
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml up -d
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run inventree-dev-server invoke wait
- name: Check Data Directory
# The following file structure should have been created by the docker image
run: |
test -d data
test -d data/env
test -d data/pgdb
test -d data/media
test -d data/static
test -d data/plugins
test -f data/config.yaml
test -f data/plugins.txt
test -f data/secret_key.txt
- name: Run Unit Tests
run: |
echo "GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }}" >> contrib/container/docker.dev.env
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run --rm inventree-dev-server invoke test --disable-pty
- name: Run Migration Tests
run: |
docker compose --project-directory . -f contrib/container/dev-docker-compose.yml run --rm inventree-dev-server invoke test --migrations
- name: Clean up test folder
run: |
rm -rf InvenTree/_testfolder
- name: Set up QEMU
if: github.event_name != 'pull_request'
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # pin@v3.2.0
- name: Set up Docker Buildx
if: github.event_name != 'pull_request'
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # pin@v3.6.1
- name: Set up cosign
if: github.event_name != 'pull_request'
uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # pin@v3.5.0
- name: Check if Dockerhub login is required
id: docker_login
run: |
if [ -z "${{ secrets.DOCKER_USERNAME }}" ]; then
echo "skip_dockerhub_login=true" >> $GITHUB_ENV
else
echo "skip_dockerhub_login=false" >> $GITHUB_ENV
fi
- name: Login to Dockerhub
if: github.event_name != 'pull_request' && steps.docker_login.outputs.skip_dockerhub_login != 'true'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # pin@v3.3.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Log into registry ghcr.io
if: github.event_name != 'pull_request'
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # pin@v3.3.0
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract Docker metadata
if: github.event_name != 'pull_request'
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # pin@v5.5.1
with:
images: |
inventree/inventree
ghcr.io/${{ github.repository }}
- name: Push Docker Images
id: push-docker
if: github.event_name != 'pull_request'
uses: docker/build-push-action@5176d81f87c23d6fc96624dfdbcd9f3830bbe445 # pin@v6.5.0
with:
context: .
file: ./contrib/container/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
sbom: true
provenance: false
target: production
tags: ${{ env.docker_tags }}
build-args: |
commit_hash=${{ env.git_commit_hash }}
commit_date=${{ env.git_commit_date }}

37
.github/workflows/docker_build.yaml vendored Normal file
View File

@@ -0,0 +1,37 @@
# Build and push latest docker image on push to master branch
name: Docker Build
on:
push:
branches:
- 'master'
jobs:
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
repository: inventree/inventree
tags: inventree/inventree:latest
- name: Image Digest
run: echo ${{ steps.docker_build.outputs.digest }}

33
.github/workflows/docker_publish.yaml vendored Normal file
View File

@@ -0,0 +1,33 @@
# Publish docker images to dockerhub
name: Docker Publish
on:
release:
types: [published]
jobs:
publish_image:
name: Push InvenTree web server image to dockerhub
runs-on: ubuntu-latest
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Dockerhub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and Push
uses: docker/build-push-action@v2
with:
context: ./docker
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
target: production
repository: inventree/inventree
tags: inventree/inventree:${{ github.event.release.tag_name }}

28
.github/workflows/javascript.yaml vendored Normal file
View File

@@ -0,0 +1,28 @@
# Check javascript template files
name: Javascript Templates
on:
push:
branches:
- master
pull_request:
branches-ignore:
- l10*
jobs:
javascript:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Check Files
run: |
cd ci
python check_js_templates.py

67
.github/workflows/mysql.yaml vendored Normal file
View File

@@ -0,0 +1,67 @@
# MySQL Unit Testing
name: MySQL
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
test:
runs-on: ubuntu-latest
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.mysql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: root
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 3306
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
services:
mysql:
image: mysql:latest
env:
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_DATABASE: inventree
MYSQL_USER: inventree
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
ports:
- 3306:3306
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get install mysql-server libmysqlclient-dev
pip3 install invoke
pip3 install mysqlclient
invoke install
- name: Run Tests
run: invoke test
- name: Data Import Export
run: |
invoke migrate
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json

63
.github/workflows/postgresql.yaml vendored Normal file
View File

@@ -0,0 +1,63 @@
# PostgreSQL Unit Testing
name: PostgreSQL
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
test:
runs-on: ubuntu-latest
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: '127.0.0.1'
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
services:
postgres:
image: postgres
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get install libpq-dev
pip3 install invoke
pip3 install psycopg2
invoke install
- name: Run Tests
run: invoke test
- name: Data Import Export
run: |
invoke migrate
python3 ./InvenTree/manage.py flush --noinput
invoke import-fixtures
invoke export-records -f data.json
python3 ./InvenTree/manage.py flush --noinput
invoke import-records -f data.json
invoke import-records -f data.json

49
.github/workflows/python.yaml vendored Normal file
View File

@@ -0,0 +1,49 @@
# Run python library tests whenever code is pushed to master
name: Python Bindings
on:
push:
branches:
- master
pull_request:
branches-ignore:
- l10*
jobs:
python:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: 'sqlite3'
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- name: Checkout Code
uses: actions/checkout@v2
- name: Install InvenTree
run: |
sudo apt-get update
sudo apt-get install python3-dev python3-pip python3-venv
pip3 install invoke
invoke install
invoke migrate
- name: Download Python Code
run: |
git clone --depth 1 https://github.com/inventree/inventree-python ./inventree-python
- name: Start Server
run: |
invoke import-records -f ./inventree-python/test/test_data.json
invoke server -a 127.0.0.1:8000 &
sleep 60
- name: Run Tests
run: |
cd inventree-python
invoke test

View File

@@ -1,579 +0,0 @@
# Checks for each PR / push
name: QC
on:
push:
branches-ignore: ["l10*"]
pull_request:
branches-ignore: ["l10*"]
env:
python_version: 3.9
node_version: 20
# The OS version must be set per job
server_start_sleep: 60
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: inventree
INVENTREE_MEDIA_ROOT: ../test_inventree_media
INVENTREE_STATIC_ROOT: ../test_inventree_static
INVENTREE_BACKUP_DIR: ../test_inventree_backup
INVENTREE_SITE_URL: http://localhost:8000
permissions:
contents: read
jobs:
paths-filter:
name: Filter
runs-on: ubuntu-latest
outputs:
server: ${{ steps.filter.outputs.server }}
migrations: ${{ steps.filter.outputs.migrations }}
frontend: ${{ steps.filter.outputs.frontend }}
api: ${{ steps.filter.outputs.api }}
force: ${{ steps.force.outputs.force }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # pin@v3.0.2
id: filter
with:
filters: |
server:
- 'src/backend/InvenTree/**'
- 'src/backend/requirements.txt'
- 'src/backend/requirements-dev.txt'
migrations:
- '**/test_migrations.py'
- '**/migrations/**'
- '.github/workflows**'
- 'src/backend/requirements.txt'
api:
- 'src/backend/InvenTree/InvenTree/api_version.py'
frontend:
- 'src/frontend/**'
- name: Is CI being forced?
run: echo "force=true" >> $GITHUB_OUTPUT
id: force
if: |
contains(github.event.pull_request.labels.*.name, 'dependency') ||
contains(github.event.pull_request.labels.*.name, 'full-run')
javascript:
name: Style - Classic UI [JS]
runs-on: ubuntu-20.04
needs: ["pre-commit"]
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
install: true
- name: Check Templated JS Files
run: |
cd .github/scripts
python3 check_js_templates.py
- name: Lint Javascript Files
run: |
python src/backend/InvenTree/manage.py prerender
cd src/backend && npx eslint InvenTree/InvenTree/static_i18n/i18n/*.js
pre-commit:
name: Style [pre-commit]
runs-on: ubuntu-20.04
needs: paths-filter
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.frontend == 'true' || needs.paths-filter.outputs.force == 'true'
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
with:
python-version: ${{ env.python_version }}
cache: "pip"
- name: Run pre-commit Checks
uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd # pin@v3.0.1
- name: Check Version
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
python3 .github/scripts/version_check.py
mkdocs:
name: Style [Documentation]
runs-on: ubuntu-20.04
needs: paths-filter
steps:
- name: Checkout Code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Set up Python ${{ env.python_version }}
uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # pin@v5.1.1
with:
python-version: ${{ env.python_version }}
- name: Check Config
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
pip install --require-hashes -r docs/requirements.txt
python docs/ci/check_mkdocs_config.py
- name: Check Links
uses: gaurav-nelson/github-action-markdown-link-check@5c5dfc0ac2e225883c0e5f03a85311ec2830d368 # v1
with:
folder-path: docs
config-file: docs/mlc_config.json
check-modified-files-only: "yes"
use-quiet-mode: "yes"
schema:
name: Tests - API Schema Documentation
runs-on: ubuntu-20.04
needs: paths-filter
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
env:
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
INVENTREE_PYTHON_TEST_SERVER: http://localhost:12345
INVENTREE_PYTHON_TEST_USERNAME: testuser
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
outputs:
version: ${{ steps.version.outputs.version }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Export API Documentation
run: invoke schema --ignore-warnings --filename src/backend/InvenTree/schema.yml
- name: Upload schema
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # pin@v4.3.5
with:
name: schema.yml
path: src/backend/InvenTree/schema.yml
- name: Download public schema
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt >/dev/null 2>&1
version="$(python3 .github/scripts/version_check.py only_version ${{ needs.paths-filter.outputs.api }} 2>&1)"
echo "Version: $version"
url="https://raw.githubusercontent.com/inventree/schema/main/export/${version}/api.yaml"
echo "URL: $url"
code=$(curl -s -o api.yaml $url --write-out '%{http_code}' --silent)
if [ "$code" != "200" ]; then
exit 1
fi
echo "Downloaded api.yaml"
- name: Running OpenAPI Spec diff action
id: breaking_changes
uses: oasdiff/oasdiff-action/diff@a2ff6682b27d175162a74c09ace8771bd3d512f8 # pin@main
with:
base: 'api.yaml'
revision: 'src/backend/InvenTree/schema.yml'
format: 'html'
- name: Echoing diff to step
run: echo "${{ steps.breaking_changes.outputs.diff }}" >> $GITHUB_STEP_SUMMARY
- name: Check for differences in API Schema
if: needs.paths-filter.outputs.api == 'false'
run: |
diff --color -u src/backend/InvenTree/schema.yml api.yaml
diff -u src/backend/InvenTree/schema.yml api.yaml && echo "no difference in API schema " || exit 2
- name: Check schema - including warnings
run: invoke schema
continue-on-error: true
- name: Extract version for publishing
id: version
if: github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true'
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt >/dev/null 2>&1
version="$(python3 .github/scripts/version_check.py only_version 2>&1)"
echo "Version: $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
schema-push:
name: Push new schema
runs-on: ubuntu-20.04
needs: [paths-filter, schema]
if: needs.schema.result == 'success' && github.ref == 'refs/heads/master' && needs.paths-filter.outputs.api == 'true' && github.repository_owner == 'inventree'
env:
version: ${{ needs.schema.outputs.version }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
name: Checkout Code
with:
repository: inventree/schema
token: ${{ secrets.SCHEMA_PAT }}
- name: Download schema artifact
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: schema.yml
- name: Move schema to correct location
run: |
echo "Version: $version"
mkdir export/${version}
mv schema.yml export/${version}/api.yaml
- uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1
name: Commit schema changes
with:
commit_message: "Update API schema for ${{ env.version }} / ${{ github.sha }}"
python:
name: Tests - inventree-python
runs-on: ubuntu-20.04
needs: ["pre-commit", "paths-filter"]
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
env:
wrapper_name: inventree-python
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DB_NAME: ../inventree_unit_test_db.sqlite3
INVENTREE_ADMIN_USER: testuser
INVENTREE_ADMIN_PASSWORD: testpassword
INVENTREE_ADMIN_EMAIL: test@test.com
INVENTREE_PYTHON_TEST_SERVER: http://127.0.0.1:12345
INVENTREE_PYTHON_TEST_USERNAME: testuser
INVENTREE_PYTHON_TEST_PASSWORD: testpassword
INVENTREE_SITE_URL: http://127.0.0.1:12345
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
npm: true
- name: Download Python Code For `${{ env.wrapper_name }}`
run: git clone --depth 1 https://github.com/inventree/${{ env.wrapper_name }} ./${{ env.wrapper_name }}
- name: Start InvenTree Server
run: |
invoke delete-data -f
invoke import-fixtures
invoke server -a 127.0.0.1:12345 &
invoke wait
- name: Run Tests For `${{ env.wrapper_name }}`
run: |
cd ${{ env.wrapper_name }}
invoke check-server
coverage run -m unittest discover -s test/
coverage:
name: Tests - DB [SQLite] + Coverage ${{ matrix.python_version }}
runs-on: ubuntu-20.04
needs: ["pre-commit", "paths-filter"]
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
continue-on-error: true # continue if a step fails so that coverage gets pushed
strategy:
matrix:
python_version: [3.9]
# python_version: [3.9, 3.12] # Disabled due to requirement issues
env:
INVENTREE_DB_NAME: ./inventree.sqlite
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_PLUGINS_ENABLED: true
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
python_version: ${{ matrix.python_version }}
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils
dev-install: true
update: true
- name: Data Export Test
uses: ./.github/actions/migration
- name: Test Translations
run: invoke translate
- name: Check Migration Files
run: python3 .github/scripts/check_migration_files.py
- name: Coverage Tests
run: invoke test --coverage
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # pin@v4.5.0
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
slug: inventree/InvenTree
flags: backend
postgres:
name: Tests - DB [PostgreSQL]
runs-on: ubuntu-20.04
needs: ["pre-commit", "paths-filter"]
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: "127.0.0.1"
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_CACHE_HOST: localhost
INVENTREE_PLUGINS_ENABLED: true
services:
postgres:
image: postgres:14
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
redis:
image: redis
ports:
- 6379:6379
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg django-redis>=5.0.0
dev-install: true
update: true
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration
mysql:
name: Tests - DB [MySQL]
runs-on: ubuntu-20.04
needs: ["pre-commit", "paths-filter"]
if: needs.paths-filter.outputs.server == 'true' || needs.paths-filter.outputs.force == 'true'
env:
# Database backend configuration
INVENTREE_DB_ENGINE: django.db.backends.mysql
INVENTREE_DB_USER: root
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: "127.0.0.1"
INVENTREE_DB_PORT: 3306
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: true
services:
mysql:
image: mysql:latest
env:
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_DATABASE: ${{ env.INVENTREE_DB_NAME }}
MYSQL_USER: inventree
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
ports:
- 3306:3306
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libmysqlclient-dev
pip-dependency: mysqlclient
dev-install: true
update: true
- name: Run Tests
run: invoke test
- name: Data Export Test
uses: ./.github/actions/migration
migration-tests:
name: Tests - Migrations [PostgreSQL]
runs-on: ubuntu-latest
needs: paths-filter
if: ${{ (needs.paths-filter.outputs.force == 'true') || (github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true') }}
env:
INVENTREE_DB_ENGINE: django.db.backends.postgresql
INVENTREE_DB_NAME: inventree
INVENTREE_DB_USER: inventree
INVENTREE_DB_PASSWORD: password
INVENTREE_DB_HOST: "127.0.0.1"
INVENTREE_DB_PORT: 5432
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: false
services:
postgres:
image: postgres:14
env:
POSTGRES_USER: inventree
POSTGRES_PASSWORD: password
ports:
- 5432:5432
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
apt-dependency: gettext poppler-utils libpq-dev
pip-dependency: psycopg
dev-install: true
update: true
- name: Run Tests
run: invoke test --migrations --report --coverage
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # pin@v4.5.0
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
slug: inventree/InvenTree
flags: migrations
migrations-checks:
name: Tests - Full Migration [SQLite]
runs-on: ubuntu-latest
needs: paths-filter
if: ${{ (needs.paths-filter.outputs.force == 'true') || (github.ref == 'refs/heads/master' && needs.paths-filter.outputs.migrations == 'true') }}
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
INVENTREE_DEBUG: info
INVENTREE_PLUGINS_ENABLED: false
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
name: Checkout Code
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
- name: Fetch Database
run: git clone --depth 1 https://github.com/inventree/test-db ./test-db
- name: Latest Database
run: |
cp test-db/latest.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.10.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.10.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.11.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.11.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.12.0 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.12.0.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
- name: 0.13.5 Database
run: |
rm /home/runner/work/InvenTree/db.sqlite3
cp test-db/stable_0.13.5.sqlite3 /home/runner/work/InvenTree/db.sqlite3
chmod +rw /home/runner/work/InvenTree/db.sqlite3
invoke migrate
platform_ui:
name: Tests - Platform UI
runs-on: ubuntu-20.04
timeout-minutes: 60
needs: ["pre-commit", "paths-filter"]
if: needs.paths-filter.outputs.frontend == 'true' || needs.paths-filter.outputs.force == 'true'
env:
INVENTREE_DB_ENGINE: sqlite3
INVENTREE_DB_NAME: /home/runner/work/InvenTree/db.sqlite3
INVENTREE_DEBUG: True
INVENTREE_PLUGINS_ENABLED: false
VITE_COVERAGE: true
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
install: true
update: true
- name: Set up test data
run: invoke setup-test -i
- name: Rebuild thumbnails
run: invoke rebuild-thumbnails
- name: Install dependencies
run: inv frontend-compile
- name: Install Playwright Browsers
run: cd src/frontend && npx playwright install --with-deps
- name: Run Playwright tests
id: tests
run: cd src/frontend && npx nyc playwright test
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # pin@v4
if: ${{ !cancelled() && steps.tests.outcome == 'failure' }}
with:
name: playwright-report
path: src/frontend/playwright-report/
retention-days: 14
- name: Report coverage
if: always()
run: cd src/frontend && npx nyc report --report-dir ./coverage --temp-dir .nyc_output --reporter=lcov --exclude-after-remap false
- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # pin@v4.5.0
if: always()
with:
token: ${{ secrets.CODECOV_TOKEN }}
slug: inventree/InvenTree
flags: pui
platform_ui_build:
name: Build - UI Platform
runs-on: ubuntu-20.04
timeout-minutes: 60
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && yarn run compile && yarn run build
- name: Write version file - SHA
run: cd src/backend/InvenTree/web/static/web/.vite && echo "$GITHUB_SHA" > sha.txt
- name: Zip frontend
run: |
cd src/backend/InvenTree/web/static
zip -r frontend-build.zip web/ web/.vite
- uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # pin@v4.3.5
with:
name: frontend-build
path: src/backend/InvenTree/web/static/web

View File

@@ -1,85 +0,0 @@
# Runs on releases
name: Publish release
on:
release:
types: [published]
permissions:
contents: read
jobs:
stable:
runs-on: ubuntu-latest
name: Write release to stable branch
permissions:
contents: write
pull-requests: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout Code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Version Check
run: |
pip install --require-hashes -r contrib/dev_reqs/requirements.txt
python3 .github/scripts/version_check.py
- name: Push to Stable Branch
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
if: env.stable_release == 'true'
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: stable
force: true
build:
runs-on: ubuntu-latest
name: Build and attest frontend
permissions:
id-token: write
contents: write
attestations: write
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
npm: true
- name: Install dependencies
run: cd src/frontend && yarn install
- name: Build frontend
run: cd src/frontend && npm run compile && npm run build
- name: Create SBOM for frontend
uses: anchore/sbom-action@v0
with:
artifact-name: frontend-build.spdx
path: src/frontend
- name: Write version file - SHA
run: cd src/backend/InvenTree/web/static/web/.vite && echo "$GITHUB_SHA" > sha.txt
- name: Write version file - TAG
run: cd src/backend/InvenTree/web/static/web/.vite && echo "${{ github.ref_name }}" > tag.txt
- name: Zip frontend
run: |
cd src/backend/InvenTree/web/static/web
zip -r ../frontend-build.zip * .vite
- name: Attest Build Provenance
id: attest
uses: actions/attest-build-provenance@v1
with:
subject-path: "${{ github.workspace }}/src/backend/InvenTree/web/static/frontend-build.zip"
- name: Upload frontend
uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: src/backend/InvenTree/web/static/frontend-build.zip
asset_name: frontend-build.zip
tag: ${{ github.ref }}
overwrite: true
- name: Upload Attestation
uses: svenstaro/upload-release-action@04733e069f2d7f7f0b4aebc4fbdbce8613b03ccd # pin@2.9.0
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
asset_name: frontend-build.intoto.jsonl
file: ${{ steps.attest.outputs.bundle-path}}
tag: ${{ github.ref }}
overwrite: true

View File

@@ -1,72 +0,0 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.
name: Scorecard supply-chain security
on:
# For Branch-Protection check. Only the default branch is supported. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
branch_protection_rule:
# To guarantee Maintained check is occasionally updated. See
# https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
schedule:
- cron: "32 0 * * 0"
push:
branches: ["master"]
# Declare default permissions as read only.
permissions: read-all
jobs:
analysis:
name: Scorecard analysis
runs-on: ubuntu-latest
permissions:
# Needed to upload the results to code-scanning dashboard.
security-events: write
# Needed to publish results and get a badge (see publish_results below).
id-token: write
# Uncomment the permissions below if installing in a private repository.
# contents: read
# actions: read
steps:
- name: "Checkout code"
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
persist-credentials: false
- name: "Run analysis"
uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
with:
results_file: results.sarif
results_format: sarif
# (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
# - you want to enable the Branch-Protection check on a *public* repository, or
# - you are installing Scorecard on a *private* repository
# To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
# repo_token: ${{ secrets.SCORECARD_TOKEN }}
# Public repositories:
# - Publish results to OpenSSF REST API for easy access by consumers
# - Allows the repository to include the Scorecard badge.
# - See https://github.com/ossf/scorecard-action#publishing-results.
# For private repositories:
# - `publish_results` will always be set to `false`, regardless
# of the value entered here.
publish_results: true
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
with:
name: SARIF file
path: results.sarif
retention-days: 5
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
uses: github/codeql-action/upload-sarif@afb54ba388a7dca6ecae48f608c4ff05ff4cc77a # v3.25.15
with:
sarif_file: results.sarif

View File

@@ -1,27 +0,0 @@
# Marks all issues that do not receive activity as stale, starting 2022
name: Mark stale issues and pull requests
on:
schedule:
- cron: "24 11 * * *"
permissions:
contents: read
jobs:
stale:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # pin@v9.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-issue-message: "This issue seems stale. Please react to show this is still important."
stale-pr-message: "This PR seems stale. Please react to show this is still important."
stale-issue-label: "inactive"
stale-pr-label: "inactive"
start-date: "2022-01-01"
exempt-all-milestones: true

34
.github/workflows/style.yaml vendored Normal file
View File

@@ -0,0 +1,34 @@
name: Style Checks
on:
push:
branches-ignore:
- l10*
pull_request:
branches-ignore:
- l10*
jobs:
style:
runs-on: ubuntu-latest
strategy:
max-parallel: 4
matrix:
python-version: [3.7]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install deps
run: |
pip install flake8==3.8.3
pip install pep8-naming==0.11.1
- name: flake8
run: |
flake8 InvenTree

View File

@@ -1,54 +0,0 @@
name: Update Translation Files
on:
push:
branches:
- master
env:
python_version: 3.9
node_version: 20
permissions:
contents: read
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: "./test_db.sqlite"
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
INVENTREE_BACKUP_DIR: ./backup
INVENTREE_SITE_URL: http://localhost:8000
steps:
- name: Checkout Code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v4.1.7
- name: Environment Setup
uses: ./.github/actions/setup
with:
install: true
npm: true
apt-dependency: gettext
- name: Make Translations
run: invoke translate
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout -b l10_local
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@d91a481090679876dfc4178fef17f286781251df # pin@v0.8.0
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10
force: true

60
.github/workflows/translations.yml vendored Normal file
View File

@@ -0,0 +1,60 @@
name: Update Translation Files
on:
push:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
INVENTREE_DB_NAME: './test_db.sqlite'
INVENTREE_DB_ENGINE: django.db.backends.sqlite3
INVENTREE_DEBUG: info
INVENTREE_MEDIA_ROOT: ./media
INVENTREE_STATIC_ROOT: ./static
steps:
- uses: actions/checkout@v2
- name: Get Current Translations
run: |
git fetch
git checkout origin/l10 -- `git ls-tree origin/l10 -r --name-only | grep ".po"`
git reset
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install Dependencies
run: |
sudo apt-get update
sudo apt-get install -y gettext
pip3 install invoke
invoke install
- name: Make Translations
run: |
invoke translate
- name: stash changes
run: |
git stash
- name: Checkout Translation Branch
uses: actions/checkout@v2.3.4
with:
ref: l10
- name: Commit files
run: |
git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
git checkout stash -- .
git reset
git add "*.po"
git commit -m "updated translation base"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: l10

View File

@@ -1,22 +0,0 @@
name: Update dependency files regularly
on:
workflow_dispatch: null
schedule:
- cron: "0 0 * * *"
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # pin@v4.1.1
- name: Setup
run: pip install --require-hashes -r requirements-dev.txt
- name: Update requirements.txt
run: pip-compile --output-file=requirements.txt requirements.in -U
- name: Update requirements-dev.txt
run: pip-compile --generate-hashes --output-file=requirements-dev.txt requirements-dev.in -U
- uses: stefanzweifel/git-auto-commit-action@fd157da78fa13d9383e5580d1fd1184d89554b51 # pin@v4.15.1
with:
commit_message: "[Bot] Updated dependency"
branch: dep-update

57
.gitignore vendored
View File

@@ -8,7 +8,6 @@ __pycache__/
env/
inventree-env/
./build/
.cache/
develop-eggs/
dist/
bin/
@@ -26,7 +25,7 @@ var/
*.egg-info/
.installed.cfg
*.egg
*.DS_Store
# Django stuff:
*.log
@@ -37,8 +36,10 @@ local_settings.py
*.old
# Files used for testing
inventree-demo-dataset/
inventree-data/
dummy_image.*
# Sphinx files
docs/_build
# Local static and media file storage (only when running in development mode)
inventree_media
@@ -47,7 +48,6 @@ static_i18n
# Local config file
config.yaml
plugins.txt
# Default data file
data.json
@@ -60,54 +60,11 @@ secret_key.txt
# IDE / development files
.idea/
*.code-workspace
.bash_history
.DS_Store
# https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
.vscode/*
#!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
#!.vscode/extensions.json
#!.vscode/*.code-snippets
.vscode/
# Coverage reports
.coverage
htmlcov/
# Temporary javascript files (used for testing)
js_tmp/
# Development files
dev/
data/
env/
# Locale stats file
src/backend/InvenTree/InvenTree/locale_stats.json
src/backend/InvenTree/InvenTree/licenses.txt
# node.js
node_modules/
# maintenance locker
maintenance_mode_state.txt
# plugin dev directory
src/backend/InvenTree/plugins/
# Compiled translation files
*.mo
messages.ts
# Generated API schema file
api.yaml
# web frontend (static files)
src/backend/InvenTree/web/static
InvenTree/web/static
# Generated docs files
docs/schema.yml
docs/docs/api/*.yml
docs/docs/api/schema/*.yml
dev/

View File

@@ -1,41 +0,0 @@
name: inventree
description: Open Source Inventory Management System
homepage: https://inventree.org
notifications: true
buildpack: https://github.com/mjmair/heroku-buildpack-python#v216-mjmair
env:
- STACK=heroku-20
- DISABLE_COLLECTSTATIC=1
- INVENTREE_DB_ENGINE=sqlite3
- INVENTREE_DB_NAME=database.sqlite3
- INVENTREE_PLUGINS_ENABLED
- INVENTREE_MEDIA_ROOT=/opt/inventree/media
- INVENTREE_STATIC_ROOT=/opt/inventree/static
- INVENTREE_BACKUP_DIR=/opt/inventree/backup
- INVENTREE_PLUGIN_FILE=/opt/inventree/plugins.txt
- INVENTREE_CONFIG_FILE=/opt/inventree/config.yaml
- APP_REPO=inventree/InvenTree
before_install: contrib/packager.io/preinstall.sh
after_install: contrib/packager.io/postinstall.sh
before_remove: contrib/packager.io/preinstall.sh
before:
- contrib/packager.io/before.sh
dependencies:
- curl
- "python3.9 | python3.10 | python3.11"
- "python3.9-venv | python3.10-venv | python3.11-venv"
- "python3.9-dev | python3.10-dev | python3.11-dev"
- python3-pip
- python3-cffi
- python3-brotli
- python3-wheel
- libpango-1.0-0
- libharfbuzz0b
- libpangoft2-1.0-0
- gettext
- nginx
- jq
- "libffi7 | libffi8"
targets:
ubuntu-20.04: true
debian-11: true

View File

@@ -1,102 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
exclude: |
(?x)^(
src/backend/InvenTree/InvenTree/static/.*|
src/backend/InvenTree/locale/.*|
src/frontend/src/locales/.* |
.*/migrations/.* |
src/frontend/yarn.lock
)$
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: mixed-line-ending
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.1
hooks:
- id: ruff-format
args: [--preview]
- id: ruff
args: [
--fix,
--preview
]
- repo: https://github.com/astral-sh/uv-pre-commit
rev: 0.2.13
hooks:
- id: pip-compile
name: pip-compile requirements-dev.in
args: [src/backend/requirements-dev.in, -o, src/backend/requirements-dev.txt, --no-strip-extras, --generate-hashes]
files: src/backend/requirements-dev\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [src/backend/requirements.in, -o, src/backend/requirements.txt, --no-strip-extras, --generate-hashes]
files: src/backend/requirements\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [contrib/dev_reqs/requirements.in, -o, contrib/dev_reqs/requirements.txt, --no-strip-extras, --generate-hashes]
files: contrib/dev_reqs/requirements\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [docs/requirements.in, -o, docs/requirements.txt, --no-strip-extras, --generate-hashes]
files: docs/requirements\.(in|txt)$
- id: pip-compile
name: pip-compile requirements.txt
args: [contrib/container/requirements.in, -o, contrib/container/requirements.txt, --python-version=3.11, --no-strip-extras, --generate-hashes]
files: contrib/container/requirements\.(in|txt)$
- repo: https://github.com/Riverside-Healthcare/djLint
rev: v1.34.1
hooks:
- id: djlint-django
- repo: https://github.com/codespell-project/codespell
rev: v2.3.0
hooks:
- id: codespell
additional_dependencies:
- tomli
exclude: >
(?x)^(
docs/docs/stylesheets/.*|
docs/docs/javascripts/.*|
docs/docs/webfonts/.* |
src/frontend/src/locales/.* |
pyproject.toml |
src/frontend/vite.config.ts |
)$
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v4.0.0-alpha.8"
hooks:
- id: prettier
files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
additional_dependencies:
- "prettier@^2.4.1"
- "@trivago/prettier-plugin-sort-imports"
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "v9.6.0"
hooks:
- id: eslint
additional_dependencies:
- eslint@^8.41.0
- eslint-config-google@^0.14.0
- eslint-plugin-react@6.10.3
- babel-eslint@6.1.2
- "@typescript-eslint/eslint-plugin@latest"
- "@typescript-eslint/parser"
files: ^src/frontend/.*\.(js|jsx|ts|tsx)$
- repo: https://github.com/gitleaks/gitleaks
rev: v8.18.4
hooks:
- id: gitleaks
#- repo: https://github.com/jumanjihouse/pre-commit-hooks
# rev: 3.0.0
# hooks:
# - id: shellcheck
- repo: https://github.com/isidentical/teyit
rev: 0.4.3
hooks:
- id: teyit

51
.vscode/launch.json vendored
View File

@@ -1,51 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "InvenTree Server",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
"args": [
"runserver",
// "0.0.0.0:8000", // expose server in network (useful for testing with mobile app)
// "--noreload" // disable auto-reload
],
"django": true,
"justMyCode": true
},
{
"name": "InvenTree Server - Tests",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
"args": [
"test",
// "part.test_api.PartCategoryAPITest", // run only a specific test
],
"django": true,
"justMyCode": true
},
{
"name": "InvenTree Server - 3rd party",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/src/backend/InvenTree/manage.py",
"args": [
"runserver"
],
"django": true,
"justMyCode": false
},
{
"name": "InvenTree Frontend - Vite",
"type": "chrome",
"request": "launch",
"url": "http://localhost:5173",
"webRoot": "${workspaceFolder}/src/frontend"
}
]
}

70
.vscode/tasks.json vendored
View File

@@ -1,70 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
// the problemMatchers should prevent vscode from asking how it should check the output
"version": "2.0.0",
"tasks": [
{
"label": "worker",
"type": "shell",
"command": "inv worker",
"problemMatcher": [],
},
{
"label": "clean-settings",
"type": "shell",
"command": "inv clean-settings",
"problemMatcher": [],
},
{
"label": "delete-data",
"type": "shell",
"command": "inv delete-data",
"problemMatcher": [],
},
{
"label": "migrate",
"type": "shell",
"command": "inv migrate",
"problemMatcher": [],
},
{
"label": "server",
"type": "shell",
"command": "inv server",
"problemMatcher": [],
},
{
"label": "setup-dev",
"type": "shell",
"command": "inv setup-dev",
"problemMatcher": [],
},
{
"label": "setup-test",
"type": "shell",
"command": "inv setup-test -i --path dev/inventree-demo-dataset",
"problemMatcher": [],
},
{
"label": "superuser",
"type": "shell",
"command": "inv superuser",
"problemMatcher": [],
},
{
"label": "test",
"type": "shell",
"command": "inv test",
"problemMatcher": [],
},
{
"label": "update",
"type": "shell",
"command": "inv update",
"problemMatcher": [],
},
]
}

View File

@@ -1,50 +1,29 @@
### Contributing to InvenTree
Contributions to InvenTree are welcomed - please follow the guidelines below.
Hi there, thank you for your interest in contributing!
Please read our contribution guidelines, before submitting your first pull request to the InvenTree codebase.
## Feature Branches
### Project File Structure
No pushing to master! New features must be submitted in a separate branch (one branch per feature).
The InvenTree project is split into two main components: frontend and backend. Their source is located in the `src` directory. All other files are used for project management, documentation, and testing.
## Include Migration Files
```bash
InvenTree/
├─ .devops/ # Files for Azure DevOps
├─ .github/ # Files for GitHub
│ ├─ actions/ # Reused actions
│ ├─ ISSUE_TEMPLATE/ # Templates for issues and pull requests
│ ├─ workflows/ # CI/CD flows
│ ├─ scripts/ # CI scripts
├─ .vscode/ # Settings for Visual Code IDE
├─ assets/ # General project assets
├─ contrib/ # Files needed for deployments
│ ├─ container/ # Files related to building container images
│ ├─ installer/ # Files needed to build single-file installer
│ ├─ packager.io/ # Files needed for Debian/Ubuntu packages
├─ docs/ # Directory for documentation / General helper files
│ ├─ ci/ # CI for documentation
│ ├─ docs/ # Source for documentation
├─ src/ # Source for application
│ ├─ backend/ # Directory for backend parts
│ │ ├─ InvenTree/ # Source for backend
│ │ ├─ requirements.txt # Dependencies for backend
│ │ ├─ package.json # Dependencies for backend HTML linting
│ ├─ frontend/ # Directory for frontend parts
│ │ ├─ src/ # Source for frontend
│ │ │ ├─ main.tsx # Entry point for frontend
│ │ ├─ tests/ # Tests for frontend
│ │ ├─ netlify.toml # Settings for frontend previews (Netlify)
│ │ ├─ package.json # Dependencies for frontend
│ │ ├─ playwright.config.ts # Settings for frontend tests
│ │ ├─ tsconfig.json # Settings for frontend compilation
├─ .pkgr.yml # Build definition for Debian/Ubuntu packages
├─ .pre-commit-config.yaml # Code formatter/linter configuration
├─ CONTRIBUTING.md # Contribution guidelines and overview
├─ Procfile # Process definition for Debian/Ubuntu packages
├─ README.md # General project information and overview
├─ runtime.txt # Python runtime settings for Debian/Ubuntu packages build
├─ SECURITY.md # Project security policy
├─ tasks.py # Action definitions for development, testing and deployment
```
Any required migration files **must** be included in the commit, or the pull-request will be rejected. If you change the underlying database schema, make sure you run `invoke migrate` and commit the migration files before submitting the PR.
Refer to our [contribution guidelines](https://docs.inventree.org/en/latest/develop/contributing/) for more information!
## Update Translation Files
Any PRs which update translatable strings (i.e. text strings that will appear in the web front-end UI) must also update the translation (locale) files to include hooks for the translated strings.
*This does not mean that all translations must be provided, but that the translation files must include locations for the translated strings to be written.*
To perform this step, simply run `invoke translate` from the top level directory before submitting the PR.
## Testing
Any new code should be covered by unit tests - a submitted PR may not be accepted if the code coverage is decreased.
## Documentation
New features or updates to existing features should be accompanied by user documentation. A PR with associated documentation should link to the matching PR at https://github.com/inventree/inventree-docs/
## Code Style
Submitted Python code is automatically checked against PEP style guidelines. Locally, you can run `invoke style` to ensure the style checks will pass before submitting the PR.

View File

@@ -0,0 +1,5 @@
"""
The InvenTree module provides high-level management and functionality.
It provides a number of helper functions and generic classes which are used by InvenTree apps.
"""

126
InvenTree/InvenTree/api.py Normal file
View File

@@ -0,0 +1,126 @@
"""
Main JSON interface views
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from django.utils.translation import ugettext_lazy as _
from django.http import JsonResponse
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework.views import APIView
from .views import AjaxView
from .version import inventreeVersion, inventreeApiVersion, inventreeInstanceName
from .status import is_worker_running
from plugins import plugins as inventree_plugins
logger = logging.getLogger("inventree")
logger.info("Loading action plugins...")
action_plugins = inventree_plugins.load_action_plugins()
class InfoView(AjaxView):
""" Simple JSON endpoint for InvenTree information.
Use to confirm that the server is running, etc.
"""
permission_classes = [permissions.AllowAny]
def get(self, request, *args, **kwargs):
data = {
'server': 'InvenTree',
'version': inventreeVersion(),
'instance': inventreeInstanceName(),
'apiVersion': inventreeApiVersion(),
'worker_running': is_worker_running(),
}
return JsonResponse(data)
class NotFoundView(AjaxView):
"""
Simple JSON view when accessing an invalid API view.
"""
permission_classes = [permissions.AllowAny]
def get(self, request, *args, **kwargs):
data = {
'details': _('API endpoint not found'),
'url': request.build_absolute_uri(),
}
return JsonResponse(data, status=404)
class AttachmentMixin:
"""
Mixin for creating attachment objects,
and ensuring the user information is saved correctly.
"""
permission_classes = [permissions.IsAuthenticated]
filter_backends = [
DjangoFilterBackend,
filters.OrderingFilter,
filters.SearchFilter,
]
def perform_create(self, serializer):
""" Save the user information when a file is uploaded """
attachment = serializer.save()
attachment.user = self.request.user
attachment.save()
class ActionPluginView(APIView):
"""
Endpoint for running custom action plugins.
"""
permission_classes = [
permissions.IsAuthenticated,
]
def post(self, request, *args, **kwargs):
action = request.data.get('action', None)
data = request.data.get('data', None)
if action is None:
return Response({
'error': _("No action specified")
})
for plugin_class in action_plugins:
if plugin_class.action_name() == action:
plugin = plugin_class(request.user, data=data)
plugin.perform_action()
return Response(plugin.get_response())
# If we got to here, no matching action was found
return Response({
'error': _("No matching action found"),
"action": action,
})
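The ActionPluginView above expects a JSON body containing an 'action' key (matched against each plugin's action_name()) plus an optional 'data' payload. A minimal client-side sketch, assuming the view is routed at /api/action/ (the URL configuration is not part of this diff):

```python
import requests

# Hypothetical action name and endpoint URL - both are assumptions for illustration
payload = {
    "action": "my-action",        # must match plugin_class.action_name()
    "data": {"stock_item": 42},   # arbitrary data handed to the plugin
}

response = requests.post(
    "http://localhost:8000/api/action/",
    json=payload,
    auth=("username", "password"),
)
print(response.json())   # plugin.get_response() on success, or an 'error' key otherwise
```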

View File

@@ -0,0 +1,143 @@
"""
Helper functions for performing API unit tests
"""
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from rest_framework.test import APITestCase
class InvenTreeAPITestCase(APITestCase):
"""
Base class for running InvenTree API tests
"""
# User information
username = 'testuser'
password = 'mypassword'
email = 'test@testing.com'
superuser = False
is_staff = True
auto_login = True
# Set list of roles automatically associated with the user
roles = []
def setUp(self):
super().setUp()
# Create a user to log in with
self.user = get_user_model().objects.create_user(
username=self.username,
password=self.password,
email=self.email
)
# Create a group for the user
self.group = Group.objects.create(name='my_test_group')
self.user.groups.add(self.group)
if self.superuser:
self.user.is_superuser = True
if self.is_staff:
self.user.is_staff = True
self.user.save()
for role in self.roles:
self.assignRole(role)
if self.auto_login:
self.client.login(username=self.username, password=self.password)
def assignRole(self, role):
"""
Set the user roles for the registered user
"""
# role is of the format 'rule.permission' e.g. 'part.add'
rule, perm = role.split('.')
for ruleset in self.group.rule_sets.all():
if ruleset.name == rule:
if perm == 'view':
ruleset.can_view = True
elif perm == 'change':
ruleset.can_change = True
elif perm == 'delete':
ruleset.can_delete = True
elif perm == 'add':
ruleset.can_add = True
ruleset.save()
break
def getActions(self, url):
"""
Return a dict of the 'actions' available at a given endpoint.
Makes use of the HTTP 'OPTIONS' method to request this.
"""
response = self.client.options(url)
self.assertEqual(response.status_code, 200)
actions = response.data.get('actions', None)
if not actions:
actions = {}
return actions
def get(self, url, data={}, expected_code=200):
"""
Issue a GET request
"""
response = self.client.get(url, data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def post(self, url, data, expected_code=None):
"""
Issue a POST request
"""
response = self.client.post(url, data=data, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def delete(self, url, expected_code=None):
"""
Issue a DELETE request
"""
response = self.client.delete(url)
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
def patch(self, url, data, files=None, expected_code=None):
"""
Issue a PATCH request
"""
response = self.client.patch(url, data=data, files=files, format='json')
if expected_code is not None:
self.assertEqual(response.status_code, expected_code)
return response
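A minimal sketch of how a concrete test case might build on InvenTreeAPITestCase, using the role mechanism shown above (the 'part' ruleset name and the API URL are illustrative assumptions):

```python
class ExamplePartAPITest(InvenTreeAPITestCase):
    """Hypothetical test case - not part of the file above."""

    # Roles are granted to the test user's group via assignRole() in setUp()
    roles = ['part.view', 'part.add']

    def test_part_list(self):
        # self.get() already asserts the expected status code
        self.get('/api/part/', expected_code=200)
```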

105
InvenTree/InvenTree/apps.py Normal file
View File

@@ -0,0 +1,105 @@
# -*- coding: utf-8 -*-
import logging
from django.apps import AppConfig
from django.core.exceptions import AppRegistryNotReady
from InvenTree.ready import isInTestMode, canAppAccessDatabase
import InvenTree.tasks
logger = logging.getLogger("inventree")
class InvenTreeConfig(AppConfig):
name = 'InvenTree'
def ready(self):
if canAppAccessDatabase():
self.start_background_tasks()
if not isInTestMode():
self.update_exchange_rates()
def start_background_tasks(self):
try:
from django_q.models import Schedule
except (AppRegistryNotReady):
return
logger.info("Starting background tasks...")
InvenTree.tasks.schedule_task(
'InvenTree.tasks.delete_successful_tasks',
schedule_type=Schedule.DAILY,
)
InvenTree.tasks.schedule_task(
'InvenTree.tasks.check_for_updates',
schedule_type=Schedule.DAILY
)
InvenTree.tasks.schedule_task(
'InvenTree.tasks.heartbeat',
schedule_type=Schedule.MINUTES,
minutes=15
)
InvenTree.tasks.schedule_task(
'InvenTree.tasks.update_exchange_rates',
schedule_type=Schedule.DAILY,
)
def update_exchange_rates(self):
"""
Update exchange rates each time the server is started, *if*:
a) The rates have not been updated within the last day
b) The base exchange rate (base currency) has been altered
"""
try:
from djmoney.contrib.exchange.models import ExchangeBackend
from datetime import datetime, timedelta
from InvenTree.tasks import update_exchange_rates
from common.settings import currency_code_default
except AppRegistryNotReady:
pass
base_currency = currency_code_default()
update = False
try:
backend = ExchangeBackend.objects.get(name='InvenTreeExchange')
last_update = backend.last_update
if last_update is not None:
delta = datetime.now().date() - last_update.date()
if delta > timedelta(days=1):
print(f"Last update was {last_update}")
update = True
else:
# Never been updated
print("Exchange backend has never been updated")
update = True
# Backend currency has changed?
if not base_currency == backend.base_currency:
print(f"Base currency changed from {backend.base_currency} to {base_currency}")
update = True
except (ExchangeBackend.DoesNotExist):
print("Exchange backend not found - updating")
update = True
except:
# Some other error - potentially the tables are not ready yet
return
if update:
update_exchange_rates()
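The ready() hook above registers recurring jobs via InvenTree.tasks.schedule_task. A sketch of how an additional job could be registered with the same pattern (the dotted task path here is a made-up example):

```python
from django_q.models import Schedule

import InvenTree.tasks

# Hypothetical task registration, mirroring the calls in start_background_tasks()
InvenTree.tasks.schedule_task(
    'myapp.tasks.refresh_cache',   # dotted path to a callable (illustrative name)
    schedule_type=Schedule.HOURLY,
)
```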

View File

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
"""
Provides extra global data to all templates.
"""
from InvenTree.status_codes import SalesOrderStatus, PurchaseOrderStatus
from InvenTree.status_codes import BuildStatus, StockStatus
from InvenTree.status_codes import StockHistoryCode
import InvenTree.status
from users.models import RuleSet
def health_status(request):
"""
Provide system health status information to the global context.
- Not required for AJAX requests
- Do not provide if it is already provided to the context
"""
if request.path.endswith('.js'):
# Do not provide to script requests
return {}
if hasattr(request, '_inventree_health_status'):
# Do not duplicate efforts
return {}
request._inventree_health_status = True
status = {
'django_q_running': InvenTree.status.is_worker_running(),
'email_configured': InvenTree.status.is_email_configured(),
}
all_healthy = True
for k in status.keys():
if status[k] is not True:
all_healthy = False
status['system_healthy'] = all_healthy
status['up_to_date'] = InvenTree.version.isInvenTreeUpToDate()
return status
def status_codes(request):
"""
Provide status code enumerations.
"""
if hasattr(request, '_inventree_status_codes'):
# Do not duplicate efforts
return {}
request._inventree_status_codes = True
return {
# Expose the StatusCode classes to the templates
'SalesOrderStatus': SalesOrderStatus,
'PurchaseOrderStatus': PurchaseOrderStatus,
'BuildStatus': BuildStatus,
'StockStatus': StockStatus,
'StockHistoryCode': StockHistoryCode,
}
def user_roles(request):
"""
Return a map of the current roles assigned to the user.
Roles are denoted by their simple names, and then the permission type.
Permissions can be accessed as follows:
- roles.part.view
- roles.build.delete
Each value will return a boolean True / False
"""
user = request.user
roles = {
}
if user.is_superuser:
for ruleset in RuleSet.RULESET_MODELS.keys():
roles[ruleset] = {
'view': True,
'add': True,
'change': True,
'delete': True,
}
else:
for group in user.groups.all():
for rule in group.rule_sets.all():
# Ensure the role name is in the dict
if rule.name not in roles:
roles[rule.name] = {
'view': user.is_superuser,
'add': user.is_superuser,
'change': user.is_superuser,
'delete': user.is_superuser
}
# Roles are additive across groups
roles[rule.name]['view'] |= rule.can_view
roles[rule.name]['add'] |= rule.can_add
roles[rule.name]['change'] |= rule.can_change
roles[rule.name]['delete'] |= rule.can_delete
return {'roles': roles}
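For a user whose group grants only 'view' and 'add' on the 'part' ruleset, the context produced by user_roles() looks roughly like the sketch below; templates can then test individual permissions:

```python
context = user_roles(request)   # 'request' is an authenticated HttpRequest
# context == {
#     'roles': {
#         'part': {'view': True, 'add': True, 'change': False, 'delete': False},
#         ...
#     }
# }
# Template usage: {% if roles.part.delete %} ... {% endif %}
```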

View File

@@ -0,0 +1,34 @@
from common.settings import currency_code_default, currency_codes
from urllib.error import HTTPError, URLError
from djmoney.contrib.exchange.backends.base import SimpleExchangeBackend
class InvenTreeExchange(SimpleExchangeBackend):
"""
Backend for automatically updating currency exchange rates.
Uses the exchangerate.host service API
"""
name = "InvenTreeExchange"
def __init__(self):
self.url = "https://api.exchangerate.host/latest"
super().__init__()
def get_params(self):
# No API key is required
return {
}
def update_rates(self, base_currency=currency_code_default()):
symbols = ','.join(currency_codes())
try:
super().update_rates(base=base_currency, symbols=symbols)
# catch connection errors
except (HTTPError, URLError):
print('Encountered connection error while updating')
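A short sketch of exercising this backend directly (e.g. from a Django shell), assuming the currency settings are configured; connection errors are swallowed by the except clause above:

```python
from InvenTree.exchange import InvenTreeExchange   # import path assumed from the project layout

backend = InvenTreeExchange()
backend.update_rates(base_currency='USD')   # fetches rates for all configured currency codes
```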

View File

@@ -0,0 +1,162 @@
""" Custom fields used in InvenTree """
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from .validators import allowable_url_schemes
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import URLField as FormURLField
from django.db import models as models
from django.core import validators
from django import forms
from decimal import Decimal
from djmoney.models.fields import MoneyField as ModelMoneyField
from djmoney.forms.fields import MoneyField
from djmoney.models.validators import MinMoneyValidator
import InvenTree.helpers
class InvenTreeURLFormField(FormURLField):
""" Custom URL form field with custom scheme validators """
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
class InvenTreeURLField(models.URLField):
""" Custom URL field which has custom scheme validators """
default_validators = [validators.URLValidator(schemes=allowable_url_schemes())]
def formfield(self, **kwargs):
return super().formfield(**{
'form_class': InvenTreeURLFormField
})
def money_kwargs():
""" returns the database settings for MoneyFields """
from common.settings import currency_code_mappings, currency_code_default
kwargs = {}
kwargs['currency_choices'] = currency_code_mappings()
kwargs['default_currency'] = currency_code_default()
return kwargs
class InvenTreeModelMoneyField(ModelMoneyField):
"""
Custom MoneyField for clean migrations while using dynamic currency settings
"""
def __init__(self, **kwargs):
# detect if creating migration
if 'migrate' in sys.argv or 'makemigrations' in sys.argv:
# remove currency information for a clean migration
kwargs['default_currency'] = ''
kwargs['currency_choices'] = []
else:
# set defaults
kwargs.update(money_kwargs())
# Set a minimum value validator
validators = kwargs.get('validators', [])
if len(validators) == 0:
validators.append(
MinMoneyValidator(0),
)
kwargs['validators'] = validators
super().__init__(**kwargs)
def formfield(self, **kwargs):
""" override form class to use own function """
kwargs['form_class'] = InvenTreeMoneyField
return super().formfield(**kwargs)
class InvenTreeMoneyField(MoneyField):
""" custom MoneyField for clean migrations while using dynamic currency settings """
def __init__(self, *args, **kwargs):
# override initial values with the real info from database
kwargs.update(money_kwargs())
super().__init__(*args, **kwargs)
class DatePickerFormField(forms.DateField):
"""
Custom date-picker field
"""
def __init__(self, **kwargs):
help_text = kwargs.get('help_text', _('Enter date'))
label = kwargs.get('label', None)
required = kwargs.get('required', False)
initial = kwargs.get('initial', None)
widget = forms.DateInput(
attrs={
'type': 'date',
}
)
forms.DateField.__init__(
self,
required=required,
initial=initial,
help_text=help_text,
widget=widget,
label=label
)
def round_decimal(value, places):
"""
Round value to the specified number of places.
"""
if value is not None:
# see https://docs.python.org/2/library/decimal.html#decimal.Decimal.quantize for options
return value.quantize(Decimal(10) ** -places)
return value
class RoundingDecimalFormField(forms.DecimalField):
def to_python(self, value):
value = super(RoundingDecimalFormField, self).to_python(value)
value = round_decimal(value, self.decimal_places)
return value
def prepare_value(self, value):
"""
Override the 'prepare_value' method, to remove trailing zeros when displaying.
Why? It looks nice!
"""
if type(value) == Decimal:
return InvenTree.helpers.normalize(value)
else:
return value
class RoundingDecimalField(models.DecimalField):
def to_python(self, value):
value = super(RoundingDecimalField, self).to_python(value)
return round_decimal(value, self.decimal_places)
def formfield(self, **kwargs):
defaults = {
'form_class': RoundingDecimalFormField
}
defaults.update(kwargs)
return super().formfield(**defaults)
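A quick sketch of the rounding helper in isolation, showing the quantize behaviour that both the form field and the model field rely on:

```python
from decimal import Decimal

round_decimal(Decimal('3.14159'), 2)   # -> Decimal('3.14')
round_decimal(None, 2)                 # -> None (passed through unchanged)
```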

View File

@@ -0,0 +1,206 @@
"""
Helper forms which subclass Django forms to provide additional functionality
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.auth.models import User
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Field
from crispy_forms.bootstrap import PrependedText, AppendedText, PrependedAppendedText, StrictButton, Div
from part.models import PartCategory
class HelperForm(forms.ModelForm):
""" Provides simple integration of crispy_forms extension. """
# Custom field decorations can be specified here, per form class
field_prefix = {}
field_suffix = {}
field_placeholder = {}
def __init__(self, *args, **kwargs):
super(forms.ModelForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.form_show_errors = True
"""
Create a default 'layout' for this form.
Ref: https://django-crispy-forms.readthedocs.io/en/latest/layouts.html
This is required to do fancy things later (like adding PrependedText, etc).
Simply create a 'blank' layout for each available field.
"""
self.rebuild_layout()
def is_valid(self):
valid = super(HelperForm, self).is_valid()
return valid
def rebuild_layout(self):
layouts = []
for field in self.fields:
prefix = self.field_prefix.get(field, None)
suffix = self.field_suffix.get(field, None)
placeholder = self.field_placeholder.get(field, '')
# Look for font-awesome icons
if prefix and prefix.startswith('fa-'):
prefix = r"<i class='fas {fa}'/>".format(fa=prefix)
if suffix and suffix.startswith('fa-'):
suffix = r"<i class='fas {fa}'/>".format(fa=suffix)
if prefix and suffix:
layouts.append(
Field(
PrependedAppendedText(
field,
prepended_text=prefix,
appended_text=suffix,
placeholder=placeholder
)
)
)
elif prefix:
layouts.append(
Field(
PrependedText(
field,
prefix,
placeholder=placeholder
)
)
)
elif suffix:
layouts.append(
Field(
AppendedText(
field,
suffix,
placeholder=placeholder
)
)
)
else:
layouts.append(Field(field, placeholder=placeholder))
self.helper.layout = Layout(*layouts)
class ConfirmForm(forms.Form):
""" Generic confirmation form """
confirm = forms.BooleanField(
required=False, initial=False,
help_text=_("Confirm")
)
class Meta:
fields = [
'confirm'
]
class DeleteForm(forms.Form):
""" Generic deletion form which provides simple user confirmation
"""
confirm_delete = forms.BooleanField(
required=False,
initial=False,
label=_('Confirm delete'),
help_text=_('Confirm item deletion')
)
class Meta:
fields = [
'confirm_delete'
]
class EditUserForm(HelperForm):
""" Form for editing user information
"""
class Meta:
model = User
fields = [
'username',
'first_name',
'last_name',
'email'
]
class SetPasswordForm(HelperForm):
""" Form for setting user password
"""
enter_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Enter password'),
help_text=_('Enter new password'))
confirm_password = forms.CharField(max_length=100,
min_length=8,
required=True,
initial='',
widget=forms.PasswordInput(attrs={'autocomplete': 'off'}),
label=_('Confirm password'),
help_text=_('Confirm new password'))
class Meta:
model = User
fields = [
'enter_password',
'confirm_password'
]
class SettingCategorySelectForm(forms.ModelForm):
""" Form for setting category settings """
category = forms.ModelChoiceField(queryset=PartCategory.objects.all())
class Meta:
model = PartCategory
fields = [
'category'
]
def __init__(self, *args, **kwargs):
super(SettingCategorySelectForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
# Form rendering
self.helper.form_show_labels = False
self.helper.layout = Layout(
Div(
Div(Field('category'),
css_class='col-sm-6',
style='width: 70%;'),
Div(StrictButton(_('Select Category'), css_class='btn btn-primary', type='submit'),
css_class='col-sm-6',
style='width: 30%; padding-left: 0;'),
css_class='row',
),
)
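A hypothetical HelperForm subclass, sketching how the field_prefix / field_placeholder hooks above translate into prepended font-awesome icons:

```python
from django.contrib.auth.models import User


class ExampleUserForm(HelperForm):
    """Illustrative only - not part of the file above."""

    field_prefix = {'email': 'fa-at'}                    # rendered as <i class='fas fa-at'/>
    field_placeholder = {'email': 'user@example.com'}

    class Meta:
        model = User
        fields = ['username', 'email']
```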

View File

@@ -0,0 +1,664 @@
"""
Provides helper functions used throughout the InvenTree project
"""
import io
import re
import json
import os.path
from PIL import Image
from decimal import Decimal, InvalidOperation
from wsgiref.util import FileWrapper
from django.http import StreamingHttpResponse
from django.core.exceptions import ValidationError, FieldError
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import Permission
import InvenTree.version
from common.models import InvenTreeSetting
from .settings import MEDIA_URL, STATIC_URL
from common.settings import currency_code_default
from djmoney.money import Money
def getSetting(key, backup_value=None):
"""
Shortcut for reading a setting value from the database
"""
return InvenTreeSetting.get_setting(key, backup_value=backup_value)
def generateTestKey(test_name):
"""
Generate a test 'key' for a given test name.
This must not have illegal chars as it will be used for dict lookup in a template.
Tests must be named such that they will have unique keys.
"""
key = test_name.strip().lower()
key = key.replace(" ", "")
# Remove any characters that cannot be used to represent a variable
key = re.sub(r'[^a-zA-Z0-9]', '', key)
return key
def getMediaUrl(filename):
"""
Return the qualified access path for the given file,
under the media directory.
"""
return os.path.join(MEDIA_URL, str(filename))
def getStaticUrl(filename):
"""
Return the qualified access path for the given file,
under the static media directory.
"""
return os.path.join(STATIC_URL, str(filename))
def getBlankImage():
"""
Return the qualified path for the 'blank image' placeholder.
"""
return getStaticUrl("img/blank_image.png")
def getBlankThumbnail():
"""
Return the qualified path for the 'blank image' thumbnail placeholder.
"""
return getStaticUrl("img/blank_image.thumbnail.png")
def TestIfImage(img):
""" Test if an image file is indeed an image """
try:
Image.open(img).verify()
return True
except:
return False
def TestIfImageURL(url):
""" Test if an image URL (or filename) looks like a valid image format.
Simply tests the extension against a set of allowed values
"""
return os.path.splitext(os.path.basename(url))[-1].lower() in [
'.jpg', '.jpeg',
'.png', '.bmp',
'.tif', '.tiff',
'.webp', '.gif',
]
def str2bool(text, test=True):
""" Test if a string 'looks' like a boolean value.
Args:
text: Input text
test (default = True): Set which boolean value to look for
Returns:
True if the text looks like the selected boolean value
"""
if test:
return str(text).lower() in ['1', 'y', 'yes', 't', 'true', 'ok', 'on', ]
else:
return str(text).lower() in ['0', 'n', 'no', 'none', 'f', 'false', 'off', ]
def is_bool(text):
"""
Determine if a string value 'looks' like a boolean.
"""
if str2bool(text, True):
return True
elif str2bool(text, False):
return True
else:
return False
def isNull(text):
"""
Test if a string 'looks' like a null value.
This is useful for querying the API against a null key.
Args:
text: Input text
Returns:
True if the text looks like a null value
"""
return str(text).strip().lower() in ['top', 'null', 'none', 'empty', 'false', '-1', '']
def normalize(d):
"""
Normalize a decimal number, and remove exponential formatting.
"""
if type(d) is not Decimal:
d = Decimal(d)
d = d.normalize()
# Ref: https://docs.python.org/3/library/decimal.html
return d.quantize(Decimal(1)) if d == d.to_integral() else d.normalize()
def increment(n):
"""
Attempt to increment an integer (or a string that looks like an integer!)
e.g.
001 -> 002
2 -> 3
AB01 -> AB02
QQQ -> QQQ
"""
value = str(n).strip()
# Ignore empty strings
if not value:
return value
pattern = r"(.*?)(\d+)?$"
result = re.search(pattern, value)
# No match!
if result is None:
return value
groups = result.groups()
# If we cannot match the regex, then simply return the provided value
if not len(groups) == 2:
return value
prefix, number = groups
# No number extracted? Simply return the prefix (without incrementing!)
if not number:
return prefix
# Record the width of the number
width = len(number)
try:
number = int(number) + 1
number = str(number)
except ValueError:
pass
number = number.zfill(width)
return prefix + number
def decimal2string(d):
"""
Format a Decimal number as a string,
stripping out any trailing zeroes or decimal points.
Essentially make it look like a whole number if it is one.
Args:
d: A python Decimal object
Returns:
A string representation of the input number
"""
if type(d) is Decimal:
d = normalize(d)
try:
# Ensure that the provided string can actually be converted to a float
float(d)
except ValueError:
# Not a number
return str(d)
s = str(d)
# Return entire number if there is no decimal place
if '.' not in s:
return s
return s.rstrip("0").rstrip(".")
def decimal2money(d, currency=None):
"""
Format a Decimal number as Money
Args:
d: A python Decimal object
currency: Currency of the input amount, defaults to default currency in settings
Returns:
A Money object from the input(s)
"""
if not currency:
currency = currency_code_default()
return Money(d, currency)
def WrapWithQuotes(text, quote='"'):
""" Wrap the supplied text with quotes
Args:
text: Input text to wrap
quote: Quote character to use for wrapping (defaults to the double-quote character '"')
Returns:
Supplied text wrapped in quote char
"""
if not text.startswith(quote):
text = quote + text
if not text.endswith(quote):
text = text + quote
return text
def MakeBarcode(object_name, object_pk, object_data={}, **kwargs):
""" Generate a string for a barcode. Adds some global InvenTree parameters.
Args:
object_name: string describing the object type e.g. 'StockItem'
object_pk: ID (primary key) of the object in the database
object_data: Python dict object containing extra data which will be rendered to string (must only contain stringable values)
Returns:
json string of the supplied data plus some other data
"""
url = kwargs.get('url', False)
brief = kwargs.get('brief', True)
data = {}
if url:
request = object_data.get('request', None)
item_url = object_data.get('item_url', None)
absolute_url = None
if request and item_url:
absolute_url = request.build_absolute_uri(item_url)
# Return URL (No JSON)
return absolute_url
if item_url:
# Return URL (No JSON)
return item_url
elif brief:
data[object_name] = object_pk
else:
data['tool'] = 'InvenTree'
data['version'] = InvenTree.version.inventreeVersion()
data['instance'] = InvenTree.version.inventreeInstanceName()
# Ensure PK is included
object_data['id'] = object_pk
data[object_name] = object_data
return json.dumps(data, sort_keys=True)
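For example, with the default brief=True the generated barcode data reduces to a single key/value pair (a sketch of expected output, not a test):

```python
MakeBarcode("stockitem", 42)
# -> '{"stockitem": 42}'

MakeBarcode("stockitem", 42, {"serial": "ABC123"}, brief=False)
# -> JSON string also containing 'tool', 'version' and 'instance' keys
```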
def GetExportFormats():
""" Return a list of allowable file formats for exporting data """
return [
'csv',
'tsv',
'xls',
'xlsx',
'json',
'yaml',
]
def DownloadFile(data, filename, content_type='application/text'):
""" Create a dynamic file for the user to download.
Args:
data: Raw file data (string or bytes)
filename: Filename for the file download
content_type: Content type for the download
Return:
A StreamingHttpResponse object wrapping the supplied data
"""
filename = WrapWithQuotes(filename)
if type(data) == str:
wrapper = FileWrapper(io.StringIO(data))
else:
wrapper = FileWrapper(io.BytesIO(data))
response = StreamingHttpResponse(wrapper, content_type=content_type)
response['Content-Length'] = len(data)
response['Content-Disposition'] = 'attachment; filename={f}'.format(f=filename)
return response
def extract_serial_numbers(serials, expected_quantity):
""" Attempt to extract serial numbers from an input string.
- Serial numbers must be integer values
- Serial numbers must be positive
- Serial numbers can be split by whitespace / newline / comma chars
- Serial numbers can be supplied as an inclusive range using hyphen char e.g. 10-20
- Serial numbers can be supplied as <start>+ for getting all expected numbers starting from <start>
- Serial numbers can be supplied as <start>+<length> for getting <length> numbers starting from <start>
Args:
serials: Input string to be parsed
expected_quantity: The number of (unique) serial numbers we expect
"""
serials = serials.strip()
groups = re.split("[\s,]+", serials)
numbers = []
errors = []
# helpers
def number_add(n):
if n in numbers:
errors.append(_('Duplicate serial: {n}').format(n=n))
else:
numbers.append(n)
try:
expected_quantity = int(expected_quantity)
except ValueError:
raise ValidationError([_("Invalid quantity provided")])
if len(serials) == 0:
raise ValidationError([_("Empty serial number string")])
for group in groups:
group = group.strip()
# Hyphen indicates a range of numbers
if '-' in group:
items = group.split('-')
if len(items) == 2:
a = items[0].strip()
b = items[1].strip()
try:
a = int(a)
b = int(b)
if a < b:
for n in range(a, b + 1):
number_add(n)
else:
errors.append(_("Invalid group: {g}").format(g=group))
except ValueError:
errors.append(_("Invalid group: {g}").format(g=group))
continue
else:
errors.append(_("Invalid group: {g}").format(g=group))
continue
# plus signals either
# 1: 'start+': expected number of serials, starting at start
# 2: 'start+number': number of serials, starting at start
elif '+' in group:
items = group.split('+')
# case 1, 2
if len(items) == 2:
start = int(items[0])
# case 2
if bool(items[1]):
end = start + int(items[1]) + 1
# case 1
else:
end = start + expected_quantity
for n in range(start, end):
number_add(n)
# no case
else:
errors.append(_("Invalid group: {g}").format(g=group))
continue
else:
if group in numbers:
errors.append(_("Duplicate serial: {g}".format(g=group)))
else:
numbers.append(group)
if len(errors) > 0:
raise ValidationError(errors)
if len(numbers) == 0:
raise ValidationError([_("No serial numbers found")])
# The number of extracted serial numbers must match the expected quantity
if not expected_quantity == len(numbers):
raise ValidationError([_("Number of unique serial number ({s}) must match quantity ({q})").format(s=len(numbers), q=expected_quantity)])
return numbers
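To make the accepted formats concrete, a few illustrative calls (a sketch based on the logic above):

```python
extract_serial_numbers("1, 2, 3, 7-9", 6)   # -> six serials: 1, 2, 3, 7, 8, 9
extract_serial_numbers("100+", 4)           # -> [100, 101, 102, 103]
extract_serial_numbers("1-3", 5)            # raises ValidationError (3 serials supplied, 5 expected)
```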
def validateFilterString(value, model=None):
"""
Validate that a provided filter string looks like a list of comma-separated key=value pairs
These should nominally match to a valid database filter based on the model being filtered.
e.g. "category=6, IPN=12"
e.g. "part__name=widget"
The ReportTemplate class uses the filter string to work out which items a given report applies to.
For example, an acceptance test report template might only apply to stock items with a given IPN,
so the string could be set to:
filters = "IPN = ACME0001"
Returns a map of key:value pairs
"""
# Empty results map
results = {}
value = str(value).strip()
if not value or len(value) == 0:
return results
groups = value.split(',')
for group in groups:
group = group.strip()
pair = group.split('=')
if not len(pair) == 2:
raise ValidationError(
"Invalid group: {g}".format(g=group)
)
k, v = pair
k = k.strip()
v = v.strip()
if not k or not v:
raise ValidationError(
"Invalid group: {g}".format(g=group)
)
results[k] = v
# If a model is provided, verify that the provided filters can be used against it
if model is not None:
try:
model.objects.filter(**results)
except FieldError as e:
raise ValidationError(
str(e),
)
return results
def addUserPermission(user, permission):
"""
Shortcut function for adding a certain permission to a user.
"""
perm = Permission.objects.get(codename=permission)
user.user_permissions.add(perm)
def addUserPermissions(user, permissions):
"""
Shortcut function for adding multiple permissions to a user.
"""
for permission in permissions:
addUserPermission(user, permission)
def getMigrationFileNames(app):
"""
Return a list of all migration filenames for provided app
"""
local_dir = os.path.dirname(os.path.abspath(__file__))
migration_dir = os.path.join(local_dir, '..', app, 'migrations')
files = os.listdir(migration_dir)
# Regex pattern for migration files
pattern = r"^[\d]+_.*\.py$"
migration_files = []
for f in files:
if re.match(pattern, f):
migration_files.append(f)
return migration_files
def getOldestMigrationFile(app, exclude_extension=True, ignore_initial=True):
"""
Return the filename associated with the oldest migration
"""
oldest_num = -1
oldest_file = None
for f in getMigrationFileNames(app):
if ignore_initial and f.startswith('0001_initial'):
continue
num = int(f.split('_')[0])
if oldest_file is None or num < oldest_num:
oldest_num = num
oldest_file = f
if exclude_extension:
oldest_file = oldest_file.replace('.py', '')
return oldest_file
def getNewestMigrationFile(app, exclude_extension=True):
"""
Return the filename associated with the newest migration
"""
newest_file = None
newest_num = -1
for f in getMigrationFileNames(app):
num = int(f.split('_')[0])
if newest_file is None or num > newest_num:
newest_num = num
newest_file = f
if exclude_extension:
newest_file = newest_file.replace('.py', '')
return newest_file
def clean_decimal(number):
""" Clean-up decimal value """
# Check if empty
if number is None or number == '' or number == 0:
return Decimal(0)
# Convert to string and remove spaces
number = str(number).replace(' ', '')
# Guess what type of decimal and thousands separators are used
count_comma = number.count(',')
count_point = number.count('.')
if count_comma == 1:
# Comma is used as decimal separator
if count_point > 0:
# Points are used as thousands separators: remove them
number = number.replace('.', '')
# Replace decimal separator with point
number = number.replace(',', '.')
elif count_point == 1:
# Point is used as decimal separator
if count_comma > 0:
# Commas are used as thousands separators: remove them
number = number.replace(',', '')
# Convert to Decimal type
try:
clean_number = Decimal(number)
except InvalidOperation:
# Number cannot be converted to Decimal (eg. a string containing letters)
return Decimal(0)
return clean_number.quantize(Decimal(1)) if clean_number == clean_number.to_integral() else clean_number.normalize()
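The separator-guessing above can be illustrated with a few representative inputs (a sketch, not an exhaustive test):

```python
from decimal import Decimal

clean_decimal("1.234,56")   # -> Decimal('1234.56')  (points treated as thousands separators)
clean_decimal("1,5")        # -> Decimal('1.5')      (single comma treated as decimal separator)
clean_decimal("1500")       # -> Decimal('1500')     (integral values quantized to whole numbers)
clean_decimal("abc")        # -> Decimal('0')        (unparseable input falls back to zero)
```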

View File

@@ -0,0 +1,38 @@
"""
Custom management command to clean up old settings that are no longer defined
"""
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Cleanup old (undefined) settings in the database
"""
def handle(self, *args, **kwargs):
print("Collecting settings")
from common.models import InvenTreeSetting, InvenTreeUserSetting
# general settings
db_settings = InvenTreeSetting.objects.all()
model_settings = InvenTreeSetting.GLOBAL_SETTINGS
# check if the key is still defined, and delete the setting if not
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
print(f"deleted setting '{setting.key}'")
# user settings
db_settings = InvenTreeUserSetting.objects.all()
model_settings = InvenTreeUserSetting.GLOBAL_SETTINGS
# check if the key is still defined, and delete the setting if not
for setting in db_settings:
if setting.key not in model_settings:
setting.delete()
print(f"deleted user setting '{setting.key}'")
print("checked all settings")

View File

@@ -0,0 +1,61 @@
"""
Custom management command to prerender files
"""
from django.core.management.base import BaseCommand
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.module_loading import import_string
from django.http.request import HttpRequest
from django.utils.translation import override as lang_over
import os
def render_file(file_name, source, target, locales, ctx):
""" renders a file into all provided locales """
for locale in locales:
target_file = os.path.join(target, locale + '.' + file_name)
with open(target_file, 'w') as localised_file:
with lang_over(locale):
rendered = render_to_string(os.path.join(source, file_name), ctx)
localised_file.write(rendered)
class Command(BaseCommand):
"""
django command to prerender files
"""
def handle(self, *args, **kwargs):
# static directories
LC_DIR = settings.LOCALE_PATHS[0]
SOURCE_DIR = settings.STATICFILES_I18_SRC
TARGET_DIR = settings.STATICFILES_I18_TRG
# ensure static directory exists
if not os.path.exists(TARGET_DIR):
os.makedirs(TARGET_DIR, exist_ok=True)
# collect locales
locales = {}
for locale in os.listdir(LC_DIR):
path = os.path.join(LC_DIR, locale)
if os.path.exists(path) and os.path.isdir(path):
locales[locale] = locale
# render!
request = HttpRequest()
ctx = {}
processors = tuple(import_string(path) for path in settings.STATFILES_I18_PROCESSORS)
for processor in processors:
ctx.update(processor(request))
for file in os.listdir(SOURCE_DIR):
path = os.path.join(SOURCE_DIR, file)
if os.path.exists(path) and os.path.isfile(path):
print(f"render {file}")
render_file(file, SOURCE_DIR, TARGET_DIR, locales, ctx)
else:
raise NotImplementedError('Using multi-level directories is not implemented at this point') # TODO multilevel dir if needed
print(f"rendered all files in {SOURCE_DIR}")

View File

@@ -0,0 +1,60 @@
"""
Custom management command to rebuild all MPTT models
- This is crucial after importing any fixtures, etc
"""
from django.core.management.base import BaseCommand
class Command(BaseCommand):
"""
Rebuild all database models which leverage the MPTT structure.
"""
def handle(self, *args, **kwargs):
# Part model
try:
print("Rebuilding Part objects")
from part.models import Part
Part.objects.rebuild()
except:
print("Error rebuilding Part objects")
# Part category
try:
print("Rebuilding PartCategory objects")
from part.models import PartCategory
PartCategory.objects.rebuild()
except:
print("Error rebuilding PartCategory objects")
# StockItem model
try:
print("Rebuilding StockItem objects")
from stock.models import StockItem
StockItem.objects.rebuild()
except:
print("Error rebuilding StockItem objects")
# StockLocation model
try:
print("Rebuilding StockLocation objects")
from stock.models import StockLocation
StockLocation.objects.rebuild()
except:
print("Error rebuilding StockLocation objects")
# Build model
try:
print("Rebuilding Build objects")
from build.models import Build
Build.objects.rebuild()
except:
print("Error rebuilding Build objects")

View File

@@ -0,0 +1,42 @@
"""
Custom management command, wait for the database to be ready!
"""
from django.core.management.base import BaseCommand
from django.db import connection
from django.db.utils import OperationalError, ImproperlyConfigured
import time
class Command(BaseCommand):
"""
django command to pause execution until the database is ready
"""
def handle(self, *args, **kwargs):
self.stdout.write("Waiting for database...")
connected = False
while not connected:
time.sleep(5)
try:
connection.ensure_connection()
connected = True
except OperationalError as e:
self.stdout.write(f"Could not connect to database: {e}")
except ImproperlyConfigured as e:
self.stdout.write(f"Improperly configured: {e}")
else:
if not connection.is_usable():
self.stdout.write("Database configuration is not usable")
if connected:
self.stdout.write("Database connection sucessful!")

View File

@@ -0,0 +1,221 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
from rest_framework import serializers
from rest_framework.metadata import SimpleMetadata
from rest_framework.utils import model_meta
from rest_framework.fields import empty
import users.models
logger = logging.getLogger('inventree')
class InvenTreeMetadata(SimpleMetadata):
"""
Custom metadata class for the DRF API.
This custom metadata class limits the available "actions",
based on the user's role permissions.
Thus, when a client sends an OPTIONS request to an API endpoint,
it will only receive a list of actions which it is allowed to perform!
Additionally, we include some extra information about database models,
so we can perform lookup for ForeignKey related fields.
"""
def determine_metadata(self, request, view):
self.request = request
self.view = view
metadata = super().determine_metadata(request, view)
user = request.user
if user is None:
# No actions for you!
metadata['actions'] = {}
return metadata
try:
# Extract the model name associated with the view
self.model = view.serializer_class.Meta.model
# Construct the 'table name' from the model
app_label = self.model._meta.app_label
tbl_label = self.model._meta.model_name
metadata['model'] = tbl_label
table = f"{app_label}_{tbl_label}"
actions = metadata.get('actions', None)
if actions is not None:
check = users.models.RuleSet.check_table_permission
# Map the request method to a permission type
rolemap = {
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
# Remove any HTTP methods that the user does not have permission for
for method, permission in rolemap.items():
if method in actions and not check(user, table, permission):
del actions[method]
# Add a 'DELETE' action if we are allowed to delete
if 'DELETE' in view.allowed_methods and check(user, table, 'delete'):
actions['DELETE'] = True
# Add a 'VIEW' action if we are allowed to view
if 'GET' in view.allowed_methods and check(user, table, 'view'):
actions['GET'] = True
except AttributeError:
# We will assume that if the serializer class does *not* have a Meta
# then we don't need a permission
pass
return metadata
def get_serializer_info(self, serializer):
"""
Override get_serializer_info so that we can add 'default' values
to any fields whose Meta.model specifies a default value
"""
serializer_info = super().get_serializer_info(serializer)
try:
ModelClass = serializer.Meta.model
model_fields = model_meta.get_field_info(ModelClass)
# Iterate through simple fields
for name, field in model_fields.fields.items():
if field.has_default() and name in serializer_info.keys():
default = field.default
if callable(default):
try:
default = default()
except:
continue
serializer_info[name]['default'] = default
# Iterate through relations
for name, relation in model_fields.relations.items():
if name not in serializer_info.keys():
# Skip relation not defined in serializer
continue
if relation.reverse:
# Ignore reverse relations
continue
# Extract and provide the "limit_choices_to" filters
# This is used to automatically filter AJAX requests
serializer_info[name]['filters'] = relation.model_field.get_limit_choices_to()
if 'help_text' not in serializer_info[name] and hasattr(relation.model_field, 'help_text'):
serializer_info[name]['help_text'] = relation.model_field.help_text
except AttributeError:
pass
# Try to extract 'instance' information
instance = None
# Extract extra information if an instance is available
if hasattr(serializer, 'instance'):
instance = serializer.instance
if instance is None:
try:
instance = self.view.get_object()
except:
pass
if instance is not None:
"""
If there is an instance associated with this API View,
introspect that instance to find any specific API info.
"""
if hasattr(instance, 'api_instance_filters'):
instance_filters = instance.api_instance_filters()
for field_name, field_filters in instance_filters.items():
if field_name not in serializer_info.keys():
# The field might be missing, but is added later on
# This function seems to get called multiple times?
continue
if 'instance_filters' not in serializer_info[field_name].keys():
serializer_info[field_name]['instance_filters'] = {}
for key, value in field_filters.items():
serializer_info[field_name]['instance_filters'][key] = value
return serializer_info
def get_field_info(self, field):
"""
Given an instance of a serializer field, return a dictionary
of metadata about it.
We take the regular DRF metadata and add our own unique flavor
"""
# Run super method first
field_info = super().get_field_info(field)
# If a default value is specified for the serializer field, add it!
if 'default' not in field_info and not field.default == empty:
field_info['default'] = field.get_default()
# Force non-nullable fields to read as "required"
# (even if there is a default value!)
if not field.allow_null and not (hasattr(field, 'allow_blank') and field.allow_blank):
field_info['required'] = True
# Introspect writable related fields
if field_info['type'] == 'field' and not field_info['read_only']:
# If the field is a PrimaryKeyRelatedField, we can extract the model from the queryset
if isinstance(field, serializers.PrimaryKeyRelatedField):
model = field.queryset.model
else:
logger.debug("Could not extract model for:", field_info['label'], '->', field)
model = None
if model:
# Mark this field as "related", and point to the URL where we can get the data!
field_info['type'] = 'related field'
field_info['model'] = model._meta.model_name
# Special case for 'user' model
if field_info['model'] == 'user':
field_info['api_url'] = '/api/user/'
else:
field_info['api_url'] = model.get_api_url()
return field_info
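# Minimal sketch (not part of the original file) of the action-filtering idea used
# in determine_metadata() above: HTTP methods are mapped to role permissions and
# any method the user may not perform is dropped from the OPTIONS metadata.
# The 'can' callable stands in for users.models.RuleSet.check_table_permission.
def filter_actions(actions, table, can):
    """Return only the actions the user is permitted to perform on 'table'."""
    rolemap = {
        'POST': 'add',
        'PUT': 'change',
        'PATCH': 'change',
        'DELETE': 'delete',
    }
    return {
        method: info
        for method, info in actions.items()
        if method not in rolemap or can(table, rolemap[method])
    }

# Example: filter_actions({'POST': {...}, 'DELETE': {...}}, 'part_part', can)
# keeps 'POST' only if can('part_part', 'add') is True, and so on.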

View File

@@ -0,0 +1,159 @@
from django.shortcuts import HttpResponseRedirect
from django.urls import reverse_lazy
from django.db import connection
from django.shortcuts import redirect
import logging
import time
import operator
from rest_framework.authtoken.models import Token
logger = logging.getLogger("inventree")
class AuthRequiredMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
assert hasattr(request, 'user')
response = self.get_response(request)
if not request.user.is_authenticated:
"""
Normally, a web-based session would use csrftoken based authentication.
However when running an external application (e.g. the InvenTree app),
we wish to use token-based auth to grab media files.
So, we will allow token-based authentication but ONLY for the /media/ directory.
What problem is this solving?
- The InvenTree mobile app does not use csrf token auth
- Token auth is used by the Django REST framework, but that is under the /api/ endpoint
- Media files (e.g. Part images) are required to be served to the app
- We do not want to make /media/ files accessible without login!
There is PROBABLY a better way of going about this?
a) Allow token-based authentication against a user?
b) Serve /media/ files in a duplicate location e.g. /api/media/ ?
c) Is there a "standard" way of solving this problem?
My [google|stackoverflow]-fu has failed me. So this hack has been created.
"""
authorized = False
# Allow static files to be accessed without auth
# Important for e.g. login page
if request.path_info.startswith('/static/'):
authorized = True
# Unauthorized users can access the login page
elif request.path_info.startswith('/accounts/'):
authorized = True
elif 'Authorization' in request.headers.keys():
auth = request.headers['Authorization'].strip()
if auth.startswith('Token') and len(auth.split()) == 2:
token = auth.split()[1]
# Does the provided token match a valid user?
if Token.objects.filter(key=token).exists():
allowed = ['/api/', '/media/']
# Only allow token-auth for the /api/ or /media/ directories!
if any([request.path_info.startswith(a) for a in allowed]):
authorized = True
# No authorization was found for the request
if not authorized:
# A logout request will redirect the user to the login screen
if request.path_info == reverse_lazy('logout'):
return HttpResponseRedirect(reverse_lazy('login'))
path = request.path_info
# List of URL endpoints we *do not* want to redirect to
urls = [
reverse_lazy('login'),
reverse_lazy('logout'),
reverse_lazy('admin:login'),
reverse_lazy('admin:logout'),
]
if path not in urls and not path.startswith('/api/'):
# Save the 'next' parameter to pass through to the login view
return redirect('%s?next=%s' % (reverse_lazy('login'), request.path))
# Code to be executed for each request/response after
# the view is called.
return response
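# Client-side sketch (not part of the original file): how an external application
# (e.g. the InvenTree mobile app) can fetch a media file through this middleware
# using token authentication. The 'requests' library, server URL and token value
# are assumptions for illustration only.
import requests

def fetch_media_file(base_url, token, path):
    # Token auth is only honoured by the middleware for /api/ and /media/ paths
    response = requests.get(
        base_url + path,
        headers={'Authorization': f'Token {token}'},
    )
    return response.content

# fetch_media_file('http://localhost:8000', '<api-token>', '/media/part_images/widget.png')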
class QueryCountMiddleware(object):
"""
This middleware will log the number of queries run
and the total time taken for each request (with a
status code of 200). It does not currently support
multi-db setups.
To enable this middleware, set 'log_queries: True' in the local InvenTree config file.
Reference: https://www.dabapps.com/blog/logging-sql-queries-django-13/
Note: 2020-08-15 - This is no longer used, instead we now rely on the django-debug-toolbar addon
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
t_start = time.time()
response = self.get_response(request)
t_stop = time.time()
if response.status_code == 200:
total_time = 0
if len(connection.queries) > 0:
queries = {}
for query in connection.queries:
query_time = query.get('time')
sql = query.get('sql').split('.')[0]
if sql in queries:
queries[sql] += 1
else:
queries[sql] = 1
if query_time is None:
# django-debug-toolbar monkeypatches the connection
# cursor wrapper and adds extra information in each
# item in connection.queries. The query time is stored
# under the key "duration" rather than "time" and is
# in milliseconds, not seconds.
query_time = float(query.get('duration', 0))
total_time += float(query_time)
logger.debug('{n} queries run, {a:.3f}s / {b:.3f}s'.format(
n=len(connection.queries),
a=total_time,
b=(t_stop - t_start)))
for x in sorted(queries.items(), key=operator.itemgetter(1), reverse=True):
print(x[0], ':', x[1])
return response

View File

@@ -0,0 +1,244 @@
"""
Generic models which provide extra functionality over base Django model types.
"""
from __future__ import unicode_literals
import os
from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext_lazy as _
from django.core.exceptions import ValidationError
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from mptt.models import MPTTModel, TreeForeignKey
from mptt.exceptions import InvalidMove
from .validators import validate_tree_name
def rename_attachment(instance, filename):
"""
Function for renaming an attachment file.
The subdirectory for the uploaded file is determined by the implementing class.
Args:
instance: Instance of a PartAttachment object
filename: name of uploaded file
Returns:
path to store file, format: '<subdir>/<id>/filename'
"""
# Construct a path to store a file attachment for a given model type
return os.path.join(instance.getSubdir(), filename)
class InvenTreeAttachment(models.Model):
""" Provides an abstracted class for managing file attachments.
Attributes:
attachment: File
comment: String descriptor for the attachment
user: User associated with file upload
upload_date: Date the file was uploaded
"""
def getSubdir(self):
"""
Return the subdirectory under which attachments should be stored.
Note: Re-implement this for each subclass of InvenTreeAttachment
"""
return "attachments"
def __str__(self):
return os.path.basename(self.attachment.name)
attachment = models.FileField(upload_to=rename_attachment, verbose_name=_('Attachment'),
help_text=_('Select file to attach'))
comment = models.CharField(blank=True, max_length=100, verbose_name=_('Comment'), help_text=_('File comment'))
user = models.ForeignKey(
User,
on_delete=models.SET_NULL,
blank=True, null=True,
verbose_name=_('User'),
help_text=_('User'),
)
upload_date = models.DateField(auto_now_add=True, null=True, blank=True, verbose_name=_('upload date'))
@property
def basename(self):
return os.path.basename(self.attachment.name)
class Meta:
abstract = True
class InvenTreeTree(MPTTModel):
""" Provides an abstracted self-referencing tree model for data categories.
- Each Category has one parent Category, which can be blank (for a top-level Category).
- Each Category can have zero-or-more child Categor(y/ies)
Attributes:
name: brief name
description: longer form description
parent: The item immediately above this one. An item with a null parent is a top-level item
"""
def api_instance_filters(self):
"""
Instance filters for InvenTreeTree models
"""
return {
'parent': {
'exclude_tree': self.pk,
}
}
def save(self, *args, **kwargs):
try:
super().save(*args, **kwargs)
except InvalidMove:
raise ValidationError({
'parent': _("Invalid choice"),
})
class Meta:
abstract = True
# Names must be unique at any given level in the tree
unique_together = ('name', 'parent')
class MPTTMeta:
order_insertion_by = ['name']
name = models.CharField(
blank=False,
max_length=100,
validators=[validate_tree_name],
verbose_name=_("Name"),
help_text=_("Name"),
)
description = models.CharField(
blank=True,
max_length=250,
verbose_name=_("Description"),
help_text=_("Description (optional)")
)
# When a category is deleted, graft the children onto its parent
parent = TreeForeignKey('self',
on_delete=models.DO_NOTHING,
blank=True,
null=True,
verbose_name=_("parent"),
related_name='children')
@property
def item_count(self):
""" Return the number of items which exist *under* this node in the tree.
Here an 'item' is considered to be the 'leaf' at the end of each branch,
and the exact nature here will depend on the class implementation.
The default implementation returns zero
"""
return 0
def getUniqueParents(self):
""" Return a flat set of all parent items that exist above this node.
If any parents are repeated (which would be very bad!), the process is halted
"""
return self.get_ancestors()
def getUniqueChildren(self, include_self=True):
""" Return a flat set of all child items that exist under this node.
If any child items are repeated, the repetitions are omitted.
"""
return self.get_descendants(include_self=include_self)
@property
def has_children(self):
""" True if there are any children under this item """
return self.getUniqueChildren(include_self=False).count() > 0
def getAcceptableParents(self):
""" Returns a list of acceptable parent items within this model
Acceptable parents are ones which are not underneath this item.
Setting the parent of an item to its own child results in recursion.
"""
contents = ContentType.objects.get_for_model(type(self))
available = contents.get_all_objects_for_this_type()
# List of child IDs
childs = [child.pk for child in self.getUniqueChildren()]
acceptable = [None]
for a in available:
if a.id not in childs:
acceptable.append(a)
return acceptable
@property
def parentpath(self):
""" Get the parent path of this category
Returns:
List of category names from the top level to the parent of this category
"""
return [a for a in self.get_ancestors()]
@property
def path(self):
""" Get the complete part of this category.
e.g. ["Top", "Second", "Third", "This"]
Returns:
List of category names from the top level to this category
"""
return self.parentpath + [self]
@property
def pathstring(self):
""" Get a string representation for the path of this item.
e.g. "Top/Second/Third/This"
"""
return '/'.join([item.name for item in self.path])
def __str__(self):
""" String representation of a category is the full path to that category """
return "{path} - {desc}".format(path=self.pathstring, desc=self.description)
@receiver(pre_delete, sender=InvenTreeTree, dispatch_uid='tree_pre_delete_log')
def before_delete_tree_item(sender, instance, using, **kwargs):
""" Receives pre_delete signal from InvenTreeTree object.
Before an item is deleted, update each child object to point to the parent of the object being deleted.
"""
# Update each tree item below this one
for child in instance.children.all():
child.parent = instance.parent
child.save()

View File

@@ -0,0 +1,72 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import permissions
import users.models
class RolePermission(permissions.BasePermission):
"""
Role mixin for API endpoints, allowing us to specify the user "role"
which is required for certain operations.
Each endpoint can have one or more of the following actions:
- GET
- POST
- PUT
- PATCH
- DELETE
Specify the required "role" using the role_required attribute.
e.g.
role_required = "part"
The RoleMixin class will then determine if the user has the required permission
to perform the specified action.
For example, a DELETE action will be rejected unless the user has the "part.remove" permission
"""
def has_permission(self, request, view):
"""
Determine if the current user has the specified permissions
"""
user = request.user
# Superuser can do it all
if user.is_superuser:
return True
# Map the request method to a permission type
rolemap = {
'GET': 'view',
'OPTIONS': 'view',
'POST': 'add',
'PUT': 'change',
'PATCH': 'change',
'DELETE': 'delete',
}
permission = rolemap[request.method]
try:
# Extract the model name associated with this request
model = view.serializer_class.Meta.model
app_label = model._meta.app_label
model_name = model._meta.model_name
table = f"{app_label}_{model_name}"
except AttributeError:
# We will assume that if the serializer class does *not* have a Meta,
# then we don't need a permission
return True
result = users.models.RuleSet.check_table_permission(user, table, permission)
return result
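# Illustrative walk-through (not part of the original file) of a DELETE request
# against a hypothetical endpoint whose serializer model is part.models.Part:
#   request.method = 'DELETE'                                    -> permission 'delete'
#   table = f"{Part._meta.app_label}_{Part._meta.model_name}"    # 'part_part'
#   RuleSet.check_table_permission(user, 'part_part', 'delete')  -> allow or deny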

View File

@@ -0,0 +1,43 @@
# -*- coding: utf-8 -*-
import inspect
import importlib
import pkgutil
def iter_namespace(pkg):
return pkgutil.iter_modules(pkg.__path__, pkg.__name__ + ".")
def get_modules(pkg):
# Return all modules in a given package
return [importlib.import_module(name) for finder, name, ispkg in iter_namespace(pkg)]
def get_classes(module):
# Return all classes in a given module
return inspect.getmembers(module, inspect.isclass)
def get_plugins(pkg, baseclass):
"""
Return a list of all modules under a given package.
- Modules must be a subclass of the provided 'baseclass'
- Modules must have a non-empty PLUGIN_NAME parameter
"""
plugins = []
modules = get_modules(pkg)
# Iterate through each module in the package
for mod in modules:
# Iterate through each class in the module
for item in get_classes(mod):
plugin = item[1]
if issubclass(plugin, baseclass) and plugin.PLUGIN_NAME:
plugins.append(plugin)
return plugins
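# Usage sketch (not part of the original file): discovering plugin classes in a
# hypothetical package 'custom_plugins' whose classes derive from a base class
# that defines a PLUGIN_NAME attribute.
#   import custom_plugins
#   from custom_plugins.base import BasePlugin   # assumed base class location
#   for plugin_class in get_plugins(custom_plugins, BasePlugin):
#       print(plugin_class.PLUGIN_NAME)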

View File

@@ -0,0 +1,50 @@
import sys
def isInTestMode():
"""
Returns True if the database is in testing mode
"""
if 'test' in sys.argv:
return True
return False
def canAppAccessDatabase(allow_test=False):
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemigrations',
'migrate',
'check',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'rebuild',
'collectstatic',
'makemessages',
'compilemessages',
]
if not allow_test:
# Override for testing mode?
excluded_commands.append('test')
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
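# Usage sketch (not part of the original file): a typical guard inside an
# AppConfig.ready() handler, so start-up code does not touch the database while
# commands such as 'migrate' or 'collectstatic' are running. The config class
# name and the start-up task are purely illustrative.
from django.apps import AppConfig

class ExampleConfig(AppConfig):
    name = 'example'

    def ready(self):
        if canAppAccessDatabase():
            # Safe to run start-up tasks which query or modify the database
            print("Database access permitted for start-up tasks")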

View File

@@ -0,0 +1,241 @@
"""
Serializers used in various InvenTree apps
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError as DjangoValidationError
from django.utils.translation import ugettext_lazy as _
from djmoney.contrib.django_rest_framework.fields import MoneyField
from djmoney.money import Money
from djmoney.utils import MONEY_CLASSES, get_currency_field_name
from rest_framework import serializers
from rest_framework.utils import model_meta
from rest_framework.fields import empty
from rest_framework.exceptions import ValidationError
from rest_framework.serializers import DecimalField
class InvenTreeMoneySerializer(MoneyField):
"""
Custom serializer for 'MoneyField',
which ensures that passed values are numerically valid
Ref: https://github.com/django-money/django-money/blob/master/djmoney/contrib/django_rest_framework/fields.py
"""
def get_value(self, data):
"""
Test that the returned amount is a valid Decimal
"""
amount = super(DecimalField, self).get_value(data)
# Convert an empty string to None
if len(str(amount).strip()) == 0:
amount = None
try:
if amount is not None:
amount = Decimal(amount)
except:
raise ValidationError(_("Must be a valid number"))
currency = data.get(get_currency_field_name(self.field_name), self.default_currency)
if currency and amount is not None and not isinstance(amount, MONEY_CLASSES) and amount is not empty:
return Money(amount, currency)
return amount
class UserSerializer(serializers.ModelSerializer):
""" Serializer for User - provides all fields """
class Meta:
model = User
fields = '__all__'
class UserSerializerBrief(serializers.ModelSerializer):
""" Serializer for User - provides limited information """
class Meta:
model = User
fields = [
'pk',
'username',
]
class InvenTreeModelSerializer(serializers.ModelSerializer):
"""
Inherits the standard Django ModelSerializer class,
but also ensures that the underlying model class data are checked on validation.
"""
def __init__(self, instance=None, data=empty, **kwargs):
# self.instance = instance
# If instance is None, we are creating a new instance
if instance is None and data is not empty:
# Required to side-step immutability of a QueryDict
data = data.copy()
# Add missing fields which have default values
ModelClass = self.Meta.model
fields = model_meta.get_field_info(ModelClass)
for field_name, field in fields.fields.items():
"""
Update the field IF (and ONLY IF):
- The field has a specified default value
- The field does not already have a value set
"""
if field.has_default() and field_name not in data:
value = field.default
# Account for callable functions
if callable(value):
try:
value = value()
except:
continue
data[field_name] = value
super().__init__(instance, data, **kwargs)
def get_initial(self):
"""
Construct initial data for the serializer.
Use the 'default' values specified by the django model definition
"""
initials = super().get_initial().copy()
# Are we creating a new instance?
if self.instance is None:
ModelClass = self.Meta.model
fields = model_meta.get_field_info(ModelClass)
for field_name, field in fields.fields.items():
if field.has_default() and field_name not in initials:
value = field.default
# Account for callable functions
if callable(value):
try:
value = value()
except:
continue
initials[field_name] = value
return initials
def save(self, **kwargs):
"""
Catch any django ValidationError thrown at the moment save() is called,
and re-throw as a DRF ValidationError
"""
try:
super().save(**kwargs)
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=serializers.as_serializer_error(exc))
return self.instance
def run_validation(self, data=empty):
"""
Perform serializer validation.
In addition to running validators on the serializer fields,
this class ensures that the underlying model is also validated.
"""
# Run any native validation checks first (may raise a ValidationError)
data = super().run_validation(data)
# Now ensure the underlying model is correct
if not hasattr(self, 'instance') or self.instance is None:
# No instance exists (we are creating a new one)
instance = self.Meta.model(**data)
else:
# Instance already exists (we are updating!)
instance = self.instance
# Update instance fields
for attr, value in data.items():
setattr(instance, attr, value)
# Run a 'full_clean' on the model.
# Note that by default, DRF does *not* perform full model validation!
try:
instance.full_clean()
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=serializers.as_serializer_error(exc))
return data
class InvenTreeAttachmentSerializerField(serializers.FileField):
"""
Override the DRF native FileField serializer,
to remove the leading server path.
For example, the FileField might supply something like:
http://127.0.0.1:8000/media/foo/bar.jpg
Whereas we wish to return:
/media/foo/bar.jpg
Why? You can't handle the why!
Actually, if the server process is serving the data at 127.0.0.1,
but a proxy service (e.g. nginx) is then providing DNS lookup to the outside world,
then an attachment which prefixes the "address" of the internal server
will not be accessible from the outside world.
"""
def to_representation(self, value):
if not value:
return None
return os.path.join(str(settings.MEDIA_URL), str(value))
class InvenTreeImageSerializerField(serializers.ImageField):
"""
Custom image serializer.
On upload, validate that the file is a valid image file
"""
def to_representation(self, value):
if not value:
return None
return os.path.join(str(settings.MEDIA_URL), str(value))

View File

@@ -0,0 +1,643 @@
"""
Django settings for InvenTree project.
In practice the settings in this file should not be adjusted,
instead settings can be configured in the config.yaml file
located in the top level project directory.
This allows implementation configuration to be hidden from source control,
as well as separating configuration parameters from the more complex
database setup in this file.
"""
import logging
import os
import random
import string
import shutil
import sys
from datetime import datetime
import moneyed
import yaml
from django.utils.translation import gettext_lazy as _
from django.contrib.messages import constants as messages
def _is_true(x):
# Shortcut function to determine if a value "looks" like a boolean
return str(x).lower() in ['1', 'y', 'yes', 't', 'true']
def get_setting(environment_var, backup_val, default_value=None):
"""
Helper function for retrieving a configuration setting value
- First preference is to look for the environment variable
- Second preference is to look for the value of the settings file
- Third preference is the default value
"""
val = os.getenv(environment_var)
if val is not None:
return val
if backup_val is not None:
return backup_val
return default_value
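# Illustrative example (not part of the original file) of the precedence order,
# using hypothetical values for the timezone setting:
#   os.environ['INVENTREE_TIMEZONE'] = 'Australia/Sydney'
#   get_setting('INVENTREE_TIMEZONE', CONFIG.get('timezone', None), 'UTC')
#   # -> 'Australia/Sydney'   (environment variable takes priority)
#   # With the environment variable unset and no 'timezone' key in config.yaml:
#   # -> 'UTC'                (falls back to the default value)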
# Determine if we are running in "test" mode e.g. "manage.py test"
TESTING = 'test' in sys.argv
# New requirement for django 3.2+
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Specify where the "config file" is located.
# By default, this is 'config.yaml'
cfg_filename = os.getenv('INVENTREE_CONFIG_FILE')
if cfg_filename:
cfg_filename = cfg_filename.strip()
cfg_filename = os.path.abspath(cfg_filename)
else:
# Config file is *not* specified - use the default
cfg_filename = os.path.join(BASE_DIR, 'config.yaml')
if not os.path.exists(cfg_filename):
print("InvenTree configuration file 'config.yaml' not found - creating default file")
cfg_template = os.path.join(BASE_DIR, "config_template.yaml")
shutil.copyfile(cfg_template, cfg_filename)
print(f"Created config file {cfg_filename}")
with open(cfg_filename, 'r') as cfg:
CONFIG = yaml.safe_load(cfg)
# Default action is to run the system in Debug mode
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = _is_true(get_setting(
'INVENTREE_DEBUG',
CONFIG.get('debug', True)
))
DOCKER = _is_true(get_setting(
'INVENTREE_DOCKER',
False
))
# Configure logging settings
log_level = get_setting(
'INVENTREE_LOG_LEVEL',
CONFIG.get('log_level', 'WARNING')
)
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
log_level = 'WARNING'
logging.basicConfig(
level=log_level,
format="%(asctime)s %(levelname)s %(message)s",
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'root': {
'handlers': ['console'],
'level': log_level,
},
}
# Get a logger instance for this setup file
logger = logging.getLogger("inventree")
"""
Specify a secret key to be used by django.
Following options are tested, in descending order of preference:
A) Check for environment variable INVENTREE_SECRET_KEY => Use raw key data
B) Check for environment variable INVENTREE_SECRET_KEY_FILE => Load key data from file
C) Look for default key file "secret_key.txt"
D) Create "secret_key.txt" if it does not exist
"""
if os.getenv("INVENTREE_SECRET_KEY"):
# Secret key passed in directly
SECRET_KEY = os.getenv("INVENTREE_SECRET_KEY").strip()
logger.info("SECRET_KEY loaded by INVENTREE_SECRET_KEY")
else:
# Secret key passed in by file location
key_file = os.getenv("INVENTREE_SECRET_KEY_FILE")
if key_file:
key_file = os.path.abspath(key_file)
else:
# default secret key location
key_file = os.path.join(BASE_DIR, "secret_key.txt")
key_file = os.path.abspath(key_file)
if not os.path.exists(key_file):
logger.info(f"Generating random key file at '{key_file}'")
# Create a random key file
with open(key_file, 'w') as f:
options = string.digits + string.ascii_letters + string.punctuation
key = ''.join([random.choice(options) for i in range(100)])
f.write(key)
logger.info(f"Loading SECRET_KEY from '{key_file}'")
try:
with open(key_file, "r") as key_f:
SECRET_KEY = key_f.read().strip()
except Exception:
logger.exception(f"Couldn't load keyfile {key_file}")
sys.exit(-1)
# List of allowed hosts (default = allow all)
ALLOWED_HOSTS = CONFIG.get('allowed_hosts', ['*'])
# Cross Origin Resource Sharing (CORS) options
# Only allow CORS access to API
CORS_URLS_REGEX = r'^/api/.*$'
# Extract CORS options from configuration file
cors_opt = CONFIG.get('cors', None)
if cors_opt:
CORS_ORIGIN_ALLOW_ALL = cors_opt.get('allow_all', False)
if not CORS_ORIGIN_ALLOW_ALL:
CORS_ORIGIN_WHITELIST = cors_opt.get('whitelist', [])
# Web URL endpoint for served static files
STATIC_URL = '/static/'
# The filesystem location for served static files
STATIC_ROOT = os.path.abspath(
get_setting(
'INVENTREE_STATIC_ROOT',
CONFIG.get('static_root', '/home/inventree/data/static')
)
)
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'InvenTree', 'static'),
]
# Translated Template settings
STATICFILES_I18_PREFIX = 'i18n'
STATICFILES_I18_SRC = os.path.join(BASE_DIR, 'templates', 'js', 'translated')
STATICFILES_I18_TRG = STATICFILES_DIRS[0] + '_' + STATICFILES_I18_PREFIX
STATICFILES_DIRS.append(STATICFILES_I18_TRG)
STATICFILES_I18_TRG = os.path.join(STATICFILES_I18_TRG, STATICFILES_I18_PREFIX)
STATFILES_I18_PROCESSORS = [
'InvenTree.context.status_codes',
]
# Color Themes Directory
STATIC_COLOR_THEMES_DIR = os.path.join(STATIC_ROOT, 'css', 'color-themes')
# Web URL endpoint for served media files
MEDIA_URL = '/media/'
# The filesystem location for served static files
MEDIA_ROOT = os.path.abspath(
get_setting(
'INVENTREE_MEDIA_ROOT',
CONFIG.get('media_root', '/home/inventree/data/media')
)
)
if DEBUG:
logger.info("InvenTree running in DEBUG mode")
logger.info(f"MEDIA_ROOT: '{MEDIA_ROOT}'")
logger.info(f"STATIC_ROOT: '{STATIC_ROOT}'")
# Application definition
INSTALLED_APPS = [
# Core django modules
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# InvenTree apps
'build.apps.BuildConfig',
'common.apps.CommonConfig',
'company.apps.CompanyConfig',
'label.apps.LabelConfig',
'order.apps.OrderConfig',
'part.apps.PartConfig',
'report.apps.ReportConfig',
'stock.apps.StockConfig',
'users.apps.UsersConfig',
'InvenTree.apps.InvenTreeConfig', # InvenTree app runs last
# Third party add-ons
'django_filters', # Extended filter functionality
'rest_framework', # DRF (Django Rest Framework)
'rest_framework.authtoken', # Token authentication for API
'corsheaders', # Cross-origin Resource Sharing for DRF
'crispy_forms', # Improved form rendering
'import_export', # Import / export tables to file
'django_cleanup.apps.CleanupConfig', # Automatically delete orphaned MEDIA files
'mptt', # Modified Preorder Tree Traversal
'markdownx', # Markdown editing
'markdownify', # Markdown template rendering
'django_admin_shell', # Python shell for the admin interface
'djmoney', # django-money integration
'djmoney.contrib.exchange', # django-money exchange rates
'error_report', # Error reporting in the admin interface
'django_q',
'formtools', # Form wizard tools
]
MIDDLEWARE = CONFIG.get('middleware', [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'InvenTree.middleware.AuthRequiredMiddleware'
])
# Error reporting middleware
MIDDLEWARE.append('error_report.middleware.ExceptionProcessor')
AUTHENTICATION_BACKENDS = CONFIG.get('authentication_backends', [
'django.contrib.auth.backends.ModelBackend'
])
# If the debug toolbar is enabled, add the modules
if DEBUG and CONFIG.get('debug_toolbar', False):
logger.info("Running with DEBUG_TOOLBAR enabled")
INSTALLED_APPS.append('debug_toolbar')
MIDDLEWARE.append('debug_toolbar.middleware.DebugToolbarMiddleware')
ROOT_URLCONF = 'InvenTree.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
# Allow templates in the reporting directory to be accessed
os.path.join(MEDIA_ROOT, 'report'),
os.path.join(MEDIA_ROOT, 'label'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.i18n',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'InvenTree.context.health_status',
'InvenTree.context.status_codes',
'InvenTree.context.user_roles',
],
},
},
]
REST_FRAMEWORK = {
'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler',
'DATETIME_FORMAT': '%Y-%m-%d %H:%M',
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.BasicAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
'rest_framework.permissions.DjangoModelPermissions',
'InvenTree.permissions.RolePermission',
),
'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema',
'DEFAULT_METADATA_CLASS': 'InvenTree.metadata.InvenTreeMetadata'
}
WSGI_APPLICATION = 'InvenTree.wsgi.application'
background_workers = os.environ.get('INVENTREE_BACKGROUND_WORKERS', None)
if background_workers is not None:
try:
background_workers = int(background_workers)
except ValueError:
background_workers = None
if background_workers is None:
# Sensible default?
background_workers = 4
# django-q configuration
Q_CLUSTER = {
'name': 'InvenTree',
'workers': background_workers,
'timeout': 90,
'retry': 120,
'queue_limit': 50,
'bulk': 10,
'orm': 'default',
'sync': False,
}
# Markdownx configuration
# Ref: https://neutronx.github.io/django-markdownx/customization/
MARKDOWNX_MEDIA_PATH = datetime.now().strftime('markdownx/%Y/%m/%d')
# Markdownify configuration
# Ref: https://django-markdownify.readthedocs.io/en/latest/settings.html
MARKDOWNIFY_WHITELIST_TAGS = [
'a',
'abbr',
'b',
'blockquote',
'em',
'h1', 'h2', 'h3',
'i',
'img',
'li',
'ol',
'p',
'strong',
'ul'
]
MARKDOWNIFY_WHITELIST_ATTRS = [
'href',
'src',
'alt',
]
MARKDOWNIFY_BLEACH = False
DATABASES = {}
"""
Configure the database backend based on the user-specified values.
- Primarily this configuration happens in the config.yaml file
- However there may be reason to configure the DB via environmental variables
- The following code lets the user "mix and match" database configuration
"""
logger.info("Configuring database backend:")
# Extract database configuration from the config.yaml file
db_config = CONFIG.get('database', {})
if not db_config:
db_config = {}
# Environment variables take preference over config file!
db_keys = ['ENGINE', 'NAME', 'USER', 'PASSWORD', 'HOST', 'PORT']
for key in db_keys:
# First, check the environment variables
env_key = f"INVENTREE_DB_{key}"
env_var = os.environ.get(env_key, None)
if env_var:
# Override configuration value
db_config[key] = env_var
# Check that required database configuration options are specified
required_keys = ['ENGINE', 'NAME']
for key in required_keys:
if key not in db_config:
error_msg = f'Missing required database configuration value {key}'
logger.error(error_msg)
print('Error: ' + error_msg)
sys.exit(-1)
"""
Special considerations for the database 'ENGINE' setting.
It can be specified in config.yaml (or envvar) as either (for example):
- sqlite3
- django.db.backends.sqlite3
- django.db.backends.postgresql
"""
db_engine = db_config['ENGINE'].lower()
# Correct common misspelling
if db_engine == 'sqlite':
db_engine = 'sqlite3'
if db_engine in ['sqlite3', 'postgresql', 'mysql']:
# Prepend the required python module string
db_engine = f'django.db.backends.{db_engine}'
db_config['ENGINE'] = db_engine
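# Illustrative examples (not part of the original file) of the ENGINE normalisation above:
#   'sqlite'                    -> 'django.db.backends.sqlite3'
#   'postgresql'                -> 'django.db.backends.postgresql'
#   'django.db.backends.mysql'  -> unchanged (already fully qualified)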
db_name = db_config['NAME']
db_host = db_config.get('HOST', "''")
print("InvenTree Database Configuration")
print("================================")
print(f"ENGINE: {db_engine}")
print(f"NAME: {db_name}")
print(f"HOST: {db_host}")
DATABASES['default'] = db_config
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Extra (optional) URL validators
# See https://docs.djangoproject.com/en/2.2/ref/validators/#django.core.validators.URLValidator
EXTRA_URL_SCHEMES = CONFIG.get('extra_url_schemes', [])
if not isinstance(EXTRA_URL_SCHEMES, list):
logger.warning("extra_url_schemes not correctly formatted")
EXTRA_URL_SCHEMES = []
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = CONFIG.get('language', 'en-us')
# If a new language translation is supported, it must be added here
LANGUAGES = [
('de', _('German')),
('el', _('Greek')),
('en', _('English')),
('es', _('Spanish')),
('fr', _('French')),
('he', _('Hebrew')),
('it', _('Italian')),
('ja', _('Japanese')),
('ko', _('Korean')),
('nl', _('Dutch')),
('no', _('Norwegian')),
('pl', _('Polish')),
('ru', _('Russian')),
('sv', _('Swedish')),
('th', _('Thai')),
('tr', _('Turkish')),
('vi', _('Vietnamese')),
('zh-cn', _('Chinese')),
]
# Currencies available for use
CURRENCIES = CONFIG.get(
'currencies',
[
'AUD', 'CAD', 'EUR', 'GBP', 'JPY', 'NZD', 'USD',
],
)
# Check that each provided currency is supported
for currency in CURRENCIES:
if currency not in moneyed.CURRENCIES:
print(f"Currency code '{currency}' is not supported")
sys.exit(1)
# Custom currency exchange backend
EXCHANGE_BACKEND = 'InvenTree.exchange.InvenTreeExchange'
# Extract email settings from the config file
email_config = CONFIG.get('email', {})
EMAIL_BACKEND = get_setting(
'INVENTREE_EMAIL_BACKEND',
email_config.get('backend', 'django.core.mail.backends.smtp.EmailBackend')
)
# Email backend settings
EMAIL_HOST = get_setting(
'INVENTREE_EMAIL_HOST',
email_config.get('host', '')
)
EMAIL_PORT = get_setting(
'INVENTREE_EMAIL_PORT',
email_config.get('port', 25)
)
EMAIL_HOST_USER = get_setting(
'INVENTREE_EMAIL_USERNAME',
email_config.get('username', ''),
)
EMAIL_HOST_PASSWORD = get_setting(
'INVENTREE_EMAIL_PASSWORD',
email_config.get('password', ''),
)
DEFAULT_FROM_EMAIL = get_setting(
'INVENTREE_EMAIL_SENDER',
email_config.get('sender', ''),
)
EMAIL_SUBJECT_PREFIX = '[InvenTree] '
EMAIL_USE_LOCALTIME = False
EMAIL_USE_TLS = get_setting(
'INVENTREE_EMAIL_TLS',
email_config.get('tls', False),
)
EMAIL_USE_SSL = get_setting(
'INVENTREE_EMAIL_SSL',
email_config.get('ssl', False),
)
EMAIL_TIMEOUT = 60
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale/'),
)
TIME_ZONE = get_setting(
'INVENTREE_TIMEZONE',
CONFIG.get('timezone', 'UTC')
)
USE_I18N = True
USE_L10N = True
# Do not use native timezone support in "test" mode
# It generates a *lot* of cruft in the logs
if not TESTING:
USE_TZ = True
DATE_INPUT_FORMATS = [
"%Y-%m-%d",
]
# crispy forms use the bootstrap templates
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Use database transactions when importing / exporting data
IMPORT_EXPORT_USE_TRANSACTIONS = True
# Internal IP addresses allowed to see the debug toolbar
INTERNAL_IPS = [
'127.0.0.1',
]
MESSAGE_TAGS = {
messages.SUCCESS: 'alert alert-block alert-success',
messages.ERROR: 'alert alert-block alert-danger',
messages.INFO: 'alert alert-block alert-info',
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,170 @@
/**
* @author: Dennis Hernández
* @webSite: http://djhvscf.github.io/Blog
* @update: zhixin wen <wenzhixin2010@gmail.com>
*/
!($ => {
const diacriticsMap = {}
const defaultAccentsDiacritics = [
{base: 'A', letters: '\u0041\u24B6\uFF21\u00C0\u00C1\u00C2\u1EA6\u1EA4\u1EAA\u1EA8\u00C3\u0100\u0102\u1EB0\u1EAE\u1EB4\u1EB2\u0226\u01E0\u00C4\u01DE\u1EA2\u00C5\u01FA\u01CD\u0200\u0202\u1EA0\u1EAC\u1EB6\u1E00\u0104\u023A\u2C6F'},
{base: 'AA',letters: '\uA732'},
{base: 'AE',letters: '\u00C6\u01FC\u01E2'},
{base: 'AO',letters: '\uA734'},
{base: 'AU',letters: '\uA736'},
{base: 'AV',letters: '\uA738\uA73A'},
{base: 'AY',letters: '\uA73C'},
{base: 'B', letters: '\u0042\u24B7\uFF22\u1E02\u1E04\u1E06\u0243\u0182\u0181'},
{base: 'C', letters: '\u0043\u24B8\uFF23\u0106\u0108\u010A\u010C\u00C7\u1E08\u0187\u023B\uA73E'},
{base: 'D', letters: '\u0044\u24B9\uFF24\u1E0A\u010E\u1E0C\u1E10\u1E12\u1E0E\u0110\u018B\u018A\u0189\uA779'},
{base: 'DZ',letters: '\u01F1\u01C4'},
{base: 'Dz',letters: '\u01F2\u01C5'},
{base: 'E', letters: '\u0045\u24BA\uFF25\u00C8\u00C9\u00CA\u1EC0\u1EBE\u1EC4\u1EC2\u1EBC\u0112\u1E14\u1E16\u0114\u0116\u00CB\u1EBA\u011A\u0204\u0206\u1EB8\u1EC6\u0228\u1E1C\u0118\u1E18\u1E1A\u0190\u018E'},
{base: 'F', letters: '\u0046\u24BB\uFF26\u1E1E\u0191\uA77B'},
{base: 'G', letters: '\u0047\u24BC\uFF27\u01F4\u011C\u1E20\u011E\u0120\u01E6\u0122\u01E4\u0193\uA7A0\uA77D\uA77E'},
{base: 'H', letters: '\u0048\u24BD\uFF28\u0124\u1E22\u1E26\u021E\u1E24\u1E28\u1E2A\u0126\u2C67\u2C75\uA78D'},
{base: 'I', letters: '\u0049\u24BE\uFF29\u00CC\u00CD\u00CE\u0128\u012A\u012C\u0130\u00CF\u1E2E\u1EC8\u01CF\u0208\u020A\u1ECA\u012E\u1E2C\u0197'},
{base: 'J', letters: '\u004A\u24BF\uFF2A\u0134\u0248'},
{base: 'K', letters: '\u004B\u24C0\uFF2B\u1E30\u01E8\u1E32\u0136\u1E34\u0198\u2C69\uA740\uA742\uA744\uA7A2'},
{base: 'L', letters: '\u004C\u24C1\uFF2C\u013F\u0139\u013D\u1E36\u1E38\u013B\u1E3C\u1E3A\u0141\u023D\u2C62\u2C60\uA748\uA746\uA780'},
{base: 'LJ',letters: '\u01C7'},
{base: 'Lj',letters: '\u01C8'},
{base: 'M', letters: '\u004D\u24C2\uFF2D\u1E3E\u1E40\u1E42\u2C6E\u019C'},
{base: 'N', letters: '\u004E\u24C3\uFF2E\u01F8\u0143\u00D1\u1E44\u0147\u1E46\u0145\u1E4A\u1E48\u0220\u019D\uA790\uA7A4'},
{base: 'NJ',letters: '\u01CA'},
{base: 'Nj',letters: '\u01CB'},
{base: 'O', letters: '\u004F\u24C4\uFF2F\u00D2\u00D3\u00D4\u1ED2\u1ED0\u1ED6\u1ED4\u00D5\u1E4C\u022C\u1E4E\u014C\u1E50\u1E52\u014E\u022E\u0230\u00D6\u022A\u1ECE\u0150\u01D1\u020C\u020E\u01A0\u1EDC\u1EDA\u1EE0\u1EDE\u1EE2\u1ECC\u1ED8\u01EA\u01EC\u00D8\u01FE\u0186\u019F\uA74A\uA74C'},
{base: 'OI',letters: '\u01A2'},
{base: 'OO',letters: '\uA74E'},
{base: 'OU',letters: '\u0222'},
{base: 'OE',letters: '\u008C\u0152'},
{base: 'oe',letters: '\u009C\u0153'},
{base: 'P', letters: '\u0050\u24C5\uFF30\u1E54\u1E56\u01A4\u2C63\uA750\uA752\uA754'},
{base: 'Q', letters: '\u0051\u24C6\uFF31\uA756\uA758\u024A'},
{base: 'R', letters: '\u0052\u24C7\uFF32\u0154\u1E58\u0158\u0210\u0212\u1E5A\u1E5C\u0156\u1E5E\u024C\u2C64\uA75A\uA7A6\uA782'},
{base: 'S', letters: '\u0053\u24C8\uFF33\u1E9E\u015A\u1E64\u015C\u1E60\u0160\u1E66\u1E62\u1E68\u0218\u015E\u2C7E\uA7A8\uA784'},
{base: 'T', letters: '\u0054\u24C9\uFF34\u1E6A\u0164\u1E6C\u021A\u0162\u1E70\u1E6E\u0166\u01AC\u01AE\u023E\uA786'},
{base: 'TZ',letters: '\uA728'},
{base: 'U', letters: '\u0055\u24CA\uFF35\u00D9\u00DA\u00DB\u0168\u1E78\u016A\u1E7A\u016C\u00DC\u01DB\u01D7\u01D5\u01D9\u1EE6\u016E\u0170\u01D3\u0214\u0216\u01AF\u1EEA\u1EE8\u1EEE\u1EEC\u1EF0\u1EE4\u1E72\u0172\u1E76\u1E74\u0244'},
{base: 'V', letters: '\u0056\u24CB\uFF36\u1E7C\u1E7E\u01B2\uA75E\u0245'},
{base: 'VY',letters: '\uA760'},
{base: 'W', letters: '\u0057\u24CC\uFF37\u1E80\u1E82\u0174\u1E86\u1E84\u1E88\u2C72'},
{base: 'X', letters: '\u0058\u24CD\uFF38\u1E8A\u1E8C'},
{base: 'Y', letters: '\u0059\u24CE\uFF39\u1EF2\u00DD\u0176\u1EF8\u0232\u1E8E\u0178\u1EF6\u1EF4\u01B3\u024E\u1EFE'},
{base: 'Z', letters: '\u005A\u24CF\uFF3A\u0179\u1E90\u017B\u017D\u1E92\u1E94\u01B5\u0224\u2C7F\u2C6B\uA762'},
{base: 'a', letters: '\u0061\u24D0\uFF41\u1E9A\u00E0\u00E1\u00E2\u1EA7\u1EA5\u1EAB\u1EA9\u00E3\u0101\u0103\u1EB1\u1EAF\u1EB5\u1EB3\u0227\u01E1\u00E4\u01DF\u1EA3\u00E5\u01FB\u01CE\u0201\u0203\u1EA1\u1EAD\u1EB7\u1E01\u0105\u2C65\u0250'},
{base: 'aa',letters: '\uA733'},
{base: 'ae',letters: '\u00E6\u01FD\u01E3'},
{base: 'ao',letters: '\uA735'},
{base: 'au',letters: '\uA737'},
{base: 'av',letters: '\uA739\uA73B'},
{base: 'ay',letters: '\uA73D'},
{base: 'b', letters: '\u0062\u24D1\uFF42\u1E03\u1E05\u1E07\u0180\u0183\u0253'},
{base: 'c', letters: '\u0063\u24D2\uFF43\u0107\u0109\u010B\u010D\u00E7\u1E09\u0188\u023C\uA73F\u2184'},
{base: 'd', letters: '\u0064\u24D3\uFF44\u1E0B\u010F\u1E0D\u1E11\u1E13\u1E0F\u0111\u018C\u0256\u0257\uA77A'},
{base: 'dz',letters: '\u01F3\u01C6'},
{base: 'e', letters: '\u0065\u24D4\uFF45\u00E8\u00E9\u00EA\u1EC1\u1EBF\u1EC5\u1EC3\u1EBD\u0113\u1E15\u1E17\u0115\u0117\u00EB\u1EBB\u011B\u0205\u0207\u1EB9\u1EC7\u0229\u1E1D\u0119\u1E19\u1E1B\u0247\u025B\u01DD'},
{base: 'f', letters: '\u0066\u24D5\uFF46\u1E1F\u0192\uA77C'},
{base: 'g', letters: '\u0067\u24D6\uFF47\u01F5\u011D\u1E21\u011F\u0121\u01E7\u0123\u01E5\u0260\uA7A1\u1D79\uA77F'},
{base: 'h', letters: '\u0068\u24D7\uFF48\u0125\u1E23\u1E27\u021F\u1E25\u1E29\u1E2B\u1E96\u0127\u2C68\u2C76\u0265'},
{base: 'hv',letters: '\u0195'},
{base: 'i', letters: '\u0069\u24D8\uFF49\u00EC\u00ED\u00EE\u0129\u012B\u012D\u00EF\u1E2F\u1EC9\u01D0\u0209\u020B\u1ECB\u012F\u1E2D\u0268\u0131'},
{base: 'j', letters: '\u006A\u24D9\uFF4A\u0135\u01F0\u0249'},
{base: 'k', letters: '\u006B\u24DA\uFF4B\u1E31\u01E9\u1E33\u0137\u1E35\u0199\u2C6A\uA741\uA743\uA745\uA7A3'},
{base: 'l', letters: '\u006C\u24DB\uFF4C\u0140\u013A\u013E\u1E37\u1E39\u013C\u1E3D\u1E3B\u017F\u0142\u019A\u026B\u2C61\uA749\uA781\uA747'},
{base: 'lj',letters: '\u01C9'},
{base: 'm', letters: '\u006D\u24DC\uFF4D\u1E3F\u1E41\u1E43\u0271\u026F'},
{base: 'n', letters: '\u006E\u24DD\uFF4E\u01F9\u0144\u00F1\u1E45\u0148\u1E47\u0146\u1E4B\u1E49\u019E\u0272\u0149\uA791\uA7A5'},
{base: 'nj',letters: '\u01CC'},
{base: 'o', letters: '\u006F\u24DE\uFF4F\u00F2\u00F3\u00F4\u1ED3\u1ED1\u1ED7\u1ED5\u00F5\u1E4D\u022D\u1E4F\u014D\u1E51\u1E53\u014F\u022F\u0231\u00F6\u022B\u1ECF\u0151\u01D2\u020D\u020F\u01A1\u1EDD\u1EDB\u1EE1\u1EDF\u1EE3\u1ECD\u1ED9\u01EB\u01ED\u00F8\u01FF\u0254\uA74B\uA74D\u0275'},
{base: 'oi',letters: '\u01A3'},
{base: 'ou',letters: '\u0223'},
{base: 'oo',letters: '\uA74F'},
{base: 'p',letters: '\u0070\u24DF\uFF50\u1E55\u1E57\u01A5\u1D7D\uA751\uA753\uA755'},
{base: 'q',letters: '\u0071\u24E0\uFF51\u024B\uA757\uA759'},
{base: 'r',letters: '\u0072\u24E1\uFF52\u0155\u1E59\u0159\u0211\u0213\u1E5B\u1E5D\u0157\u1E5F\u024D\u027D\uA75B\uA7A7\uA783'},
{base: 's',letters: '\u0073\u24E2\uFF53\u00DF\u015B\u1E65\u015D\u1E61\u0161\u1E67\u1E63\u1E69\u0219\u015F\u023F\uA7A9\uA785\u1E9B'},
{base: 't',letters: '\u0074\u24E3\uFF54\u1E6B\u1E97\u0165\u1E6D\u021B\u0163\u1E71\u1E6F\u0167\u01AD\u0288\u2C66\uA787'},
{base: 'tz',letters: '\uA729'},
{base: 'u',letters: '\u0075\u24E4\uFF55\u00F9\u00FA\u00FB\u0169\u1E79\u016B\u1E7B\u016D\u00FC\u01DC\u01D8\u01D6\u01DA\u1EE7\u016F\u0171\u01D4\u0215\u0217\u01B0\u1EEB\u1EE9\u1EEF\u1EED\u1EF1\u1EE5\u1E73\u0173\u1E77\u1E75\u0289'},
{base: 'v',letters: '\u0076\u24E5\uFF56\u1E7D\u1E7F\u028B\uA75F\u028C'},
{base: 'vy',letters: '\uA761'},
{base: 'w',letters: '\u0077\u24E6\uFF57\u1E81\u1E83\u0175\u1E87\u1E85\u1E98\u1E89\u2C73'},
{base: 'x',letters: '\u0078\u24E7\uFF58\u1E8B\u1E8D'},
{base: 'y',letters: '\u0079\u24E8\uFF59\u1EF3\u00FD\u0177\u1EF9\u0233\u1E8F\u00FF\u1EF7\u1E99\u1EF5\u01B4\u024F\u1EFF'},
{base: 'z',letters: '\u007A\u24E9\uFF5A\u017A\u1E91\u017C\u017E\u1E93\u1E95\u01B6\u0225\u0240\u2C6C\uA763'}
]
const initNeutraliser = () => {
for (const diacritic of defaultAccentsDiacritics) {
const letters = diacritic.letters
for (let i = 0; i < letters.length; i++) {
diacriticsMap[letters[i]] = diacritic.base
}
}
}
/* eslint-disable no-control-regex */
const removeDiacritics = str => str.replace(/[^\u0000-\u007E]/g, a => diacriticsMap[a] || a)
$.extend($.fn.bootstrapTable.defaults, {
searchAccentNeutralise: false
})
$.BootstrapTable = class extends $.BootstrapTable {
init () {
if (this.options.searchAccentNeutralise) {
initNeutraliser()
}
super.init()
}
initSearch () {
if (this.options.sidePagination !== 'server') {
let s = this.searchText && this.searchText.toLowerCase()
const f = $.isEmptyObject(this.filterColumns) ? null : this.filterColumns
// Check filter
this.data = f ? this.options.data.filter((item, i) => {
for (const key in f) {
if (item[key] !== f[key]) {
return false
}
}
return true
}) : this.options.data
this.data = s ? this.options.data.filter((item, i) => {
for (let [key, value] of Object.entries(item)) {
key = $.isNumeric(key) ? parseInt(key, 10) : key
const column = this.columns[this.fieldsColumnsIndex[key]]
const j = this.header.fields.indexOf(key)
if (column && column.searchFormatter) {
value = $.fn.bootstrapTable.utils.calculateObjectValue(column,
this.header.formatters[j], [value, item, i], value)
}
const index = this.header.fields.indexOf(key)
if (index !== -1 && this.header.searchables[index] && typeof value === 'string') {
if (this.options.searchAccentNeutralise) {
value = removeDiacritics(value)
s = removeDiacritics(s)
}
if (this.options.strictSearch) {
if ((`${value}`).toLowerCase() === s) {
return true
}
} else {
if ((`${value}`).toLowerCase().includes(s)) {
return true
}
}
}
}
return false
}) : this.data
}
}
}
})(jQuery)

View File

@@ -0,0 +1,17 @@
{
"name": "Accent Neutralise",
"version": "1.0.0",
"description": "Plugin to neutralise the words.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/accent-neutralise",
"example": "#",
"plugins": [{
"name": "bootstrap-table-accent-neutralise",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/accent-neutralise"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Auto Refresh",
"version": "1.0.0",
"description": "Plugin to automatically refresh the table on an interval.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/auto-refresh",
"example": "#",
"plugins": [{
"name": "bootstrap-table-auto-refresh",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/auto-refresh"
}],
"author": {
"name": "fenichaler",
"image": "https://avatars.githubusercontent.com/u/3437075"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Cookie",
"version": "1.2.1",
"description": "Plugin to use the cookie of the browser.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/cookie",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/cookie.html",
"plugins": [{
"name": "bootstrap-table-cookie",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/cookie"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Copy Rows",
"version": "1.0.0",
"description": "Allows pushing of selected column data to the clipboard.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/copy-rows",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/copy-rows.html",
"plugins": [{
"name": "copy-rows",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/copy-rows"
}],
"author": {
"name": "Homer Glascock",
"image": "https://avatars1.githubusercontent.com/u/5546710"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "DeferURL",
"version": "1.0.0",
"description": "Plugin to defer server side processing.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/defer-url",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/defer-url.html",
"plugins": [{
"name": "bootstrap-table-defer-url",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/defer-url"
}],
"author": {
"name": "rubensa",
"image": "https://avatars1.githubusercontent.com/u/1469340"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Table Editable",
"version": "1.1.0",
"description": "Use the x-editable to in-place editing your table.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/editable",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/editable.html",
"plugins": [{
"name": "x-editable",
"url": "https://github.com/vitalets/x-editable"
}],
"author": {
"name": "wenzhixin",
"image": "https://avatars1.githubusercontent.com/u/2117018"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Table Export",
"version": "1.1.0",
"description": "Export your table data to JSON, XML, CSV, TXT, SQL, Word, Excel, PNG, PDF.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/export",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/export.html",
"plugins": [{
"name": "tableExport.jquery.plugin",
"url": "https://github.com/hhurz/tableExport.jquery.plugin"
}],
"author": {
"name": "wenzhixin",
"image": "https://avatars1.githubusercontent.com/u/2117018"
}
}

View File

@@ -0,0 +1,17 @@
{
"name": "Filter Control",
"version": "2.1.0",
"description": "Plugin to add input/select element on the top of the columns in order to filter the data.",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/filter-control",
"example": "http://issues.wenzhixin.net.cn/bootstrap-table/#extensions/filter-control.html",
"plugins": [{
"name": "bootstrap-table-filter-control",
"url": "https://github.com/wenzhixin/bootstrap-table/tree/master/src/extensions/filter-control"
}],
"author": {
"name": "djhvscf",
"image": "https://avatars1.githubusercontent.com/u/4496763"
}
}

Some files were not shown because too many files have changed in this diff