Mirror of https://github.com/bugsink/bugsink.git (synced 2025-12-21 04:50:07 -06:00)
Merge branch 'main' into minidumps
33	CHANGELOG.md
@@ -1,5 +1,38 @@
 # Changes
 
+## 2.0.6 (8 November 2025)
+
+### Security
+
+Add a mitigation for another DOS attack using adversarial brotli payloads.
+Similar to, but distinct from, the fix in 2.0.5.
+
+## 2.0.5 (8 November 2025)
+
+### Security
+
+Add a mitigation for certain DOS attacks using adversarial brotli payloads, see #266
+
+### Backwards incompatible changes
+
+Fail to start when using non-sqlite for snappea, see #252
+
+Since this was always recommended against, and probably broken anyway, this is not
+expected to be backwards incompatible _in practice_, but it is at least in principle.
+
+### Other changes
+
+* Markdown stacktrace: render with all frames, see 9cb89ecf46a7
+* Add database vendor, version and machine arch to phonehome message, see d8fef759cabc
+* Fix redirect on single-click actions when hosting at a subdomain, fix #250
+* "Poor man's" DB lock: lock the right DB; see e55c0eb417e2, and #252 for context
+* Add more warnings about using non-sqlite for snappea in the conf templates, see #252
+* `parse_timestamp`: _actually_ parse as UTC when timezone not provided, see 8ad7f9738085
+* Add debug setting for email-sending, fix #86
+* docker-compose-sample.yaml: clearer email:password placeholder for CREATE_SUPERUSER, see #261
+* Create snappea database on Docker start rather than image build, see #244
+
+
 ## 2.0.4 (9 October 2025)
 
 * `convert_mariadb_uuids` command to fix UUID column problems on MariaDB
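For context on the two Security entries above: brotli's extreme compression ratio makes "decompression bombs" cheap to build, which is exactly what the tiny-bomb test added later in this diff exploits. A minimal sketch of the amplification, using only the `brotli` package pinned in requirements (numbers illustrative; this is not Bugsink's mitigation code itself):

import brotli

# 15 MB of zeroes (the same payload the new test uses) compresses down to a
# tiny payload -- a huge amplification factor for an attacker.
bomb = brotli.compress(b"\x00" * 15_000_000)
print(len(bomb), "compressed bytes")

# A naive one-shot decompress materializes all 15 MB at once:
#   brotli.decompress(bomb)
# The streaming Decompressor lets the caller bound each output step instead
# (the author notes output can somewhat exceed the limit, but not unboundedly):
decompressor = brotli.Decompressor()
chunk = decompressor.process(bomb, output_buffer_limit=8 * 1024)
print(len(chunk), "bytes in first bounded chunk")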
@@ -54,8 +54,6 @@ RUN groupadd --gid 14237 bugsink \
 
 USER bugsink
 
-RUN ["bugsink-manage", "migrate", "snappea", "--database=snappea"]
-
 HEALTHCHECK CMD python -c 'import requests; requests.get("http://localhost:8000/health/ready").raise_for_status()'
 
-CMD [ "monofy", "bugsink-show-version", "&&", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "bugsink/gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"]
+CMD [ "monofy", "bugsink-show-version", "&&", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "snappea", "--database=snappea", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "bugsink/gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"]
@@ -79,8 +79,6 @@ RUN groupadd --gid 14237 bugsink \
 
 USER bugsink
 
-RUN ["bugsink-manage", "migrate", "snappea", "--database=snappea"]
-
 HEALTHCHECK CMD python -c 'import requests; requests.get("http://localhost:8000/health/ready").raise_for_status()'
 
-CMD [ "monofy", "bugsink-show-version", "&&", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"]
+CMD [ "monofy", "bugsink-show-version", "&&", "bugsink-manage", "check", "--deploy", "--fail-level", "WARNING", "&&", "bugsink-manage", "migrate", "snappea", "--database=snappea", "&&", "bugsink-manage", "migrate", "&&", "bugsink-manage", "prestart", "&&", "gunicorn", "--config", "gunicorn.docker.conf.py", "--bind=0.0.0.0:$PORT", "--access-logfile", "-", "bugsink.wsgi", "|||", "bugsink-runsnappea"]
2	LICENSE
@@ -5,7 +5,7 @@ Portions of this software are licensed as follows:
 * All content that resides under the "ee/" directory of this repository, if
   that directory exists, is licensed under the license defined in "ee/LICENSE".
 
-* All content that residues under the 'sentry' directory is Copyright 2019 Sentry
+* All content that resides under the 'sentry' directory is Copyright 2019 Sentry
   (https://sentry.io) and individual contributors. (BSD 3-Clause License)
 
 * The icons in SVG format, directly included in various html templates, are from
@@ -66,7 +66,7 @@ class Command(BaseCommand):
                 dsn = os.environ["SENTRY_DSN"]
             else:
                 raise CommandError(
-                    "You must provide a DSN to send data to Sentry. Use --dsn or set SENTRY_DSN environment variable.")
+                    "You must provide a DSN. Use --dsn or set SENTRY_DSN environment variable.")
         else:
             dsn = options['dsn']
 
@@ -134,9 +134,6 @@ class Command(BaseCommand):
         headers = {
             "Content-Type": "application/json",
             "X-Sentry-Auth": get_header_value(dsn),
-            # as it stands we always send identifier here, even if it's not a filename. Whether that's useful or
-            # annoying is an open question, but no reason to change it for now
-            "X-BugSink-DebugInfo": identifier,
         }
 
         if options["x_forwarded_for"]:
@@ -106,6 +106,7 @@ else:
 
 
 if os.getenv("EMAIL_HOST"):
+    EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"  # default, here for explicitness
     EMAIL_HOST = os.getenv("EMAIL_HOST")
     EMAIL_HOST_USER = os.getenv("EMAIL_HOST_USER")
     EMAIL_HOST_PASSWORD = os.getenv("EMAIL_HOST_PASSWORD")
@@ -114,6 +115,10 @@ if os.getenv("EMAIL_HOST"):
     # True, we use that.
     EMAIL_USE_SSL = os.getenv("EMAIL_USE_SSL", "False").lower() in ("true", "1", "yes")
     EMAIL_USE_TLS = os.getenv("EMAIL_USE_TLS", str(not EMAIL_USE_SSL)).lower() in ("true", "1", "yes")
+
+    if os.getenv("EMAIL_LOGGING", "false").lower() in ("true", "1", "yes"):
+        LOGGING['loggers']['bugsink.email']['level'] = "INFO"
+
 else:
     # print("WARNING: EMAIL_HOST not set; email will not be sent")
     EMAIL_BACKEND = "bugsink.email_backends.QuietConsoleEmailBackend"
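A small sketch of the truthiness convention these settings use, and of the coupling between the two flags: EMAIL_USE_TLS defaults to the negation of EMAIL_USE_SSL, so once EMAIL_HOST is configured, exactly one of the two is enabled by default (variable names as above; the `env_bool` helper is hypothetical, not in the diff):

import os

def env_bool(name, default):
    # mirrors the pattern above: "true"/"1"/"yes" (any case) count as True
    return os.getenv(name, default).lower() in ("true", "1", "yes")

use_ssl = env_bool("EMAIL_USE_SSL", "False")
use_tls = env_bool("EMAIL_USE_TLS", str(not use_ssl))

# with neither variable set: use_ssl=False, use_tls=True (implicit TLS default)
# with only EMAIL_USE_SSL=true: use_ssl=True, use_tls=False
print(use_ssl, use_tls)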
@@ -71,6 +71,10 @@ EMAIL_BACKEND = "bugsink.email_backends.QuietConsoleEmailBackend"  # instead of
 # EMAIL_USE_TLS = ...
 # EMAIL_USE_SSL = ...
 
+# Uncomment the line below to show all sent emails in the logs
+# LOGGING['loggers']['bugsink.email']['level'] = "INFO"
+
+
 SERVER_EMAIL = DEFAULT_FROM_EMAIL = "Bugsink <bugsink@example.org>"
 
 # constants for "create by" (user/team/project) settings
@@ -356,6 +356,13 @@ LOGGING['loggers']['bugsink.performance'] = {
     "propagate": False,
 }
 
+# Email logging is hidden below WARNING by default, but this can be changed by setting the level to INFO.
+LOGGING['loggers']['bugsink.email'] = {
+    "level": "WARNING",
+    "handlers": ["console"],
+    "propagate": False,
+}
+
 # Snappea Logging
 LOGGING["formatters"]["snappea"] = {
     "format": "{threadName} - {levelname:7} - {message}",
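What the new `bugsink.email` logger block means in practice, as a runnable sketch (plain stdlib logging, standing in for the project's LOGGING dict): INFO messages, like the `logger.info()` call added to `send_rendered_email` further down in this diff, stay hidden until EMAIL_LOGGING raises the level.

import logging

logger = logging.getLogger("bugsink.email")
logger.addHandler(logging.StreamHandler())  # stand-in for the "console" handler
logger.propagate = False

logger.setLevel(logging.WARNING)   # the default configured above
logger.info("Sending email ...")   # swallowed

logger.setLevel(logging.INFO)      # effectively what EMAIL_LOGGING=true does
logger.info("Sending email ...")   # now printed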
@@ -1,9 +1,10 @@
+from django.core.exceptions import BadRequest
+
 import zlib
 import io
 import brotli
 
 from bugsink.app_settings import get_settings
 from bugsink.utils import assert_
 
 
 DEFAULT_CHUNK_SIZE = 8 * 1024
@@ -24,6 +25,15 @@ class MaxLengthExceeded(ValueError):
     pass
 
 
+class BrotliError(ValueError):
+    """similar to brotli.error, but separate from it, to clarify non-library failure"""
+
+
+def brotli_assert(condition, message):
+    if not condition:
+        raise BrotliError(message)
+
+
 def zlib_generator(input_stream, wbits, chunk_size=DEFAULT_CHUNK_SIZE):
     z = zlib.decompressobj(wbits=wbits)
 
@@ -38,43 +48,75 @@ def zlib_generator(input_stream, wbits, chunk_size=DEFAULT_CHUNK_SIZE):
 
 
 def brotli_generator(input_stream, chunk_size=DEFAULT_CHUNK_SIZE):
+    # implementation notes: in principle chunk_size for input and output could be different, we keep them the same here.
+    # I've also seen that the actual output data may be quite a bit larger than the output_buffer_limit; a detail that
+    # I do not fully understand (but I understand that at least it's not _unboundedly_ larger).
+
+    # The brotli_assertions below are designed to guarantee that progress towards termination is made. In short:
+    # when no progress is made on the input_stream, either progress must be made on the output_stream or we must be in
+    # finished state.
     decompressor = brotli.Decompressor()
+    input_is_finished = False
 
-    while True:
-        compressed_chunk = input_stream.read(chunk_size)
-        if not compressed_chunk:
-            break
-
-        yield decompressor.process(compressed_chunk)
+    while not (decompressor.is_finished() and input_is_finished):
+        if decompressor.can_accept_more_data():
+            compressed_chunk = input_stream.read(chunk_size)
+            if compressed_chunk:
+                data = decompressor.process(compressed_chunk, output_buffer_limit=chunk_size)
+                # brotli_assert not needed: we made progress on the `input_stream` in any case (we cannot infinitely be
+                # in this branch because the input_stream is finite).
+            else:
+                input_is_finished = True
+                data = decompressor.process(b"", output_buffer_limit=chunk_size)  # b"": no input available, "drain"
+                brotli_assert(
+                    len(data) or decompressor.is_finished(),
+                    "Draining done -> decompressor finished; if not, something's off")
+
+                assert_(decompressor.is_finished())
+        else:
+            data = decompressor.process(b"", output_buffer_limit=chunk_size)  # b"": compressor cannot accept more input
+            brotli_assert(
+                len(data) > 0,
+                "A brotli processor that cannot accept input _must_ be able to produce output or it would be stuck.")
+
+        if data:
+            yield data
 
 
 class GeneratorReader:
     """Read from a generator (yielding bytes) as from a file-like object. In practice: used by content_encoding_reader,
     so it's grown to fit that use case (and we may later want to reflect that in the name)."""
 
-    def __init__(self, generator):
+    def __init__(self, generator, bad_request_exceptions=()):
         self.generator = generator
-        self.unread = b""
+        self.bad_request_exceptions = bad_request_exceptions
+        self.buffer = bytearray()
 
     def read(self, size=None):
+        try:
+            return self._read(size)
+        except self.bad_request_exceptions as e:
+            raise BadRequest(str(e)) from e
+
+    def _read(self, size=None):
         if size is None:
             for chunk in self.generator:
-                self.unread += chunk
+                self.buffer.extend(chunk)
 
-            result = self.unread
-            self.unread = b""
+            result = bytes(self.buffer)
+            self.buffer.clear()
             return result
 
-        while size > len(self.unread):
+        while len(self.buffer) < size:
             try:
                 chunk = next(self.generator)
-                if chunk == b"":
+                if not chunk:
                     break
-                self.unread += chunk
+                self.buffer.extend(chunk)
             except StopIteration:
                 break
 
-        self.unread, result = self.unread[size:], self.unread[:size]
+        result = bytes(self.buffer[:size])
+        del self.buffer[:size]
         return result
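A hedged usage sketch for the reworked `brotli_generator` above: driving it with a deliberately small `chunk_size` exercises all three branches (feed input, drain at EOF, "cannot accept more data"), the same idea as the tiny-bomb test later in this diff; the sizes here are illustrative.

import io
import brotli

compressed = io.BytesIO(brotli.compress(b"\x00" * 1_000_000))

total = 0
for chunk in brotli_generator(compressed, chunk_size=1024):
    # each chunk is bounded (roughly) by output_buffer_limit, so memory use
    # stays small even though the payload expands enormously
    total += len(chunk)
assert total == 1_000_000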
@@ -82,13 +124,13 @@ def content_encoding_reader(request):
     encoding = request.META.get("HTTP_CONTENT_ENCODING", "").lower()
 
     if encoding == "gzip":
-        return GeneratorReader(zlib_generator(request, WBITS_PARAM_FOR_GZIP))
+        return GeneratorReader(zlib_generator(request, WBITS_PARAM_FOR_GZIP), bad_request_exceptions=(zlib.error,))
 
     if encoding == "deflate":
-        return GeneratorReader(zlib_generator(request, WBITS_PARAM_FOR_DEFLATE))
+        return GeneratorReader(zlib_generator(request, WBITS_PARAM_FOR_DEFLATE), bad_request_exceptions=(zlib.error,))
 
     if encoding == "br":
-        return GeneratorReader(brotli_generator(request))
+        return GeneratorReader(brotli_generator(request), bad_request_exceptions=(brotli.error, BrotliError))
 
     return request
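The practical effect of the `bad_request_exceptions` wiring above, sketched end-to-end: a corrupt or truncated body now surfaces as Django's `BadRequest` (answered as an HTTP 400) instead of an unhandled `brotli.error` bubbling up as a 500. (It is assumed here that truncating mid-stream trips either `brotli.error` or the draining `brotli_assert`.)

import io
import brotli
from django.core.exceptions import BadRequest

corrupt = io.BytesIO(brotli.compress(b"hello world" * 100)[:10])  # cut off mid-stream
reader = GeneratorReader(brotli_generator(corrupt),
                         bad_request_exceptions=(brotli.error, BrotliError))
try:
    reader.read()
except BadRequest as e:
    print("would be answered with 400 Bad Request:", e)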
@@ -43,68 +43,43 @@ class StreamsTestCase(RegularTestCase):
     def test_compress_decompress_gzip(self):
         with open(__file__, 'rb') as f:
             myself_times_ten = f.read() * 10
 
         plain_stream = io.BytesIO(myself_times_ten)
 
         compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_GZIP))
 
-        result = b""
         reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_GZIP))
-
-        while True:
-            chunk = reader.read(3)
-            result += chunk
-            if chunk == b"":
-                break
-
-        self.assertEqual(myself_times_ten, result)
+        self.assertEqual(myself_times_ten, reader.read())
 
     def test_compress_decompress_deflate(self):
         with open(__file__, 'rb') as f:
             myself_times_ten = f.read() * 10
 
         plain_stream = io.BytesIO(myself_times_ten)
 
         compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_DEFLATE))
 
-        result = b""
         reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_DEFLATE))
-
-        while True:
-            chunk = reader.read(3)
-            result += chunk
-            if chunk == b"":
-                break
-
-        self.assertEqual(myself_times_ten, result)
+        self.assertEqual(myself_times_ten, reader.read())
 
     def test_compress_decompress_brotli(self):
         with open(__file__, 'rb') as f:
             myself_times_ten = f.read() * 10
 
         compressed_stream = io.BytesIO(brotli.compress(myself_times_ten))
 
-        result = b""
         reader = GeneratorReader(brotli_generator(compressed_stream))
-
-        while True:
-            chunk = reader.read(3)
-            result += chunk
-            if chunk == b"":
-                break
-
-        self.assertEqual(myself_times_ten, result)
+        self.assertEqual(myself_times_ten, reader.read())
 
-    def test_compress_decompress_read_none(self):
-        with open(__file__, 'rb') as f:
-            myself_times_ten = f.read() * 10
-        plain_stream = io.BytesIO(myself_times_ten)
-
-        compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_DEFLATE))
-
-        result = b""
-        reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_DEFLATE))
-
-        result = reader.read(None)
-        self.assertEqual(myself_times_ten, result)
+    def test_decompress_brotli_tiny_bomb(self):
+        # by picking something "sufficiently large" we can ensure all three code paths in brotli_generator are taken,
+        # in particular the "cannot accept more input" path. (for it to be taken, we need a "big thing" on the output
+        # side)
+        compressed_stream = io.BytesIO(brotli.compress(b"\x00" * 15_000_000))
+
+        size = 0
+        generator = brotli_generator(compressed_stream)
+        for chunk in generator:
+            size += len(chunk)
+        self.assertEqual(15_000_000, size)
 
     def test_max_data_reader(self):
         stream = io.BytesIO(b"hello" * 100)
@@ -143,6 +118,37 @@ class StreamsTestCase(RegularTestCase):
         with self.assertRaises(ValueError):
             writer.write(b"hellohello")
 
+    def test_generator_reader(self):
+
+        def generator():
+            yield b"hello "
+            yield b"I am "
+            yield b"a generator"
+
+        reader = GeneratorReader(generator())
+
+        self.assertEqual(b"hel", reader.read(3))
+        self.assertEqual(b"lo ", reader.read(3))
+        self.assertEqual(b"I a", reader.read(3))
+        self.assertEqual(b"m a", reader.read(3))
+        self.assertEqual(b" generator", reader.read(None))
+
+    def test_generator_reader_performance(self):
+        # at least one test directly for GeneratorReader; doubles as a regression test for a performance issue that
+        # showed up when the underlying generator yielded relatively big chunks and the read() size was small. should
+        # run easily under a second.
+
+        def yielding_big_chunks():
+            yield b"x" * 500_000
+
+        read = []
+        reader = GeneratorReader(yielding_big_chunks())
+        while True:
+            chunk = reader.read(1)
+            if chunk == b"":
+                break
+            read.append(chunk)
+
 
 @override_settings(DEBUG_CSRF=True)
 class CSRFViewsTestCase(DjangoTestCase):
@@ -30,6 +30,8 @@ def send_rendered_email(subject, base_template_name, recipient_list, context=None
         )
         return
 
+    logger.info("Sending email with subject '%s' to %s", subject, recipient_list)
+
     if context is None:
         context = {}
@@ -25,7 +25,7 @@ services:
       - "8000:8000"
     environment:
       SECRET_KEY: django-insecure-RMLYThim9NybWgXiUGat32Aa0Qbgqscf4NPDQuZO2glcZPOiXn  # Change this (and remove django-insecure prefix), e.g. openssl rand -base64 50
-      CREATE_SUPERUSER: admin:admin  # Change this (or remove it and execute 'createsuperuser' against the running container)
+      CREATE_SUPERUSER: email:password  # Change this (or remove it and execute 'createsuperuser' against the running container)
       PORT: 8000
       DATABASE_URL: postgresql://bugsinkuser:your_super_secret_password@db:5432/bugsink  # Change password to match POSTGRES_PASSWORD above
       BEHIND_HTTPS_PROXY: "false"  # Change this for setups behind a proxy w/ ssl enabled
@@ -32,7 +32,7 @@ class EventAdmin(admin.ModelAdmin):
 
     ordering = ['-timestamp']
 
-    search_fields = ['event_id', 'debug_info']
+    search_fields = ['event_id']
 
     list_display = [
         'timestamp',
@@ -41,7 +41,6 @@ class EventAdmin(admin.ModelAdmin):
         'level',
         'sdk_name',
         'sdk_version',
-        'debug_info',
         'on_site',
     ]
 
@@ -73,7 +72,6 @@ class EventAdmin(admin.ModelAdmin):
         'environment',
         'sdk_name',
         'sdk_version',
-        'debug_info',
         'pretty_data',
     ]
17	events/migrations/0024_remove_event_debug_info.py	Normal file
@@ -0,0 +1,17 @@
+# Generated by Django 5.2 on 2025-11-09 19:56
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("events", "0023_event_remote_addr"),
+    ]
+
+    operations = [
+        migrations.RemoveField(
+            model_name="event",
+            name="debug_info",
+        ),
+    ]
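Applying the new migration is the usual Django flow; a sketch using `call_command` (equivalent to `bugsink-manage migrate events`, and assuming settings are already configured):

from django.core.management import call_command

# forwards: drops the events.debug_info column
call_command("migrate", "events", "0024_remove_event_debug_info")

# backwards (RemoveField is reversible; Django restores the field from the
# prior migration state):
# call_command("migrate", "events", "0023_event_remote_addr")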
@@ -117,9 +117,6 @@ class Event(models.Model):
     sdk_name = models.CharField(max_length=255, blank=True, null=False, default="")
     sdk_version = models.CharField(max_length=255, blank=True, null=False, default="")
 
-    # this is a temporary(?), bugsink-specific value;
-    debug_info = models.CharField(max_length=255, blank=True, null=False, default="")
-
     # denormalized/cached fields:
     calculated_type = models.CharField(max_length=128, blank=True, null=False, default="")
     calculated_value = models.TextField(max_length=1024, blank=True, null=False, default="")
@@ -239,8 +236,6 @@ class Event(models.Model):
             sdk_name=maybe_empty(parsed_data.get("sdk", {}).get("name", ""))[:255],
             sdk_version=maybe_empty(parsed_data.get("sdk", {}).get("version", ""))[:255],
 
-            debug_info=event_metadata["debug_info"][:255],
-
             # just getting from the dict would be more precise, since we always add this info, but doing the .get()
             # allows for backwards compatibility (digesting events for which the info was not added on-ingest) so
             # we'll take the defensive approach "for now" (until most everyone is on >= 1.7.4)
@@ -10,7 +10,7 @@ logger = logging.getLogger("bugsink.ingest")
 
 class StoreEnvelope:
     def __init__(self, ingested_at, project_pk, request):
-        self._read = b""
+        self._read = bytearray()
 
         self._ingested_at = ingested_at
         self._project_pk = project_pk
@@ -20,7 +20,7 @@ class StoreEnvelope:
     def read(self, size):
         result = self.request.read(size)
         if result:
-            self._read += result
+            self._read.extend(result)
         return result
 
     def __getattr__(self, attr):
@@ -33,7 +33,7 @@ class StoreEnvelope:
     @immediate_atomic()
     def store(self):
         # read the rest of the request; the regular .ingest() method breaks early by design
-        self._read += self.request.read()
+        self._read.extend(self.request.read())
 
         if Envelope.objects.count() >= get_settings().KEEP_ENVELOPES:  # >= b/c about to add
             # -1 because 0-indexed; we delete including the boundary, so we'll have space for the new one
@@ -43,7 +43,7 @@ class StoreEnvelope:
         envelope = Envelope.objects.create(
             ingested_at=self._ingested_at,
             project_pk=self._project_pk,
-            data=self._read,
+            data=bytes(self._read),
         )
 
         # arguably "debug", but if you turned StoreEnvelope on, you probably want to use its results "soon", and I'd
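The bytes-to-bytearray changes above are a performance fix: in CPython, `bytes += chunk` typically builds a new object, copying everything accumulated so far on each append (quadratic overall), while `bytearray.extend` grows in place and amortizes to linear time. The same reasoning drives the GeneratorReader buffer rewrite and its regression test earlier in this diff. A quick way to see it (sizes illustrative):

import timeit

chunks = [b"x" * 1024] * 5_000

def concat_bytes():
    buf = b""
    for c in chunks:
        buf += c          # copies the whole accumulated buffer each time
    return buf

def concat_bytearray():
    buf = bytearray()
    for c in chunks:
        buf.extend(c)     # in-place, amortized O(1) per append
    return bytes(buf)

print("bytes +=  :", timeit.timeit(concat_bytes, number=3))
print("bytearray :", timeit.timeit(concat_bytearray, number=3))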
@@ -185,7 +185,8 @@ class StreamingEnvelopeParser:
             should_be_empty = io.BytesIO()
             self.remainder, self.at_eof = readuntil(
                 self.input_stream, self.remainder, NewlineFinder(), should_be_empty, self.chunk_size)
-            if should_be_empty.getvalue() != b"":
+            should_be_empty_value = should_be_empty.getvalue()
+            if should_be_empty_value != b"":
                 raise ParseError("Item with explicit length not terminated by newline/EOF")
         finally:
             item_output_stream.close()
@@ -51,7 +51,6 @@ def _digest_params(event_data, project, request, now=None):
         "event_id": event_data["event_id"],
         "project_id": project.id,
         "ingested_at": format_timestamp(now),
-        "debug_info": "",
     },
     "event_data": event_data,
     "digested_at": now,
@@ -321,7 +320,6 @@ class IngestViewTestCase(TransactionTestCase):
             content_type="application/json",
             headers={
                 "X-Sentry-Auth": sentry_auth_header,
-                "X-BugSink-DebugInfo": filename,
             },
             data=data_bytes,
         )
@@ -181,10 +181,6 @@ class BaseIngestAPIView(View):
 
     @classmethod
     def get_event_meta(cls, event_id, ingested_at, request, project):
-        # Meta means: not part of the event data. Basically: information that is available at the time of ingestion,
-        # and that must be passed to digest() in a serializable form.
-        debug_info = request.META.get("HTTP_X_BUGSINK_DEBUGINFO", "")
-
         # .get(..) -- don't want to crash on this and it's non-trivial to find a source that tells me with certainty
         # that the REMOTE_ADDR is always in request.META (it probably is in practice)
         remote_addr = request.META.get("REMOTE_ADDR")
@@ -193,7 +189,6 @@ class BaseIngestAPIView(View):
             "event_id": event_id,
             "project_id": project.id,
             "ingested_at": format_timestamp(ingested_at),
-            "debug_info": debug_info,
             "remote_addr": remote_addr,
         }
 
@@ -521,7 +521,6 @@ class IntegrationTest(TransactionTestCase):
             content_type="application/json",
             headers={
                 "X-Sentry-Auth": sentry_auth_header,
-                "X-BugSink-DebugInfo": filename,
             },
         )
         self.assertEqual(
@@ -554,7 +553,7 @@ class IntegrationTest(TransactionTestCase):
 
         except Exception as e:
             # we want to know _which_ event failed, hence the raise-from-e here
-            raise AssertionError("Error rendering event %s" % event.debug_info) from e
+            raise AssertionError("Error rendering event") from e
 
     def test_render_stacktrace_md(self):
         user = User.objects.create_user(username='test', password='test')
@@ -588,7 +587,6 @@ class IntegrationTest(TransactionTestCase):
             content_type="application/json",
             headers={
                 "X-Sentry-Auth": sentry_auth_header,
-                "X-BugSink-DebugInfo": filename,
             },
         )
         self.assertEqual(
@@ -7,7 +7,7 @@ semver==3.0.*
 django-admin-autocomplete-filter==0.7.*
 pygments==2.19.*
 inotify_simple==2.0.*
-Brotli==1.1.*
+Brotli==1.2.*
 python-dateutil==2.9.*
 whitenoise==6.11.*
 requests==2.32.*
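The Brotli pin bump above matters because the reworked `brotli_generator` depends on the newer streaming Decompressor API; a fail-fast guard like the following (hypothetical, not part of this diff) would catch an old install at startup. The exact release that introduced these methods is an assumption here.

import brotli

_d = brotli.Decompressor()
if not hasattr(_d, "can_accept_more_data"):
    raise RuntimeError(
        "Brotli too old: streaming API (can_accept_more_data/output_buffer_limit) missing")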