Mirror of https://github.com/opencloud-eu/opencloud.git
build(deps): bump github.com/kovidgoyal/imaging from 1.6.4 to 1.7.2
Bumps [github.com/kovidgoyal/imaging](https://github.com/kovidgoyal/imaging) from 1.6.4 to 1.7.2.
- [Release notes](https://github.com/kovidgoyal/imaging/releases)
- [Changelog](https://github.com/kovidgoyal/imaging/blob/master/.goreleaser.yaml)
- [Commits](https://github.com/kovidgoyal/imaging/compare/v1.6.4...v1.7.2)

---
updated-dependencies:
- dependency-name: github.com/kovidgoyal/imaging
  dependency-version: 1.7.2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Committed by: Ralf Haferkamp
Parent: fb94f34a1f
Commit: d76cacd99f
go.mod (6 changed lines: 4 additions, 2 deletions)

@@ -48,7 +48,7 @@ require (
 	github.com/jellydator/ttlcache/v3 v3.4.0
 	github.com/jinzhu/now v1.1.5
 	github.com/justinas/alice v1.2.0
-	github.com/kovidgoyal/imaging v1.6.4
+	github.com/kovidgoyal/imaging v1.7.2
 	github.com/leonelquinteros/gotext v1.7.2
 	github.com/libregraph/idm v0.5.0
 	github.com/libregraph/lico v0.66.0
@@ -104,7 +104,7 @@ require (
 	go.opentelemetry.io/otel/trace v1.38.0
 	golang.org/x/crypto v0.43.0
 	golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac
-	golang.org/x/image v0.31.0
+	golang.org/x/image v0.32.0
 	golang.org/x/net v0.46.0
 	golang.org/x/oauth2 v0.32.0
 	golang.org/x/sync v0.17.0
@@ -257,6 +257,7 @@ require (
 	github.com/kevinburke/ssh_config v1.2.0 // indirect
 	github.com/klauspost/compress v1.18.0 // indirect
 	github.com/klauspost/cpuid/v2 v2.2.11 // indirect
+	github.com/kovidgoyal/go-parallel v1.0.1 // indirect
 	github.com/leodido/go-urn v1.4.0 // indirect
 	github.com/lestrrat-go/blackmagic v1.0.4 // indirect
 	github.com/lestrrat-go/dsig v1.0.0 // indirect
@@ -327,6 +328,7 @@ require (
 	github.com/rs/xid v1.6.0 // indirect
 	github.com/russellhaering/goxmldsig v1.5.0 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
+	github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd // indirect
 	github.com/segmentio/asm v1.2.0 // indirect
 	github.com/segmentio/kafka-go v0.4.49 // indirect
 	github.com/segmentio/ksuid v1.0.4 // indirect
go.sum (12 changed lines: 8 additions, 4 deletions)

@@ -729,8 +729,10 @@ github.com/kolo/xmlrpc v0.0.0-20200310150728-e0350524596b/go.mod h1:o03bZfuBwAXH
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/kovidgoyal/imaging v1.6.4 h1:K0idhRPXnRrJBKnBYcTfI1HTWSNDeAn7hYDvf9I0dCk=
-github.com/kovidgoyal/imaging v1.6.4/go.mod h1:bEIgsaZmXlvFfkv/CUxr9rJook6AQkJnpB5EPosRfRY=
+github.com/kovidgoyal/go-parallel v1.0.1 h1:nYUjN+EdpbmQjTg3N5eTUInuXTB3/1oD2vHdaMfuHoI=
+github.com/kovidgoyal/go-parallel v1.0.1/go.mod h1:BJNIbe6+hxyFWv7n6oEDPj3PA5qSw5OCtf0hcVxWJiw=
+github.com/kovidgoyal/imaging v1.7.2 h1:mmT6k6Az3mC6dbqdZ6Q9KQCdZFWTAQ+q97NyGZgJ/2c=
+github.com/kovidgoyal/imaging v1.7.2/go.mod h1:GdkCORjfZMMGFY0Pb7TDmRhj7PDhxF/QShKukSCj0VU=
 github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -1079,6 +1081,8 @@ github.com/russellhaering/goxmldsig v1.5.0/go.mod h1:x98CjQNFJcWfMxeOrMnMKg70lvD
 github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
 github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
+github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
 github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
 github.com/sacloud/libsacloud v1.36.2/go.mod h1:P7YAOVmnIn3DKHqCZcUKYUXmSwGBm3yS7IBEjKVSrjg=
 github.com/scaleway/scaleway-sdk-go v1.0.0-beta.7.0.20210127161313-bd30bebeac4f/go.mod h1:CJJ5VAbozOl0yEw7nHB9+7BXTJbIn6h7W+f6Gau5IP8=
@@ -1360,8 +1364,8 @@ golang.org/x/exp v0.0.0-20250210185358-939b2ce775ac/go.mod h1:hH+7mtFmImwwcMvScy
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
 golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
 golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E=
-golang.org/x/image v0.31.0 h1:mLChjE2MV6g1S7oqbXC0/UcKijjm5fnJLUYKIYrLESA=
-golang.org/x/image v0.31.0/go.mod h1:R9ec5Lcp96v9FTF+ajwaH3uGxPH4fKfHHAVbUILxghA=
+golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ=
+golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
 golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
vendor/github.com/kovidgoyal/go-parallel/.gitignore (generated, vendored, new file, 1 line)

@@ -0,0 +1 @@
dist
vendor/github.com/kovidgoyal/go-parallel/.goreleaser.yaml (generated, vendored, new file, 2 lines)

@@ -0,0 +1,2 @@
builds:
- skip: true
vendor/github.com/kovidgoyal/go-parallel/LICENSE (generated, vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
BSD 3-Clause License

Copyright (c) 2025, Kovid Goyal

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
   list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
   this list of conditions and the following disclaimer in the documentation
   and/or other materials provided with the distribution.

3. Neither the name of the copyright holder nor the names of its
   contributors may be used to endorse or promote products derived from
   this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
vendor/github.com/kovidgoyal/go-parallel/README.md (generated, vendored, new file, 5 lines)

@@ -0,0 +1,5 @@
# go-parallel

Utility functions to make running code in parallel easier and safer.
Panics in go routines are turned into regular errors, instead of crashing
the program.
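For context, a minimal, hypothetical usage sketch of the API vendored below in parallel.go; the names (data, main) are illustrative and not part of this commit. A panic inside the worker function comes back as a regular error carrying a formatted stack trace:

package main

import (
	"fmt"

	parallel "github.com/kovidgoyal/go-parallel"
)

func main() {
	data := make([]int, 1000)
	// Square every element, splitting the index range across GOMAXPROCS workers
	// (num_procs = 0 means "use GOMAXPROCS"). A panic inside the worker would be
	// returned as a *parallel.PanicError instead of crashing the program.
	err := parallel.Run_in_parallel_over_range(0, func(start, limit int) {
		for i := start; i < limit; i++ {
			data[i] = i * i
		}
	}, 0, len(data))
	if err != nil {
		fmt.Println("worker panicked:", err)
	}
}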
vendor/github.com/kovidgoyal/go-parallel/parallel.go (generated, vendored, new file, 171 lines)

@@ -0,0 +1,171 @@
package parallel

import (
	"fmt"
	"iter"
	"runtime"
	"slices"
	"strings"
	"sync"
)

var _ = fmt.Print

type PanicError struct {
	frames      []runtime.Frame
	panic_value any
}

const indent_lead = " "

func format_frame_line(frame runtime.Frame) string {
	return fmt.Sprintf("\r\n%s%s%s:%d", indent_lead, frame.Function, frame.File, frame.Line)
}

func (e *PanicError) walk(level int, yield func(string) bool) bool {
	s := "Panic"
	cause := fmt.Sprintf("%v", e.panic_value)
	if _, ok := e.panic_value.(*PanicError); ok {
		cause = "sub-panic (see below)"
	}
	if level > 0 {
		s = "\r\n--> Sub-panic"
	}
	if !yield(fmt.Sprintf("%s caused by: %s\r\nStack trace (most recent call first):", s, cause)) {
		return false
	}
	for _, f := range e.frames {
		if !yield(format_frame_line(f)) {
			return false
		}
	}
	if sp, ok := e.panic_value.(*PanicError); ok {
		return sp.walk(level+1, yield)
	}
	return true
}

func (e *PanicError) lines() iter.Seq[string] {
	return func(yield func(string) bool) {
		e.walk(0, yield)
	}
}

func (e *PanicError) Error() string {
	return strings.Join(slices.Collect(e.lines()), "")
}

func (e *PanicError) Unwrap() error {
	if ans, ok := e.panic_value.(*PanicError); ok {
		return ans
	}
	return nil
}

// Format a stack trace on panic and return it as an error
func Format_stacktrace_on_panic(r any, skip_frames int) (err *PanicError) {
	pcs := make([]uintptr, 512)
	n := runtime.Callers(2+skip_frames, pcs)
	var ans []runtime.Frame
	frames := runtime.CallersFrames(pcs[:n])
	found_first_frame := false
	for frame, more := frames.Next(); more; frame, more = frames.Next() {
		if !found_first_frame {
			if strings.HasPrefix(frame.Function, "runtime.") {
				continue
			}
			found_first_frame = true
		}
		ans = append(ans, frame)
	}
	return &PanicError{frames: ans, panic_value: r}
}

// Run the specified function in parallel over chunks from the specified range.
// If the function panics, it is turned into a regular error. If multiple function calls panic,
// any one of the panics will be returned.
func Run_in_parallel_over_range(num_procs int, f func(int, int), start, limit int) (err error) {
	num_items := limit - start
	if num_procs <= 0 {
		num_procs = runtime.GOMAXPROCS(0)
	}
	num_procs = max(1, min(num_procs, num_items))
	if num_procs < 2 {
		defer func() {
			if r := recover(); r != nil {
				err = Format_stacktrace_on_panic(r, 1)
			}
		}()
		f(start, limit)
		return
	}
	chunk_sz := max(1, num_items/num_procs)
	var wg sync.WaitGroup
	echan := make(chan error, num_items/chunk_sz+1)
	for start < limit {
		end := min(start+chunk_sz, limit)
		wg.Add(1)
		go func(start, end int) {
			defer func() {
				if r := recover(); r != nil {
					echan <- Format_stacktrace_on_panic(r, 1)
				}
				wg.Done()
			}()
			f(start, end)
		}(start, end)
		start = end
	}
	wg.Wait()
	close(echan)
	for qerr := range echan {
		return qerr
	}
	return
}

// Run the specified function in parallel over chunks from the specified range.
// If the function panics, it is turned into a regular error. If the function
// returns an error it is returned. If multiple function calls panic or return errors,
// any one of them will be returned.
func Run_in_parallel_over_range_with_error(num_procs int, f func(int, int) error, start, limit int) (err error) {
	num_items := limit - start
	if num_procs <= 0 {
		num_procs = runtime.GOMAXPROCS(0)
	}
	num_procs = max(1, min(num_procs, num_items))
	if num_procs < 2 {
		defer func() {
			if r := recover(); r != nil {
				err = Format_stacktrace_on_panic(r, 1)
			}
		}()
		err = f(start, limit)
		return
	}
	chunk_sz := max(1, num_items/num_procs)
	var wg sync.WaitGroup
	echan := make(chan error, num_items/chunk_sz+1)
	for start < limit {
		end := min(start+chunk_sz, limit)
		wg.Add(1)
		go func(start, end int) {
			defer func() {
				if r := recover(); r != nil {
					echan <- Format_stacktrace_on_panic(r, 1)
				}
				wg.Done()
			}()
			if cerr := f(start, end); cerr != nil {
				echan <- cerr
			}
		}(start, end)
		start = end
	}
	wg.Wait()
	close(echan)
	for qerr := range echan {
		return qerr
	}
	return
}
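A similar hypothetical sketch (not part of this commit) for the error-returning variant, Run_in_parallel_over_range_with_error; both worker errors and panics surface as the single returned error:

package main

import (
	"fmt"
	"strconv"

	parallel "github.com/kovidgoyal/go-parallel"
)

// parseAll converts a slice of decimal strings in parallel. An Atoi failure in
// any chunk, or a panic in the worker, is reported through the returned error.
func parseAll(inputs []string) ([]int, error) {
	parsed := make([]int, len(inputs))
	err := parallel.Run_in_parallel_over_range_with_error(0, func(start, limit int) error {
		for i := start; i < limit; i++ {
			v, convErr := strconv.Atoi(inputs[i])
			if convErr != nil {
				return convErr
			}
			parsed[i] = v
		}
		return nil
	}, 0, len(inputs))
	return parsed, err
}

func main() {
	nums, err := parseAll([]string{"1", "2", "3"})
	fmt.Println(nums, err)
}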
vendor/github.com/kovidgoyal/go-parallel/publish.py (generated, vendored, new file, 31 lines)

@@ -0,0 +1,31 @@
#!/usr/bin/env python
# License: GPLv3 Copyright: 2024, Kovid Goyal <kovid at kovidgoyal.net>

import os
import subprocess


VERSION = '1.0.1'


def run(*args: str):
    cp = subprocess.run(args)
    if cp.returncode != 0:
        raise SystemExit(cp.returncode)


def main():
    try:
        ans = input(f'Publish version \033[91m{VERSION}\033[m (y/n): ')
    except KeyboardInterrupt:
        ans = 'n'
    if ans.lower() != 'y':
        return
    os.environ['GITHUB_TOKEN'] = open(os.path.join(os.environ['PENV'], 'github-token')).read().strip().partition(':')[2]
    run('git', 'tag', '-a', 'v' + VERSION, '-m', f'version {VERSION}')
    run('git', 'push')
    run('goreleaser', 'release', '--clean')


if __name__ == '__main__':
    main()
vendor/github.com/kovidgoyal/imaging/.goreleaser.yaml (generated, vendored, 4 changed lines)

@@ -19,7 +19,7 @@ builds:
   - skip: true

 archives:
-  - format: tar.gz
+  - formats: [ 'tar.gz' ]
     # this name template makes the OS and Arch compatible with the results of `uname`.
     name_template: >-
       {{ .ProjectName }}_
@@ -31,7 +31,7 @@ archives:
     # use zip for windows archives
     format_overrides:
       - goos: windows
-        format: zip
+        formats: [ 'zip' ]

 changelog:
   disable: true
vendor/github.com/kovidgoyal/imaging/adjust.go (generated, vendored, 57 changed lines)
@@ -10,10 +10,10 @@ import (
|
||||
func Grayscale(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
src.Scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
r := d[0]
|
||||
@@ -27,7 +27,9 @@ func Grayscale(img image.Image) *image.NRGBA {
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -35,10 +37,10 @@ func Grayscale(img image.Image) *image.NRGBA {
|
||||
func Invert(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
src.Scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = 255 - d[0]
|
||||
@@ -47,7 +49,9 @@ func Invert(img image.Image) *image.NRGBA {
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -58,9 +62,9 @@ func Invert(img image.Image) *image.NRGBA {
|
||||
// The percentage = -100 gives the image with the saturation value zeroed for each pixel (grayscale).
|
||||
//
|
||||
// Examples:
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, 25) // Increase image saturation by 25%.
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, -10) // Decrease image saturation by 10%.
|
||||
//
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, 25) // Increase image saturation by 25%.
|
||||
// dstImage = imaging.AdjustSaturation(srcImage, -10) // Decrease image saturation by 10%.
|
||||
func AdjustSaturation(img image.Image, percentage float64) *image.NRGBA {
|
||||
if percentage == 0 {
|
||||
return Clone(img)
|
||||
@@ -85,9 +89,9 @@ func AdjustSaturation(img image.Image, percentage float64) *image.NRGBA {
|
||||
// The shift = 180 (or -180) corresponds to a 180° degree rotation of the color wheel and thus gives the image with its hue inverted for each pixel.
|
||||
//
|
||||
// Examples:
|
||||
// dstImage = imaging.AdjustHue(srcImage, 90) // Shift Hue by 90°.
|
||||
// dstImage = imaging.AdjustHue(srcImage, -30) // Shift Hue by -30°.
|
||||
//
|
||||
// dstImage = imaging.AdjustHue(srcImage, 90) // Shift Hue by 90°.
|
||||
// dstImage = imaging.AdjustHue(srcImage, -30) // Shift Hue by -30°.
|
||||
func AdjustHue(img image.Image, shift float64) *image.NRGBA {
|
||||
if math.Mod(shift, 360) == 0 {
|
||||
return Clone(img)
|
||||
@@ -116,7 +120,6 @@ func AdjustHue(img image.Image, shift float64) *image.NRGBA {
|
||||
//
|
||||
// dstImage = imaging.AdjustContrast(srcImage, -10) // Decrease image contrast by 10%.
|
||||
// dstImage = imaging.AdjustContrast(srcImage, 20) // Increase image contrast by 20%.
|
||||
//
|
||||
func AdjustContrast(img image.Image, percentage float64) *image.NRGBA {
|
||||
if percentage == 0 {
|
||||
return Clone(img)
|
||||
@@ -148,7 +151,6 @@ func AdjustContrast(img image.Image, percentage float64) *image.NRGBA {
|
||||
//
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, -15) // Decrease image brightness by 15%.
|
||||
// dstImage = imaging.AdjustBrightness(srcImage, 10) // Increase image brightness by 10%.
|
||||
//
|
||||
func AdjustBrightness(img image.Image, percentage float64) *image.NRGBA {
|
||||
if percentage == 0 {
|
||||
return Clone(img)
|
||||
@@ -172,7 +174,6 @@ func AdjustBrightness(img image.Image, percentage float64) *image.NRGBA {
|
||||
// Example:
|
||||
//
|
||||
// dstImage = imaging.AdjustGamma(srcImage, 0.7)
|
||||
//
|
||||
func AdjustGamma(img image.Image, gamma float64) *image.NRGBA {
|
||||
if gamma == 1 {
|
||||
return Clone(img)
|
||||
@@ -198,7 +199,6 @@ func AdjustGamma(img image.Image, gamma float64) *image.NRGBA {
|
||||
//
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, 3.0) // Increase the contrast.
|
||||
// dstImage = imaging.AdjustSigmoid(srcImage, 0.5, -3.0) // Decrease the contrast.
|
||||
//
|
||||
func AdjustSigmoid(img image.Image, midpoint, factor float64) *image.NRGBA {
|
||||
if factor == 0 {
|
||||
return Clone(img)
|
||||
@@ -212,14 +212,14 @@ func AdjustSigmoid(img image.Image, midpoint, factor float64) *image.NRGBA {
|
||||
e := 1.0e-6
|
||||
|
||||
if factor > 0 {
|
||||
for i := 0; i < 256; i++ {
|
||||
for i := range 256 {
|
||||
x := float64(i) / 255.0
|
||||
sigX := sigmoid(a, b, x)
|
||||
f := (sigX - sig0) / (sig1 - sig0)
|
||||
lut[i] = clamp(f * 255.0)
|
||||
}
|
||||
} else {
|
||||
for i := 0; i < 256; i++ {
|
||||
for i := range 256 {
|
||||
x := float64(i) / 255.0
|
||||
arg := math.Min(math.Max((sig1-sig0)*x+sig0, e), 1.0-e)
|
||||
f := a - math.Log(1.0/arg-1.0)/b
|
||||
@@ -239,10 +239,10 @@ func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
lut = lut[0:256]
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
src.Scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+3 : i+3]
|
||||
d[0] = lut[d[0]]
|
||||
@@ -251,7 +251,9 @@ func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -270,14 +272,13 @@ func adjustLUT(img image.Image, lut []uint8) *image.NRGBA {
|
||||
// return color.NRGBA{uint8(r), c.G, c.B, c.A}
|
||||
// }
|
||||
// )
|
||||
//
|
||||
func AdjustFunc(img image.Image, fn func(c color.NRGBA) color.NRGBA) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
src.Scan(0, y, src.w, y+1, dst.Pix[i:i+src.w*4])
|
||||
for x := 0; x < src.w; x++ {
|
||||
d := dst.Pix[i : i+4 : i+4]
|
||||
r := d[0]
|
||||
@@ -292,6 +293,8 @@ func AdjustFunc(img image.Image, fn func(c color.NRGBA) color.NRGBA) *image.NRGB
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
vendor/github.com/kovidgoyal/imaging/convolution.go (generated, vendored, 8 changed lines)
@@ -70,8 +70,8 @@ func convolve(img image.Image, kernel []float64, options *ConvolveOptions) *imag
|
||||
}
|
||||
}
|
||||
|
||||
parallel(0, h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
for x := 0; x < w; x++ {
|
||||
var r, g, b float64
|
||||
for _, c := range coefs {
|
||||
@@ -123,7 +123,9 @@ func convolve(img image.Image, kernel []float64, options *ConvolveOptions) *imag
|
||||
d[3] = src.Pix[srcOff+3]
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
vendor/github.com/kovidgoyal/imaging/effects.go (generated, vendored, 56 changed lines)
@@ -15,7 +15,6 @@ func gaussianBlurKernel(x, sigma float64) float64 {
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Blur(srcImage, 3.5)
|
||||
//
|
||||
func Blur(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
@@ -36,25 +35,19 @@ func blurHorizontal(img image.Image, kernel []float64) *image.NRGBA {
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for y := start; y < limit; y++ {
|
||||
src.Scan(0, y, src.w, y+1, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for x := 0; x < src.w; x++ {
|
||||
min := x - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := x + radius
|
||||
if max > src.w-1 {
|
||||
max = src.w - 1
|
||||
}
|
||||
minv := max(0, x-radius)
|
||||
maxv := min(x+radius, src.w-1)
|
||||
var r, g, b, a, wsum float64
|
||||
for ix := min; ix <= max; ix++ {
|
||||
for ix := minv; ix <= maxv; ix++ {
|
||||
i := ix * 4
|
||||
weight := kernel[absint(x-ix)]
|
||||
wsum += weight
|
||||
@@ -76,7 +69,9 @@ func blurHorizontal(img image.Image, kernel []float64) *image.NRGBA {
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
@@ -86,25 +81,19 @@ func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
radius := len(kernel) - 1
|
||||
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
scanLineF := make([]float64, len(scanLine))
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for x := start; x < limit; x++ {
|
||||
src.Scan(x, 0, x+1, src.h, scanLine)
|
||||
for i, v := range scanLine {
|
||||
scanLineF[i] = float64(v)
|
||||
}
|
||||
for y := 0; y < src.h; y++ {
|
||||
min := y - radius
|
||||
if min < 0 {
|
||||
min = 0
|
||||
}
|
||||
max := y + radius
|
||||
if max > src.h-1 {
|
||||
max = src.h - 1
|
||||
}
|
||||
minv := max(0, y-radius)
|
||||
maxv := min(y+radius, src.h-1)
|
||||
var r, g, b, a, wsum float64
|
||||
for iy := min; iy <= max; iy++ {
|
||||
for iy := minv; iy <= maxv; iy++ {
|
||||
i := iy * 4
|
||||
weight := kernel[absint(y-iy)]
|
||||
wsum += weight
|
||||
@@ -126,7 +115,9 @@ func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.w); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
@@ -137,7 +128,6 @@ func blurVertical(img image.Image, kernel []float64) *image.NRGBA {
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Sharpen(srcImage, 3.5)
|
||||
//
|
||||
func Sharpen(img image.Image, sigma float64) *image.NRGBA {
|
||||
if sigma <= 0 {
|
||||
return Clone(img)
|
||||
@@ -147,10 +137,10 @@ func Sharpen(img image.Image, sigma float64) *image.NRGBA {
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
blurred := Blur(img, sigma)
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for y := start; y < limit; y++ {
|
||||
src.Scan(0, y, src.w, y+1, scanLine)
|
||||
j := y * dst.Stride
|
||||
for i := 0; i < src.w*4; i++ {
|
||||
val := int(scanLine[i])<<1 - int(blurred.Pix[j])
|
||||
@@ -163,7 +153,9 @@ func Sharpen(img image.Image, sigma float64) *image.NRGBA {
|
||||
j++
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
vendor/github.com/kovidgoyal/imaging/histogram.go (generated, vendored, 14 changed lines)
@@ -19,12 +19,12 @@ func Histogram(img image.Image) [256]float64 {
|
||||
return histogram
|
||||
}
|
||||
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
var tmpHistogram [256]float64
|
||||
var tmpTotal float64
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for y := start; y < limit; y++ {
|
||||
src.Scan(0, y, src.w, y+1, scanLine)
|
||||
i := 0
|
||||
for x := 0; x < src.w; x++ {
|
||||
s := scanLine[i : i+3 : i+3]
|
||||
@@ -38,14 +38,16 @@ func Histogram(img image.Image) [256]float64 {
|
||||
}
|
||||
}
|
||||
mu.Lock()
|
||||
for i := 0; i < 256; i++ {
|
||||
for i := range 256 {
|
||||
histogram[i] += tmpHistogram[i]
|
||||
}
|
||||
total += tmpTotal
|
||||
mu.Unlock()
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for i := 0; i < 256; i++ {
|
||||
for i := range 256 {
|
||||
histogram[i] = histogram[i] / total
|
||||
}
|
||||
return histogram
|
||||
|
||||
vendor/github.com/kovidgoyal/imaging/io.go (generated, vendored, 185 changed lines)
@@ -1,7 +1,7 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"bytes"
|
||||
"errors"
|
||||
"image"
|
||||
"image/draw"
|
||||
@@ -9,11 +9,14 @@ import (
|
||||
"image/jpeg"
|
||||
"image/png"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/kovidgoyal/imaging/prism/meta/autometa"
|
||||
"github.com/rwcarlsen/goexif/exif"
|
||||
|
||||
"golang.org/x/image/bmp"
|
||||
"golang.org/x/image/tiff"
|
||||
)
|
||||
@@ -35,7 +38,7 @@ type decodeConfig struct {
|
||||
}
|
||||
|
||||
var defaultDecodeConfig = decodeConfig{
|
||||
autoOrientation: false,
|
||||
autoOrientation: true,
|
||||
}
|
||||
|
||||
// DecodeOption sets an optional parameter for the Decode and Open functions.
|
||||
@@ -43,7 +46,7 @@ type DecodeOption func(*decodeConfig)
|
||||
|
||||
// AutoOrientation returns a DecodeOption that sets the auto-orientation mode.
|
||||
// If auto-orientation is enabled, the image will be transformed after decoding
|
||||
// according to the EXIF orientation tag (if present). By default it's disabled.
|
||||
// according to the EXIF orientation tag (if present). By default it's enabled.
|
||||
func AutoOrientation(enabled bool) DecodeOption {
|
||||
return func(c *decodeConfig) {
|
||||
c.autoOrientation = enabled
|
||||
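This hunk, together with the defaultDecodeConfig change above, flips the default for EXIF auto-orientation from disabled to enabled. A hypothetical caller-side sketch (not part of this diff; the file name is illustrative) showing how to keep the old v1.6.4 behaviour explicitly:

package main

import (
	"log"

	"github.com/kovidgoyal/imaging"
)

func main() {
	// With imaging v1.7.2, EXIF auto-orientation is enabled by default.
	// Passing AutoOrientation(false) restores the previous (v1.6.4) default
	// of decoding without applying the EXIF orientation tag.
	img, err := imaging.Open("photo.jpg", imaging.AutoOrientation(false))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("decoded image with bounds %v", img.Bounds())
}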
@@ -53,6 +56,7 @@ func AutoOrientation(enabled bool) DecodeOption {
|
||||
// Decode reads an image from r.
|
||||
func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
|
||||
cfg := defaultDecodeConfig
|
||||
|
||||
for _, option := range opts {
|
||||
option(&cfg)
|
||||
}
|
||||
@@ -61,25 +65,27 @@ func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
|
||||
img, _, err := image.Decode(r)
|
||||
return img, err
|
||||
}
|
||||
|
||||
var orient orientation
|
||||
pr, pw := io.Pipe()
|
||||
r = io.TeeReader(r, pw)
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
defer close(done)
|
||||
orient = readOrientation(pr)
|
||||
io.Copy(ioutil.Discard, pr)
|
||||
}()
|
||||
md, r, err := autometa.Load(r)
|
||||
var oval orientation = orientationUnspecified
|
||||
if err == nil && md != nil && len(md.ExifData) > 6 {
|
||||
exif_data, err := exif.Decode(bytes.NewReader(md.ExifData))
|
||||
if err == nil {
|
||||
orient, err := exif_data.Get(exif.Orientation)
|
||||
if err == nil && orient != nil {
|
||||
x, err := strconv.ParseUint(orient.String(), 10, 0)
|
||||
if err == nil && x > 0 && x < 9 {
|
||||
oval = orientation(int(x))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
img, _, err := image.Decode(r)
|
||||
pw.Close()
|
||||
<-done
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return fixOrientation(img, orient), nil
|
||||
return fixOrientation(img, oval), nil
|
||||
}
|
||||
|
||||
// Open loads an image from file.
|
||||
@@ -91,7 +97,6 @@ func Decode(r io.Reader, opts ...DecodeOption) (image.Image, error) {
|
||||
//
|
||||
// // Load an image and transform it depending on the EXIF orientation tag (if present).
|
||||
// img, err := imaging.Open("test.jpg", imaging.AutoOrientation(true))
|
||||
//
|
||||
func Open(filename string, opts ...DecodeOption) (image.Image, error) {
|
||||
file, err := fs.Open(filename)
|
||||
if err != nil {
|
||||
@@ -101,6 +106,15 @@ func Open(filename string, opts ...DecodeOption) (image.Image, error) {
|
||||
return Decode(file, opts...)
|
||||
}
|
||||
|
||||
func OpenConfig(filename string) (ans image.Config, format_name string, err error) {
|
||||
file, err := fs.Open(filename)
|
||||
if err != nil {
|
||||
return ans, "", err
|
||||
}
|
||||
defer file.Close()
|
||||
return image.DecodeConfig(file)
|
||||
}
|
||||
|
||||
// Format is an image file format.
|
||||
type Format int
|
||||
|
||||
@@ -111,6 +125,10 @@ const (
|
||||
GIF
|
||||
TIFF
|
||||
BMP
|
||||
PBM
|
||||
PGM
|
||||
PPM
|
||||
PAM
|
||||
)
|
||||
|
||||
var formatExts = map[string]Format{
|
||||
@@ -121,6 +139,10 @@ var formatExts = map[string]Format{
|
||||
"tif": TIFF,
|
||||
"tiff": TIFF,
|
||||
"bmp": BMP,
|
||||
"pbm": PBM,
|
||||
"pgm": PGM,
|
||||
"ppm": PPM,
|
||||
"pam": PAM,
|
||||
}
|
||||
|
||||
var formatNames = map[Format]string{
|
||||
@@ -129,6 +151,9 @@ var formatNames = map[Format]string{
|
||||
GIF: "GIF",
|
||||
TIFF: "TIFF",
|
||||
BMP: "BMP",
|
||||
PBM: "PBM",
|
||||
PGM: "PGM",
|
||||
PAM: "PAM",
|
||||
}
|
||||
|
||||
func (f Format) String() string {
|
||||
@@ -264,7 +289,6 @@ func Encode(w io.Writer, img image.Image, format Format, opts ...EncodeOption) e
|
||||
//
|
||||
// // Save the image as JPEG with optional quality parameter set to 80.
|
||||
// err := imaging.Save(img, "out.jpg", imaging.JPEGQuality(80))
|
||||
//
|
||||
func Save(img image.Image, filename string, opts ...EncodeOption) (err error) {
|
||||
f, err := FormatFromFilename(filename)
|
||||
if err != nil {
|
||||
@@ -298,129 +322,6 @@ const (
|
||||
orientationRotate90 = 8
|
||||
)
|
||||
|
||||
// readOrientation tries to read the orientation EXIF flag from image data in r.
|
||||
// If the EXIF data block is not found or the orientation flag is not found
|
||||
// or any other error occures while reading the data, it returns the
|
||||
// orientationUnspecified (0) value.
|
||||
func readOrientation(r io.Reader) orientation {
|
||||
const (
|
||||
markerSOI = 0xffd8
|
||||
markerAPP1 = 0xffe1
|
||||
exifHeader = 0x45786966
|
||||
byteOrderBE = 0x4d4d
|
||||
byteOrderLE = 0x4949
|
||||
orientationTag = 0x0112
|
||||
)
|
||||
|
||||
// Check if JPEG SOI marker is present.
|
||||
var soi uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &soi); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if soi != markerSOI {
|
||||
return orientationUnspecified // Missing JPEG SOI marker.
|
||||
}
|
||||
|
||||
// Find JPEG APP1 marker.
|
||||
for {
|
||||
var marker, size uint16
|
||||
if err := binary.Read(r, binary.BigEndian, &marker); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if err := binary.Read(r, binary.BigEndian, &size); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if marker>>8 != 0xff {
|
||||
return orientationUnspecified // Invalid JPEG marker.
|
||||
}
|
||||
if marker == markerAPP1 {
|
||||
break
|
||||
}
|
||||
if size < 2 {
|
||||
return orientationUnspecified // Invalid block size.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(size-2)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
}
|
||||
|
||||
// Check if EXIF header is present.
|
||||
var header uint32
|
||||
if err := binary.Read(r, binary.BigEndian, &header); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if header != exifHeader {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read byte order information.
|
||||
var (
|
||||
byteOrderTag uint16
|
||||
byteOrder binary.ByteOrder
|
||||
)
|
||||
if err := binary.Read(r, binary.BigEndian, &byteOrderTag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
switch byteOrderTag {
|
||||
case byteOrderBE:
|
||||
byteOrder = binary.BigEndian
|
||||
case byteOrderLE:
|
||||
byteOrder = binary.LittleEndian
|
||||
default:
|
||||
return orientationUnspecified // Invalid byte order flag.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 2); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Skip the EXIF offset.
|
||||
var offset uint32
|
||||
if err := binary.Read(r, byteOrder, &offset); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if offset < 8 {
|
||||
return orientationUnspecified // Invalid offset value.
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, int64(offset-8)); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Read the number of tags.
|
||||
var numTags uint16
|
||||
if err := binary.Read(r, byteOrder, &numTags); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
|
||||
// Find the orientation tag.
|
||||
for i := 0; i < int(numTags); i++ {
|
||||
var tag uint16
|
||||
if err := binary.Read(r, byteOrder, &tag); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if tag != orientationTag {
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 10); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
continue
|
||||
}
|
||||
if _, err := io.CopyN(ioutil.Discard, r, 6); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
var val uint16
|
||||
if err := binary.Read(r, byteOrder, &val); err != nil {
|
||||
return orientationUnspecified
|
||||
}
|
||||
if val < 1 || val > 8 {
|
||||
return orientationUnspecified // Invalid tag value.
|
||||
}
|
||||
return orientation(val)
|
||||
}
|
||||
return orientationUnspecified // Missing orientation tag.
|
||||
}
|
||||
|
||||
// fixOrientation applies a transform to img corresponding to the given orientation flag.
|
||||
func fixOrientation(img image.Image, o orientation) image.Image {
|
||||
switch o {
|
||||
|
||||
vendor/github.com/kovidgoyal/imaging/netpbm.go (generated, vendored, new file, 510 lines)
@@ -0,0 +1,510 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"image"
|
||||
"image/color"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var _ = fmt.Print
|
||||
|
||||
// skip_comments reads ahead past any comment lines (starting with #) and returns the first non-comment, non-empty line.
|
||||
func skip_comments(br *bufio.Reader) (string, error) {
|
||||
for {
|
||||
line, err := br.ReadString('\n')
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
continue
|
||||
}
|
||||
return line, nil
|
||||
}
|
||||
}
|
||||
|
||||
type data_type int
|
||||
|
||||
const (
|
||||
rgb data_type = iota
|
||||
blackwhite
|
||||
grayscale
|
||||
)
|
||||
|
||||
type header struct {
|
||||
format string
|
||||
width, height, num_channels uint
|
||||
maxval uint32
|
||||
has_alpha bool
|
||||
data_type data_type
|
||||
}
|
||||
|
||||
func (h header) bytes_per_channel() uint {
|
||||
if h.maxval > 255 {
|
||||
return 2
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (h header) num_bytes_per_pixel() uint {
|
||||
return h.num_channels * h.bytes_per_channel()
|
||||
}
|
||||
|
||||
func read_ppm_header(br *bufio.Reader, magic string) (ans header, err error) {
|
||||
ans.format = magic
|
||||
required_num_fields := 3
|
||||
switch magic {
|
||||
case "P1", "P4":
|
||||
ans.data_type = blackwhite
|
||||
ans.num_channels = 1
|
||||
ans.maxval = 1
|
||||
required_num_fields = 2
|
||||
case "P2", "P5":
|
||||
ans.data_type = grayscale
|
||||
ans.num_channels = 1
|
||||
default:
|
||||
ans.data_type = rgb
|
||||
ans.num_channels = 3
|
||||
}
|
||||
var fields []uint
|
||||
for len(fields) < required_num_fields {
|
||||
var line string
|
||||
if line, err = skip_comments(br); err != nil {
|
||||
return
|
||||
}
|
||||
for x := range strings.FieldsSeq(line) {
|
||||
var val uint64
|
||||
if val, err = strconv.ParseUint(x, 10, 0); err != nil {
|
||||
return
|
||||
}
|
||||
fields = append(fields, uint(val))
|
||||
}
|
||||
}
|
||||
ans.width = fields[0]
|
||||
ans.height = fields[1]
|
||||
if required_num_fields > 2 {
|
||||
ans.maxval = uint32(fields[2])
|
||||
}
|
||||
if ans.maxval > 65535 {
|
||||
return ans, fmt.Errorf("header specifies a maximum value %d larger than 65535", ans.maxval)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func read_pam_header(br *bufio.Reader) (ans header, err error) {
|
||||
ans.format = "P7"
|
||||
ans.data_type = rgb
|
||||
ans.num_channels = 3
|
||||
for {
|
||||
line, err := skip_comments(br)
|
||||
if err != nil {
|
||||
return ans, err
|
||||
}
|
||||
if line == "ENDHDR" {
|
||||
break
|
||||
}
|
||||
prefix, payload, found := strings.Cut(line, " ")
|
||||
if !found {
|
||||
return ans, fmt.Errorf("invalid line in header: %#v", line)
|
||||
}
|
||||
switch prefix {
|
||||
case "WIDTH":
|
||||
w, err := strconv.ParseUint(payload, 10, 0)
|
||||
if err != nil {
|
||||
return ans, fmt.Errorf("invalid width %#v in header: %w", payload, err)
|
||||
}
|
||||
ans.width = uint(w)
|
||||
case "HEIGHT":
|
||||
w, err := strconv.ParseUint(payload, 10, 0)
|
||||
if err != nil {
|
||||
return ans, fmt.Errorf("invalid height %#v in header: %w", payload, err)
|
||||
}
|
||||
ans.height = uint(w)
|
||||
case "MAXVAL":
|
||||
w, err := strconv.ParseUint(payload, 10, 0)
|
||||
if err != nil {
|
||||
return ans, fmt.Errorf("invalid maxval %#v in header: %w", payload, err)
|
||||
}
|
||||
ans.maxval = uint32(w)
|
||||
case "DEPTH":
|
||||
w, err := strconv.ParseUint(payload, 10, 0)
|
||||
if err != nil {
|
||||
return ans, fmt.Errorf("invalid depth %#v in header: %w", payload, err)
|
||||
}
|
||||
if w == 0 || w > 4 {
|
||||
return ans, fmt.Errorf("invalid depth %d in header", w)
|
||||
}
|
||||
ans.num_channels = uint(w)
|
||||
case "TUPLTYPE":
|
||||
switch payload {
|
||||
case "BLACKANDWHITE":
|
||||
ans.data_type = blackwhite
|
||||
case "BLACKANDWHITE_ALPHA":
|
||||
ans.has_alpha = true
|
||||
ans.data_type = blackwhite
|
||||
case "GRAYSCALE":
|
||||
ans.data_type = grayscale
|
||||
case "GRAYSCALE_ALPHA":
|
||||
ans.has_alpha = true
|
||||
ans.data_type = grayscale
|
||||
case "RGB":
|
||||
case "RGB_ALPHA":
|
||||
ans.has_alpha = true
|
||||
default:
|
||||
return ans, fmt.Errorf("invalid TUPLTYPE in header: %#v", payload)
|
||||
}
|
||||
}
|
||||
}
|
||||
if ans.width == 0 || ans.height == 0 || ans.maxval == 0 {
|
||||
return ans, fmt.Errorf("header does not specify width, height and maximum value")
|
||||
}
|
||||
ok := true
|
||||
switch ans.data_type {
|
||||
case rgb:
|
||||
ok = (!ans.has_alpha && ans.num_channels == 3) || (ans.has_alpha && ans.num_channels == 4)
|
||||
case blackwhite, grayscale:
|
||||
ok = (!ans.has_alpha && ans.num_channels == 1) || (ans.has_alpha && ans.num_channels == 2)
|
||||
}
|
||||
if !ok {
|
||||
return ans, fmt.Errorf("header specified depth: %d does not match TUPLTYPE", ans.num_channels)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func read_header(br *bufio.Reader) (ans header, err error) {
|
||||
b := []byte{0, 0}
|
||||
if _, err = io.ReadFull(br, b); err != nil {
|
||||
return ans, err
|
||||
}
|
||||
magic := string(b)
|
||||
switch magic {
|
||||
case "P1", "P2", "P3", "P4", "P5", "P6":
|
||||
return read_ppm_header(br, magic)
|
||||
case "P7":
|
||||
return read_pam_header(br)
|
||||
default:
|
||||
err = fmt.Errorf("unsupported netPBM format: %#v", magic)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func ascii_range_over_values(br *bufio.Reader, h header, callback func(uint32, []uint8) []uint8) (ans []uint8, err error) {
|
||||
anssz := h.width * h.height * h.num_bytes_per_pixel()
|
||||
ans = make([]uint8, 0, anssz)
|
||||
for uint(len(ans)) < anssz {
|
||||
token, err := br.ReadString(' ')
|
||||
if err != nil && err != io.EOF {
|
||||
return nil, err
|
||||
}
|
||||
for field := range strings.FieldsSeq(token) {
|
||||
if val, perr := strconv.ParseUint(field, 10, 16); perr == nil {
|
||||
ans = callback(uint32(val), ans)
|
||||
}
|
||||
}
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func decode_rgb_ascii(br *bufio.Reader, h header) (ans []byte, err error) {
|
||||
mult := uint32(255)
|
||||
if h.maxval > 255 {
|
||||
mult = 65535
|
||||
}
|
||||
anssz := h.width * h.height * h.num_bytes_per_pixel()
|
||||
if mult == 255 {
|
||||
ans, err = ascii_range_over_values(br, h, func(val uint32, ans []uint8) []uint8 {
|
||||
ch := (uint32(val) * mult) / h.maxval
|
||||
return append(ans, uint8(ch))
|
||||
})
|
||||
} else {
|
||||
ans, err = ascii_range_over_values(br, h, func(val uint32, ans []uint8) []uint8 {
|
||||
ch := (uint32(val) * mult) / h.maxval
|
||||
ans = append(ans, uint8(ch))
|
||||
if len(ans)%6 == 0 { // alpha is always 255
|
||||
ans = append(ans, 255, 255)
|
||||
}
|
||||
return ans
|
||||
})
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if uint(len(ans)) < anssz {
|
||||
return nil, errors.New("insufficient color data present in PPM file")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func DecodeNetPBMConfig(r io.Reader) (cfg image.Config, err error) {
|
||||
br := bufio.NewReader(r)
|
||||
h, err := read_header(br)
|
||||
if err != nil {
|
||||
return cfg, err
|
||||
}
|
||||
cfg.Width = int(h.width)
|
||||
cfg.Height = int(h.height)
|
||||
cfg.ColorModel = NRGBModel
|
||||
switch h.data_type {
|
||||
case blackwhite, grayscale:
|
||||
if h.has_alpha {
|
||||
if h.maxval > 255 {
|
||||
cfg.ColorModel = color.NRGBA64Model
|
||||
} else {
|
||||
cfg.ColorModel = color.NRGBAModel
|
||||
}
|
||||
} else {
|
||||
if h.maxval > 255 {
|
||||
cfg.ColorModel = color.Gray16Model
|
||||
} else {
|
||||
cfg.ColorModel = color.GrayModel
|
||||
}
|
||||
}
|
||||
default:
|
||||
if h.has_alpha {
|
||||
if h.maxval > 255 {
|
||||
cfg.ColorModel = color.NRGBA64Model
|
||||
} else {
|
||||
cfg.ColorModel = color.NRGBAModel
|
||||
}
|
||||
} else {
|
||||
if h.maxval > 255 {
|
||||
cfg.ColorModel = color.NRGBA64Model
|
||||
} else {
|
||||
cfg.ColorModel = NRGBModel
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func decode_black_white_ascii(br *bufio.Reader, h header) (img image.Image, err error) {
|
||||
r := image.Rect(0, 0, int(h.width), int(h.height))
|
||||
g := &image.Gray{Stride: r.Dx(), Rect: r}
|
||||
g.Pix, err = ascii_range_over_values(br, h, func(val uint32, ans []uint8) []uint8 {
|
||||
var c uint8 = 255 * uint8(1-(val&1))
|
||||
return append(ans, c)
|
||||
})
|
||||
return g, err
|
||||
}
|
||||
|
||||
func decode_grayscale_ascii(br *bufio.Reader, h header) (img image.Image, err error) {
|
||||
r := image.Rect(0, 0, int(h.width), int(h.height))
|
||||
if h.maxval > 255 {
|
||||
g := &image.Gray16{Stride: 2 * r.Dx(), Rect: r}
|
||||
g.Pix, err = ascii_range_over_values(br, h, func(val uint32, ans []uint8) []uint8 {
|
||||
c := uint16(val * 65535 / h.maxval)
|
||||
return append(ans, uint8(c>>8), uint8(c))
|
||||
})
|
||||
return g, err
|
||||
} else {
|
||||
g := &image.Gray{Stride: r.Dx(), Rect: r}
|
||||
g.Pix, err = ascii_range_over_values(br, h, func(val uint32, ans []uint8) []uint8 {
|
||||
c := uint8(val * 255 / h.maxval)
|
||||
return append(ans, c)
|
||||
})
|
||||
return g, err
|
||||
}
|
||||
}
|
||||
|
||||
// Consume whitespace after header (per spec, it's a single whitespace, but can be more)
|
||||
func skip_whitespace_before_pixel_data(br *bufio.Reader, num_of_bytes_needed uint) ([]uint8, error) {
|
||||
for {
|
||||
b, err := br.Peek(1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if b[0] == '\n' || b[0] == '\r' || b[0] == '\t' || b[0] == ' ' {
|
||||
br.ReadByte()
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
ans := make([]byte, num_of_bytes_needed)
|
||||
_, err := io.ReadFull(br, ans)
|
||||
return ans, err
|
||||
}
|
||||
|
||||
func rescale(v uint32, num, den uint32) uint32 {
|
||||
return (v * num) / den
|
||||
}
|
||||
|
||||
func rescale_binary_data(b []uint8, num, den uint32) error {
|
||||
return run_in_parallel_over_range(0, func(start, end int) {
|
||||
for i := start; i < end; i++ {
|
||||
b[i] = uint8(rescale(uint32(b[i]), num, den))
|
||||
}
|
||||
}, 0, len(b))
|
||||
}
|
||||
|
||||
func rescale_binary_data16(b []uint8, num, den uint32) error {
|
||||
if len(b)&1 != 0 {
|
||||
return fmt.Errorf("pixel data is not a multiple of two but uses 16 bits per channel")
|
||||
}
|
||||
return run_in_parallel_over_range(0, func(start, end int) {
|
||||
start *= 2
|
||||
end *= 2
|
||||
for i := start; i < end; i += 2 {
|
||||
v := uint32((uint16(b[i]) << 8) | uint16(b[i+1]))
|
||||
v = rescale(v, num, den)
|
||||
b[i] = uint8(v >> 8)
|
||||
b[i+1] = uint8(v)
|
||||
}
|
||||
}, 0, len(b)/2)
|
||||
}
|
||||
|
||||
func decode_binary_data(br *bufio.Reader, h header) (ans image.Image, err error) {
|
||||
var binary_data []uint8
|
||||
if binary_data, err = skip_whitespace_before_pixel_data(br, h.width*h.height*h.num_bytes_per_pixel()); err != nil {
|
||||
return
|
||||
}
|
||||
if n := h.num_bytes_per_pixel() * h.width * h.height; uint(len(binary_data)) < n {
|
||||
return nil, fmt.Errorf(
|
||||
"insufficient pixel data for image area and num_channels (%d): %f < %d",
|
||||
h.num_channels, float64(len(binary_data))/float64(h.width*h.height), n/(h.width*h.height))
|
||||
}
|
||||
switch {
|
||||
case h.maxval < 255:
|
||||
if err = rescale_binary_data(binary_data, 255, h.maxval); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case 255 < h.maxval && h.maxval < 65535:
|
||||
if err = rescale_binary_data16(binary_data, 65535, h.maxval); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
r := image.Rect(0, 0, int(h.width), int(h.height))
|
||||
switch h.num_channels {
|
||||
case 1:
|
||||
// bw or gray without alpha
|
||||
if h.maxval > 255 {
|
||||
return &image.Gray16{Rect: r, Stride: r.Dx() * 2, Pix: binary_data}, nil
|
||||
}
|
||||
return &image.Gray{Rect: r, Stride: r.Dx(), Pix: binary_data}, nil
|
||||
case 2:
|
||||
// bw or gray with alpha
|
||||
if h.maxval > 255 {
|
||||
g := image.NewNRGBA64(r)
|
||||
b := g.Pix
|
||||
if err = run_in_parallel_over_range(0, func(start, end int) {
|
||||
for i := start; i < end; i++ {
|
||||
src := binary_data[i*4 : i*4+4]
|
||||
dest := b[i*8 : i*8+8]
|
||||
gray1, gray2 := src[0], src[1]
|
||||
dest[0], dest[1], dest[2], dest[3], dest[4], dest[5] = gray1, gray2, gray1, gray2, gray1, gray2
|
||||
dest[6], dest[7] = src[2], src[3]
|
||||
}
|
||||
}, 0, int(h.width*h.height)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
g := image.NewNRGBA(r)
|
||||
b := g.Pix
|
||||
if err = run_in_parallel_over_range(0, func(start, end int) {
|
||||
for i := start; i < end; i++ {
|
||||
src := binary_data[i*2 : i*2+2]
|
||||
dest := b[i*4 : i*4+4]
|
||||
dest[0], dest[1], dest[2], dest[3] = src[0], src[0], src[0], src[1]
|
||||
}
|
||||
}, 0, int(h.width*h.height)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return g, nil
|
||||
case 3:
|
||||
// RGB without alpha
|
||||
if h.maxval > 255 {
|
||||
g := image.NewNRGBA64(r)
|
||||
b := g.Pix
|
||||
if err = run_in_parallel_over_range(0, func(start, end int) {
|
||||
for i := start; i < end; i++ {
|
||||
src := binary_data[i*6 : i*6+6]
|
||||
dest := b[i*8 : i*8+8]
|
||||
copy(dest[:6], src)
|
||||
dest[6], dest[7] = 255, 255
|
||||
}
|
||||
}, 0, int(h.width*h.height)); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return g, nil
|
||||
}
|
||||
return NewNRGBWithContiguousRGBPixels(binary_data, 0, 0, r.Dx(), r.Dy())
|
||||
case 4:
|
||||
// RGB with alpha
|
||||
if h.maxval <= 255 {
|
||||
return &image.NRGBA{Rect: r, Stride: r.Dx() * int(h.num_bytes_per_pixel()), Pix: binary_data}, nil
|
||||
}
|
||||
return &image.NRGBA64{Rect: r, Stride: r.Dx() * int(h.num_bytes_per_pixel()), Pix: binary_data}, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported number of channels: %d", h.num_channels)
|
||||
}
|
||||
}
|
||||
|
||||
// Decode decodes a PPM image from r and returns it as an image.Image.
|
||||
// Supports both P3 (ASCII) and P6 (binary) variants.
|
||||
func DecodeNetPBM(r io.Reader) (img image.Image, err error) {
|
||||
br := bufio.NewReader(r)
|
||||
h, err := read_header(br)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var binary_data []uint8
|
||||
switch h.format {
|
||||
case "P1":
|
||||
return decode_black_white_ascii(br, h)
|
||||
case "P2":
|
||||
return decode_grayscale_ascii(br, h)
|
||||
case "P3":
|
||||
vals, err := decode_rgb_ascii(br, h)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if h.maxval <= 255 {
|
||||
return NewNRGBWithContiguousRGBPixels(vals, 0, 0, int(h.width), int(h.height))
|
||||
}
|
||||
return &image.NRGBA64{Pix: vals, Stride: int(h.width) * 8, Rect: image.Rect(0, 0, int(h.width), int(h.height))}, nil
|
||||
case "P4":
|
||||
bytes_per_row := (h.width + 7) / 8
|
||||
if binary_data, err = skip_whitespace_before_pixel_data(br, h.height*bytes_per_row); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ans := image.NewGray(image.Rect(0, 0, int(h.width), int(h.height)))
|
||||
i := 0
|
||||
for range h.height {
|
||||
for x := range h.width {
|
||||
byteIdx := x / 8
|
||||
bitIdx := 7 - uint(x%8)
|
||||
bit := (binary_data[byteIdx] >> bitIdx) & 1
|
||||
ans.Pix[i] = (1 - bit) * 255
|
||||
i++
|
||||
}
|
||||
binary_data = binary_data[bytes_per_row:]
|
||||
}
|
||||
if len(binary_data) > 0 {
|
||||
return nil, fmt.Errorf("insufficient color data in netPBM file, need %d more bytes", len(binary_data))
|
||||
}
|
||||
return ans, nil
|
||||
case "P5", "P6", "P7":
|
||||
return decode_binary_data(br, h)
|
||||
default:
|
||||
return nil, fmt.Errorf("invalid format for PPM: %#v", h.format)
|
||||
}
|
||||
}
|
||||
|
||||
// Register this decoder with Go's image package
|
||||
func init() {
|
||||
image.RegisterFormat("pbm", "P1", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("pgm", "P2", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("ppm", "P3", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("pbm", "P4", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("pgm", "P5", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("ppm", "P6", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
image.RegisterFormat("pam", "P7", DecodeNetPBM, DecodeNetPBMConfig)
|
||||
}
|
||||
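The init function above registers the new NetPBM decoders with Go's image package, so PBM/PGM/PPM/PAM files can be decoded through the standard image.Decode path. A hypothetical sketch (the input file name is illustrative and not part of this diff):

package main

import (
	"image"
	"log"
	"os"

	// The blank import runs the package's init(), which registers the
	// PBM/PGM/PPM/PAM decoders shown above with the image package.
	_ "github.com/kovidgoyal/imaging"
)

func main() {
	f, err := os.Open("sample.ppm")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	img, format, err := image.Decode(f)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("decoded %s image of size %v", format, img.Bounds().Size())
}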
vendor/github.com/kovidgoyal/imaging/nrgb.go (generated, vendored, new file, 440 lines)
@@ -0,0 +1,440 @@
|
||||
package imaging
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var _ = fmt.Print
|
||||
|
||||
type NRGBColor struct {
|
||||
R, G, B uint8
|
||||
}
|
||||
|
||||
func (c NRGBColor) AsSharp() string {
|
||||
return fmt.Sprintf("#%02X%02X%02X", c.R, c.G, c.B)
|
||||
}
|
||||
|
||||
func (c NRGBColor) String() string {
|
||||
return fmt.Sprintf("NRGBColor{%02X %02X %02X}", c.R, c.G, c.B)
|
||||
}
|
||||
|
||||
func (c NRGBColor) RGBA() (r, g, b, a uint32) {
|
||||
r = uint32(c.R)
|
||||
r |= r << 8
|
||||
g = uint32(c.G)
|
||||
g |= g << 8
|
||||
b = uint32(c.B)
|
||||
b |= b << 8
|
||||
a = 65535 // (255 << 8 | 255)
|
||||
return
|
||||
}
|
||||
|
||||
// NRGB is an in-memory image whose At method returns NRGBColor values.
|
||||
type NRGB struct {
|
||||
// Pix holds the image's pixels, in R, G, B order. The pixel at
|
||||
// (x, y) starts at Pix[(y-Rect.Min.Y)*Stride + (x-Rect.Min.X)*3].
|
||||
Pix []uint8
|
||||
// Stride is the Pix stride (in bytes) between vertically adjacent pixels.
|
||||
Stride int
|
||||
// Rect is the image's bounds.
|
||||
Rect image.Rectangle
|
||||
}
|
||||
|
||||
func nrgbModel(c color.Color) color.Color {
|
||||
if _, ok := c.(NRGBColor); ok {
|
||||
return c
|
||||
}
|
||||
r, g, b, a := c.RGBA()
|
||||
switch a {
|
||||
case 0xffff:
|
||||
return NRGBColor{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8)}
|
||||
case 0:
|
||||
return NRGBColor{0, 0, 0}
|
||||
default:
|
||||
// Since Color.RGBA returns an alpha-premultiplied color, we should have r <= a && g <= a && b <= a.
|
||||
r = (r * 0xffff) / a
|
||||
g = (g * 0xffff) / a
|
||||
b = (b * 0xffff) / a
|
||||
return NRGBColor{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8)}
|
||||
}
|
||||
}
|
||||
|
||||
var NRGBModel color.Model = color.ModelFunc(nrgbModel)
|
||||
|
||||
func (p *NRGB) ColorModel() color.Model { return NRGBModel }
|
||||
|
||||
func (p *NRGB) Bounds() image.Rectangle { return p.Rect }
|
||||
|
||||
func (p *NRGB) At(x, y int) color.Color {
|
||||
return p.NRGBAt(x, y)
|
||||
}
|
||||
|
||||
func (p *NRGB) NRGBAt(x, y int) NRGBColor {
|
||||
if !(image.Point{x, y}.In(p.Rect)) {
|
||||
return NRGBColor{}
|
||||
}
|
||||
i := p.PixOffset(x, y)
|
||||
s := p.Pix[i : i+3 : i+3] // Small cap improves performance, see https://golang.org/issue/27857
|
||||
return NRGBColor{s[0], s[1], s[2]}
|
||||
}
|
||||
|
||||
// PixOffset returns the index of the first element of Pix that corresponds to
|
||||
// the pixel at (x, y).
|
||||
func (p *NRGB) PixOffset(x, y int) int {
|
||||
return (y-p.Rect.Min.Y)*p.Stride + (x-p.Rect.Min.X)*3
|
||||
}
|
||||
|
||||
func (p *NRGB) Set(x, y int, c color.Color) {
|
||||
if !(image.Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := p.PixOffset(x, y)
|
||||
c1 := NRGBModel.Convert(c).(NRGBColor)
|
||||
s := p.Pix[i : i+3 : i+3] // Small cap improves performance, see https://golang.org/issue/27857
|
||||
s[0] = c1.R
|
||||
s[1] = c1.G
|
||||
s[2] = c1.B
|
||||
}
|
||||
|
||||
func (p *NRGB) SetRGBA64(x, y int, c color.RGBA64) {
|
||||
if !(image.Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
r, g, b, a := uint32(c.R), uint32(c.G), uint32(c.B), uint32(c.A)
|
||||
if (a != 0) && (a != 0xffff) {
|
||||
r = (r * 0xffff) / a
|
||||
g = (g * 0xffff) / a
|
||||
b = (b * 0xffff) / a
|
||||
}
|
||||
i := p.PixOffset(x, y)
|
||||
s := p.Pix[i : i+3 : i+3] // Small cap improves performance, see https://golang.org/issue/27857
|
||||
s[0] = uint8(r >> 8)
|
||||
s[1] = uint8(g >> 8)
|
||||
s[2] = uint8(b >> 8)
|
||||
}
|
||||
|
||||
func (p *NRGB) SetNRGBA(x, y int, c color.NRGBA) {
|
||||
if !(image.Point{x, y}.In(p.Rect)) {
|
||||
return
|
||||
}
|
||||
i := p.PixOffset(x, y)
|
||||
s := p.Pix[i : i+3 : i+3] // Small cap improves performance, see https://golang.org/issue/27857
|
||||
s[0] = c.R
|
||||
s[1] = c.G
|
||||
s[2] = c.B
|
||||
}
|
||||
|
||||
// SubImage returns an image representing the portion of the image p visible
|
||||
// through r. The returned value shares pixels with the original image.
|
||||
func (p *NRGB) SubImage(r image.Rectangle) image.Image {
|
||||
r = r.Intersect(p.Rect)
|
||||
// If r1 and r2 are Rectangles, r1.Intersect(r2) is not guaranteed to be inside
|
||||
// either r1 or r2 if the intersection is empty. Without explicitly checking for
|
||||
// this, the Pix[i:] expression below can panic.
|
||||
if r.Empty() {
|
||||
return &NRGB{}
|
||||
}
|
||||
i := p.PixOffset(r.Min.X, r.Min.Y)
|
||||
return &NRGB{
|
||||
Pix: p.Pix[i:],
|
||||
Stride: p.Stride,
|
||||
Rect: r,
|
||||
}
|
||||
}
|
||||
|
||||
// Opaque scans the entire image and reports whether it is fully opaque.
|
||||
func (p *NRGB) Opaque() bool { return true }
|
||||
|
||||
type scanner_rgb struct {
|
||||
image image.Image
|
||||
w, h int
|
||||
palette []NRGBColor
|
||||
opaque_base []float64
|
||||
opaque_base_uint []uint8
|
||||
}
|
||||
|
||||
func (s scanner_rgb) Bytes_per_channel() int { return 1 }
|
||||
func (s scanner_rgb) Num_of_channels() int { return 3 }
|
||||
func (s scanner_rgb) Bounds() image.Rectangle { return s.image.Bounds() }
|
||||
|
||||
func blend(dest []uint8, base []float64, r, g, b, a uint8) {
|
||||
alpha := float64(a) / 255.0
|
||||
dest[0] = uint8(alpha*float64(r) + (1.0-alpha)*base[0])
|
||||
dest[1] = uint8(alpha*float64(g) + (1.0-alpha)*base[1])
|
||||
dest[2] = uint8(alpha*float64(b) + (1.0-alpha)*base[2])
|
||||
}
|
||||
|
||||
func newScannerRGB(img image.Image, opaque_base NRGBColor) *scanner_rgb {
|
||||
s := &scanner_rgb{
|
||||
image: img, w: img.Bounds().Dx(), h: img.Bounds().Dy(),
|
||||
opaque_base: []float64{float64(opaque_base.R), float64(opaque_base.G), float64(opaque_base.B)}[0:3:3],
|
||||
opaque_base_uint: []uint8{opaque_base.R, opaque_base.G, opaque_base.B}[0:3:3],
|
||||
}
|
||||
if img, ok := img.(*image.Paletted); ok {
|
||||
s.palette = make([]NRGBColor, max(256, len(img.Palette)))
|
||||
d := [3]uint8{0, 0, 0}
|
||||
ds := d[:]
|
||||
for i := 0; i < len(img.Palette); i++ {
|
||||
r, g, b, a := img.Palette[i].RGBA()
|
||||
switch a {
|
||||
case 0:
|
||||
s.palette[i] = opaque_base
|
||||
case 0xffff:
|
||||
s.palette[i] = NRGBColor{R: uint8(r >> 8), G: uint8(g >> 8), B: uint8(b >> 8)}
|
||||
default:
|
||||
blend(ds, s.opaque_base, uint8((r*0xffff/a)>>8), uint8((g*0xffff/a)>>8), uint8((b*0xffff/a)>>8), uint8(a>>8))
|
||||
s.palette[i] = NRGBColor{R: d[0], G: d[1], B: d[2]}
|
||||
}
|
||||
}
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// scan scans the given rectangular region of the image into dst.
|
||||
func (s *scanner_rgb) Scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
switch img := s.image.(type) {
|
||||
case *image.NRGBA:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*4
|
||||
for x := x1; x < x2; x++ {
|
||||
blend(dst[j:j+3:j+3], s.opaque_base, img.Pix[i], img.Pix[i+1], img.Pix[i+2], img.Pix[i+3])
|
||||
j += 3
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.NRGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
blend(dst[j:j+3:j+3], s.opaque_base, img.Pix[i], img.Pix[i+2], img.Pix[i+4], img.Pix[i+6])
|
||||
j += 3
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*4
|
||||
for x := x1; x < x2; x++ {
|
||||
d := dst[j : j+3 : j+3]
|
||||
a := img.Pix[i+3]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = s.opaque_base_uint[0]
|
||||
d[1] = s.opaque_base_uint[1]
|
||||
d[2] = s.opaque_base_uint[2]
|
||||
case 0xff:
|
||||
s := img.Pix[i : i+3 : i+3]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
default:
|
||||
r16 := uint16(img.Pix[i])
|
||||
g16 := uint16(img.Pix[i+1])
|
||||
b16 := uint16(img.Pix[i+2])
|
||||
a16 := uint16(a)
|
||||
blend(d, s.opaque_base, uint8(r16*0xff/a16), uint8(g16*0xff/a16), uint8(b16*0xff/a16), a)
|
||||
}
|
||||
j += 3
|
||||
i += 4
|
||||
}
|
||||
}
|
||||
|
||||
case *image.RGBA64:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*8
|
||||
for x := x1; x < x2; x++ {
|
||||
src := img.Pix[i : i+8 : i+8]
|
||||
d := dst[j : j+3 : j+3]
|
||||
a := src[6]
|
||||
switch a {
|
||||
case 0:
|
||||
d[0] = s.opaque_base_uint[0]
|
||||
d[1] = s.opaque_base_uint[1]
|
||||
d[2] = s.opaque_base_uint[2]
|
||||
case 0xff:
|
||||
d[0] = src[0]
|
||||
d[1] = src[2]
|
||||
d[2] = src[4]
|
||||
default:
|
||||
r32 := uint32(src[0])<<8 | uint32(src[1])
|
||||
g32 := uint32(src[2])<<8 | uint32(src[3])
|
||||
b32 := uint32(src[4])<<8 | uint32(src[5])
|
||||
a32 := uint32(src[6])<<8 | uint32(src[7])
|
||||
blend(d, s.opaque_base, uint8((r32*0xffff/a32)>>8), uint8((g32*0xffff/a32)>>8), uint8((b32*0xffff/a32)>>8), a)
|
||||
}
|
||||
j += 3
|
||||
i += 8
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+3 : j+3]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
j += 3
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Gray16:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1*2
|
||||
for x := x1; x < x2; x++ {
|
||||
c := img.Pix[i]
|
||||
d := dst[j : j+3 : j+3]
|
||||
d[0] = c
|
||||
d[1] = c
|
||||
d[2] = c
|
||||
j += 3
|
||||
i += 2
|
||||
}
|
||||
}
|
||||
|
||||
case *image.YCbCr:
|
||||
j := 0
|
||||
x1 += img.Rect.Min.X
|
||||
x2 += img.Rect.Min.X
|
||||
y1 += img.Rect.Min.Y
|
||||
y2 += img.Rect.Min.Y
|
||||
|
||||
hy := img.Rect.Min.Y / 2
|
||||
hx := img.Rect.Min.X / 2
|
||||
for y := y1; y < y2; y++ {
|
||||
iy := (y-img.Rect.Min.Y)*img.YStride + (x1 - img.Rect.Min.X)
|
||||
|
||||
var yBase int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio422:
|
||||
yBase = (y - img.Rect.Min.Y) * img.CStride
|
||||
case image.YCbCrSubsampleRatio420, image.YCbCrSubsampleRatio440:
|
||||
yBase = (y/2 - hy) * img.CStride
|
||||
}
|
||||
|
||||
for x := x1; x < x2; x++ {
|
||||
var ic int
|
||||
switch img.SubsampleRatio {
|
||||
case image.YCbCrSubsampleRatio444, image.YCbCrSubsampleRatio440:
|
||||
ic = yBase + (x - img.Rect.Min.X)
|
||||
case image.YCbCrSubsampleRatio422, image.YCbCrSubsampleRatio420:
|
||||
ic = yBase + (x/2 - hx)
|
||||
default:
|
||||
ic = img.COffset(x, y)
|
||||
}
|
||||
|
||||
yy1 := int32(img.Y[iy]) * 0x10101
|
||||
cb1 := int32(img.Cb[ic]) - 128
|
||||
cr1 := int32(img.Cr[ic]) - 128
|
||||
|
||||
r := yy1 + 91881*cr1
|
||||
if uint32(r)&0xff000000 == 0 {
|
||||
r >>= 16
|
||||
} else {
|
||||
r = ^(r >> 31)
|
||||
}
|
||||
|
||||
g := yy1 - 22554*cb1 - 46802*cr1
|
||||
if uint32(g)&0xff000000 == 0 {
|
||||
g >>= 16
|
||||
} else {
|
||||
g = ^(g >> 31)
|
||||
}
|
||||
|
||||
b := yy1 + 116130*cb1
|
||||
if uint32(b)&0xff000000 == 0 {
|
||||
b >>= 16
|
||||
} else {
|
||||
b = ^(b >> 31)
|
||||
}
|
||||
|
||||
d := dst[j : j+3 : j+3]
|
||||
d[0] = uint8(r)
|
||||
d[1] = uint8(g)
|
||||
d[2] = uint8(b)
|
||||
|
||||
iy++
|
||||
j += 3
|
||||
}
|
||||
}
|
||||
|
||||
case *image.Paletted:
|
||||
j := 0
|
||||
for y := y1; y < y2; y++ {
|
||||
i := y*img.Stride + x1
|
||||
for x := x1; x < x2; x++ {
|
||||
c := s.palette[img.Pix[i]]
|
||||
d := dst[j : j+3 : j+3]
|
||||
d[0] = c.R
|
||||
d[1] = c.G
|
||||
d[2] = c.B
|
||||
j += 3
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
j := 0
|
||||
b := s.image.Bounds()
|
||||
x1 += b.Min.X
|
||||
x2 += b.Min.X
|
||||
y1 += b.Min.Y
|
||||
y2 += b.Min.Y
|
||||
for y := y1; y < y2; y++ {
|
||||
for x := x1; x < x2; x++ {
|
||||
r16, g16, b16, a16 := s.image.At(x, y).RGBA()
|
||||
d := dst[j : j+3 : j+3]
|
||||
switch a16 {
|
||||
case 0xffff:
|
||||
d[0] = uint8(r16 >> 8)
|
||||
d[1] = uint8(g16 >> 8)
|
||||
d[2] = uint8(b16 >> 8)
|
||||
case 0:
|
||||
d[0] = s.opaque_base_uint[0]
|
||||
d[1] = s.opaque_base_uint[1]
|
||||
d[2] = s.opaque_base_uint[2]
|
||||
default:
|
||||
blend(d, s.opaque_base, uint8(((r16*0xffff)/a16)>>8), uint8(((g16*0xffff)/a16)>>8), uint8(((b16*0xffff)/a16)>>8), uint8(a16>>8))
|
||||
}
|
||||
j += 3
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func NewNRGB(r image.Rectangle) *NRGB {
|
||||
return &NRGB{
|
||||
Pix: make([]uint8, 3*r.Dx()*r.Dy()),
|
||||
Stride: 3 * r.Dx(),
|
||||
Rect: r,
|
||||
}
|
||||
}
|
||||
|
||||
func NewNRGBWithContiguousRGBPixels(p []byte, left, top, width, height int) (*NRGB, error) {
|
||||
const bpp = 3
|
||||
if expected := bpp * width * height; expected != len(p) {
|
||||
return nil, fmt.Errorf("the image width and height dont match the size of the specified pixel data: width=%d height=%d sz=%d != %d", width, height, len(p), expected)
|
||||
}
|
||||
return &NRGB{
|
||||
Pix: p,
|
||||
Stride: bpp * width,
|
||||
Rect: image.Rectangle{image.Point{left, top}, image.Point{left + width, top + height}},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func NewNRGBScanner(source_image image.Image, opaque_base NRGBColor) Scanner {
|
||||
return newScannerRGB(source_image, opaque_base)
|
||||
}
|
||||
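NRGB stores three bytes per pixel and is always fully opaque; a short sketch showing construction and a round trip through the standard color.Color interface:

package main

import (
	"fmt"
	"image"
	"image/color"

	"github.com/kovidgoyal/imaging"
)

func main() {
	img := imaging.NewNRGB(image.Rect(0, 0, 2, 2))
	img.Set(1, 1, color.NRGBA{R: 10, G: 20, B: 30, A: 255}) // converted via NRGBModel
	c := img.NRGBAt(1, 1)
	fmt.Println(c.AsSharp()) // #0A141E
}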
41
vendor/github.com/kovidgoyal/imaging/prism/meta/autometa/autometa.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
||||
package autometa
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/kovidgoyal/imaging/prism/meta"
|
||||
"github.com/kovidgoyal/imaging/prism/meta/jpegmeta"
|
||||
"github.com/kovidgoyal/imaging/prism/meta/pngmeta"
|
||||
"github.com/kovidgoyal/imaging/prism/meta/webpmeta"
|
||||
"github.com/kovidgoyal/imaging/streams"
|
||||
)
|
||||
|
||||
// Load loads the metadata for an image stream, which may be one of the
|
||||
// supported image formats.
|
||||
//
|
||||
// Only as much of the stream is consumed as necessary to extract the metadata;
|
||||
// the returned stream contains a buffered copy of the consumed data such that
|
||||
// reading from it will produce the same results as fully reading the input
|
||||
// stream. This provides a convenient way to load the full image after loading
|
||||
// the metadata.
|
||||
//
|
||||
// An error is returned if basic metadata could not be extracted. The returned
|
||||
// stream still provides the full image data.
|
||||
func Load(r io.Reader) (md *meta.Data, imgStream io.Reader, err error) {
|
||||
loaders := []func(io.Reader) (*meta.Data, error){
|
||||
pngmeta.ExtractMetadata,
|
||||
jpegmeta.ExtractMetadata,
|
||||
webpmeta.ExtractMetadata,
|
||||
}
|
||||
for _, loader := range loaders {
|
||||
r, err = streams.CallbackWithSeekable(r, func(r io.Reader) (err error) {
|
||||
md, err = loader(r)
|
||||
return
|
||||
})
|
||||
if err == nil {
|
||||
return md, r, nil
|
||||
}
|
||||
}
|
||||
return nil, r, fmt.Errorf("unrecognised image format")
|
||||
}
|
||||
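Because the returned stream replays the bytes consumed while probing, the metadata pass and the full decode can share one non-seekable reader. An illustrative sketch (the file name is hypothetical):

package main

import (
	"fmt"
	"image"
	_ "image/jpeg" // register a decoder for the subsequent full decode
	"os"

	"github.com/kovidgoyal/imaging/prism/meta/autometa"
)

func main() {
	f, err := os.Open("photo.jpg") // hypothetical input file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	md, stream, err := autometa.Load(f)
	if err != nil {
		panic(err)
	}
	fmt.Println(md.Format, md.PixelWidth, md.PixelHeight)

	img, _, err := image.Decode(stream) // stream replays the probed bytes
	if err != nil {
		panic(err)
	}
	fmt.Println(img.Bounds())
}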
3
vendor/github.com/kovidgoyal/imaging/prism/meta/autometa/doc.go
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Package autometa provides support for embedded metadata and automatic
|
||||
// detection of image formats.
|
||||
package autometa
|
||||
54
vendor/github.com/kovidgoyal/imaging/prism/meta/data.go
generated
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
package meta
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
|
||||
"github.com/kovidgoyal/imaging/prism/meta/icc"
|
||||
)
|
||||
|
||||
var _ = fmt.Println
|
||||
|
||||
// Data represents the metadata for an image.
|
||||
type Data struct {
|
||||
Format ImageFormat
|
||||
PixelWidth uint32
|
||||
PixelHeight uint32
|
||||
BitsPerComponent uint32
|
||||
ExifData []byte
|
||||
iccProfileData []byte
|
||||
iccProfileErr error
|
||||
}
|
||||
|
||||
// ICCProfile returns an extracted ICC profile from this metadata.
|
||||
//
|
||||
// An error is returned if the ICC profile could not be correctly parsed.
|
||||
//
|
||||
// If no profile data was found, nil is returned without an error.
|
||||
func (md *Data) ICCProfile() (*icc.Profile, error) {
|
||||
if md.iccProfileData == nil {
|
||||
return nil, md.iccProfileErr
|
||||
}
|
||||
|
||||
return icc.NewProfileReader(bytes.NewReader(md.iccProfileData)).ReadProfile()
|
||||
}
|
||||
|
||||
// ICCProfileData returns the raw ICC profile data from this metadata.
|
||||
//
|
||||
// An error is returned if the ICC profile could not be correctly extracted from
|
||||
// the image.
|
||||
//
|
||||
// If no profile data was found, nil is returned without an error.
|
||||
func (md *Data) ICCProfileData() ([]byte, error) {
|
||||
return md.iccProfileData, md.iccProfileErr
|
||||
}
|
||||
|
||||
func (md *Data) SetICCProfileData(data []byte) {
|
||||
md.iccProfileData = data
|
||||
md.iccProfileErr = nil
|
||||
}
|
||||
|
||||
func (md *Data) SetICCProfileError(err error) {
|
||||
md.iccProfileData = nil
|
||||
md.iccProfileErr = err
|
||||
}
|
||||
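ICCProfile distinguishes "no profile present" (nil profile, nil error) from "profile present but unparsable" (nil profile, non-nil error), so callers should check both. A minimal sketch of that handling, written as a hypothetical helper taking a *meta.Data obtained from one of the extractors:

package example

import (
	"fmt"

	"github.com/kovidgoyal/imaging/prism/meta"
)

// describeProfile is a hypothetical helper illustrating the three possible outcomes.
func describeProfile(md *meta.Data) {
	profile, err := md.ICCProfile()
	switch {
	case err != nil:
		fmt.Println("embedded ICC profile could not be parsed:", err)
	case profile == nil:
		fmt.Println("image carries no ICC profile")
	default:
		desc, _ := profile.Description()
		fmt.Println("ICC profile:", desc, "well known:", profile.WellKnownProfile())
	}
}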
2
vendor/github.com/kovidgoyal/imaging/prism/meta/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package meta and its subpackages provide support for embedded image metadata.
|
||||
package meta
|
||||
90
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/colorspace.go
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
package icc
|
||||
|
||||
import "fmt"
|
||||
|
||||
type ColorSpace uint32
|
||||
|
||||
const (
|
||||
ColorSpaceXYZ ColorSpace = 0x58595A20 // 'XYZ '
|
||||
ColorSpaceLab ColorSpace = 0x4C616220 // 'Lab '
|
||||
ColorSpaceLuv ColorSpace = 0x4C757620 // 'Luv '
|
||||
ColorSpaceYCbCr ColorSpace = 0x59436272 // 'YCbr'
|
||||
ColorSpaceYxy ColorSpace = 0x59787920 // 'Yxy '
|
||||
ColorSpaceRGB ColorSpace = 0x52474220 // 'RGB '
|
||||
ColorSpaceGray ColorSpace = 0x47524159 // 'Gray'
|
||||
ColorSpaceHSV ColorSpace = 0x48535620 // 'HSV '
|
||||
ColorSpaceHLS ColorSpace = 0x484C5320 // 'HLS '
|
||||
ColorSpaceCMYK ColorSpace = 0x434D594B // 'CMYK'
|
||||
ColorSpaceCMY ColorSpace = 0x434D5920 // 'CMY '
|
||||
ColorSpace2Color ColorSpace = 0x32434C52 // '2CLR'
|
||||
ColorSpace3Color ColorSpace = 0x33434C52 // '3CLR'
|
||||
ColorSpace4Color ColorSpace = 0x34434C52 // '4CLR'
|
||||
ColorSpace5Color ColorSpace = 0x35434C52 // '5CLR'
|
||||
ColorSpace6Color ColorSpace = 0x36434C52 // '6CLR'
|
||||
ColorSpace7Color ColorSpace = 0x37434C52 // '7CLR'
|
||||
ColorSpace8Color ColorSpace = 0x38434C52 // '8CLR'
|
||||
ColorSpace9Color ColorSpace = 0x39434C52 // '9CLR'
|
||||
ColorSpace10Color ColorSpace = 0x41434C52 // 'ACLR'
|
||||
ColorSpace11Color ColorSpace = 0x42434C52 // 'BCLR'
|
||||
ColorSpace12Color ColorSpace = 0x43434C52 // 'CCLR'
|
||||
ColorSpace13Color ColorSpace = 0x44434C52 // 'DCLR'
|
||||
ColorSpace14Color ColorSpace = 0x45434C52 // 'ECLR'
|
||||
ColorSpace15Color ColorSpace = 0x46434C52 // 'FCLR'
|
||||
)
|
||||
|
||||
func (cs ColorSpace) String() string {
|
||||
switch cs {
|
||||
case ColorSpaceXYZ:
|
||||
return "XYZ"
|
||||
case ColorSpaceLab:
|
||||
return "Lab"
|
||||
case ColorSpaceLuv:
|
||||
return "Luv"
|
||||
case ColorSpaceYCbCr:
|
||||
return "YCbCr"
|
||||
case ColorSpaceYxy:
|
||||
return "Yxy"
|
||||
case ColorSpaceRGB:
|
||||
return "RGB"
|
||||
case ColorSpaceGray:
|
||||
return "Gray"
|
||||
case ColorSpaceHSV:
|
||||
return "HSV"
|
||||
case ColorSpaceHLS:
|
||||
return "HLS"
|
||||
case ColorSpaceCMYK:
|
||||
return "CMYK"
|
||||
case ColorSpaceCMY:
|
||||
return "CMY"
|
||||
case ColorSpace2Color:
|
||||
return "2 color"
|
||||
case ColorSpace3Color:
|
||||
return "3 color"
|
||||
case ColorSpace4Color:
|
||||
return "4 color"
|
||||
case ColorSpace5Color:
|
||||
return "5 color"
|
||||
case ColorSpace6Color:
|
||||
return "6 color"
|
||||
case ColorSpace7Color:
|
||||
return "7 color"
|
||||
case ColorSpace8Color:
|
||||
return "8 color"
|
||||
case ColorSpace9Color:
|
||||
return "9 color"
|
||||
case ColorSpace10Color:
|
||||
return "10 color"
|
||||
case ColorSpace11Color:
|
||||
return "11 color"
|
||||
case ColorSpace12Color:
|
||||
return "12 color"
|
||||
case ColorSpace13Color:
|
||||
return "13 color"
|
||||
case ColorSpace14Color:
|
||||
return "14 color"
|
||||
case ColorSpace15Color:
|
||||
return "15 color"
|
||||
default:
|
||||
return fmt.Sprintf("Unknown (%s)", Signature(cs))
|
||||
}
|
||||
}
|
||||
36
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/deviceclass.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
package icc
|
||||
|
||||
import "fmt"
|
||||
|
||||
type DeviceClass uint32
|
||||
|
||||
const (
|
||||
DeviceClassInput DeviceClass = 0x73636E72 // 'scnr'
|
||||
DeviceClassDisplay DeviceClass = 0x6D6E7472 // 'mntr'
|
||||
DeviceClassOutput DeviceClass = 0x70727472 // 'prtr'
|
||||
DeviceClassLink DeviceClass = 0x6C696E6B // 'link'
|
||||
DeviceClassColorSpace DeviceClass = 0x73706163 // 'spac'
|
||||
DeviceClassAbstract DeviceClass = 0x61627374 // 'abst'
|
||||
DeviceClassNamedColor DeviceClass = 0x6E6D636C // 'nmcl'
|
||||
)
|
||||
|
||||
func (dc DeviceClass) String() string {
|
||||
switch dc {
|
||||
case DeviceClassInput:
|
||||
return "Input"
|
||||
case DeviceClassDisplay:
|
||||
return "Display"
|
||||
case DeviceClassOutput:
|
||||
return "Output"
|
||||
case DeviceClassLink:
|
||||
return "Device link"
|
||||
case DeviceClassColorSpace:
|
||||
return "Color space"
|
||||
case DeviceClassAbstract:
|
||||
return "Abstract"
|
||||
case DeviceClassNamedColor:
|
||||
return "Named color"
|
||||
default:
|
||||
return fmt.Sprintf("Unknown (%s)", Signature(dc))
|
||||
}
|
||||
}
|
||||
2
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
// Package icc provides support for working with ICC colour profile data.
|
||||
package icc
|
||||
44
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/header.go
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
package icc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Header struct {
|
||||
ProfileSize uint32
|
||||
PreferredCMM Signature
|
||||
Version Version
|
||||
DeviceClass DeviceClass
|
||||
DataColorSpace ColorSpace
|
||||
ProfileConnectionSpace ColorSpace
|
||||
CreatedAtRaw [6]uint16
|
||||
FileSignature Signature
|
||||
PrimaryPlatform PrimaryPlatform
|
||||
Flags uint32
|
||||
DeviceManufacturer Signature
|
||||
DeviceModel Signature
|
||||
DeviceAttributes uint64
|
||||
RenderingIntent RenderingIntent
|
||||
PCSIlluminant [3]uint32
|
||||
ProfileCreator Signature
|
||||
ProfileID [16]byte
|
||||
Reserved [28]byte
|
||||
}
|
||||
|
||||
func (h Header) CreatedAt() time.Time {
|
||||
b := h.CreatedAtRaw
|
||||
return time.Date(int(b[0]), time.Month(b[1]), int(b[2]), int(b[3]), int(b[4]), int(b[5]), 0, time.UTC)
|
||||
}
|
||||
|
||||
func (h Header) Embedded() bool {
|
||||
return (h.Flags >> 31) != 0
|
||||
}
|
||||
|
||||
func (h Header) DependsOnEmbeddedData() bool {
|
||||
return (h.Flags>>30)&1 != 0
|
||||
}
|
||||
|
||||
func (h Header) String() string {
|
||||
return fmt.Sprintf("Header{PreferredCMM: %s, Version: %s, DeviceManufacturer: %s, DeviceModel: %s, ProfileCreator: %s, RenderingIntent: %s, CreatedAt: %v}", h.PreferredCMM, h.Version, h.DeviceManufacturer, h.DeviceModel, h.ProfileCreator, h.RenderingIntent, h.CreatedAt())
|
||||
}
|
||||
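CreatedAtRaw holds the ICC dateTimeNumber as six big-endian uint16 values (year, month, day, hour, minute, second); CreatedAt simply lifts them into a time.Time, and the flag accessors read bits 31 and 30 of Flags. A small sketch with a made-up header value:

package main

import (
	"fmt"

	"github.com/kovidgoyal/imaging/prism/meta/icc"
)

func main() {
	h := icc.Header{CreatedAtRaw: [6]uint16{2023, 7, 1, 12, 30, 0}}
	fmt.Println(h.CreatedAt()) // 2023-07-01 12:30:00 +0000 UTC
	fmt.Println(h.Embedded())  // false: bit 31 of Flags is not set
}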
30
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/primaryplatform.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
package icc
|
||||
|
||||
import "fmt"
|
||||
|
||||
type PrimaryPlatform uint32
|
||||
|
||||
const (
|
||||
PrimaryPlatformNone PrimaryPlatform = 0x00000000
|
||||
PrimaryPlatformApple PrimaryPlatform = 0x4150504C // 'AAPL'
|
||||
PrimaryPlatformMicrosoft PrimaryPlatform = 0x4D534654 // 'MSFT'
|
||||
PrimaryPlatformSGI PrimaryPlatform = 0x53474920 // 'SGI '
|
||||
PrimaryPlatformSun PrimaryPlatform = 0x53554E57 // 'SUNW'
|
||||
)
|
||||
|
||||
func (pp PrimaryPlatform) String() string {
|
||||
switch pp {
|
||||
case PrimaryPlatformNone:
|
||||
return "None"
|
||||
case PrimaryPlatformApple:
|
||||
return "Apple Computer, Inc."
|
||||
case PrimaryPlatformMicrosoft:
|
||||
return "Microsoft Corporation"
|
||||
case PrimaryPlatformSGI:
|
||||
return "Silicon Graphics, Inc."
|
||||
case PrimaryPlatformSun:
|
||||
return "Sun Microsystems, Inc."
|
||||
default:
|
||||
return fmt.Sprintf("Unknown (%d)", Signature(pp))
|
||||
}
|
||||
}
|
||||
100
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/profile.go
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
package icc
|
||||
|
||||
type WellKnownProfile int
|
||||
|
||||
const (
|
||||
UnknownProfile WellKnownProfile = iota
|
||||
SRGBProfile
|
||||
AdobeRGBProfile
|
||||
PhotoProProfile
|
||||
DisplayP3Profile
|
||||
)
|
||||
|
||||
func WellKnownProfileFromDescription(x string) WellKnownProfile {
|
||||
switch x {
|
||||
case "sRGB IEC61966-2.1", "sRGB_ICC_v4_Appearance.icc":
|
||||
return SRGBProfile
|
||||
case "Adobe RGB (1998)":
|
||||
return AdobeRGBProfile
|
||||
case "Display P3":
|
||||
return DisplayP3Profile
|
||||
case "ProPhoto RGB":
|
||||
return PhotoProProfile
|
||||
default:
|
||||
return UnknownProfile
|
||||
}
|
||||
}
|
||||
|
||||
func (p WellKnownProfile) String() string {
|
||||
switch p {
|
||||
case SRGBProfile:
|
||||
return "sRGB IEC61966-2.1"
|
||||
case AdobeRGBProfile:
|
||||
return "Adobe RGB (1998)"
|
||||
case PhotoProProfile:
|
||||
return "ProPhoto RGB"
|
||||
case DisplayP3Profile:
|
||||
return "Display P3"
|
||||
default:
|
||||
return "Unknown Profile"
|
||||
}
|
||||
}
|
||||
|
||||
type Profile struct {
|
||||
Header Header
|
||||
TagTable TagTable
|
||||
}
|
||||
|
||||
func (p *Profile) Description() (string, error) {
|
||||
return p.TagTable.getProfileDescription()
|
||||
}
|
||||
|
||||
func (p *Profile) DeviceManufacturerDescription() (string, error) {
|
||||
return p.TagTable.getDeviceManufacturerDescription()
|
||||
}
|
||||
|
||||
func (p *Profile) DeviceModelDescription() (string, error) {
|
||||
return p.TagTable.getDeviceModelDescription()
|
||||
}
|
||||
|
||||
func (p *Profile) WellKnownProfile() WellKnownProfile {
|
||||
model, err := p.DeviceModelDescription()
|
||||
if err == nil {
|
||||
switch model {
|
||||
case "IEC 61966-2-1 Default RGB Colour Space - sRGB":
|
||||
return SRGBProfile
|
||||
}
|
||||
}
|
||||
d, err := p.Description()
|
||||
if err == nil {
|
||||
if ans := WellKnownProfileFromDescription(d); ans != UnknownProfile {
|
||||
return ans
|
||||
}
|
||||
}
|
||||
switch p.Header.DeviceManufacturer {
|
||||
case IECManufacturerSignature:
|
||||
switch p.Header.DeviceModel {
|
||||
case SRGBModelSignature:
|
||||
return SRGBProfile
|
||||
}
|
||||
case AdobeManufacturerSignature:
|
||||
switch p.Header.DeviceModel {
|
||||
case AdobeRGBModelSignature:
|
||||
return AdobeRGBProfile
|
||||
case PhotoProModelSignature:
|
||||
return PhotoProProfile
|
||||
}
|
||||
case AppleManufacturerSignature, AppleUpperManufacturerSignature:
|
||||
switch p.Header.DeviceModel {
|
||||
case DisplayP3ModelSignature:
|
||||
return DisplayP3Profile
|
||||
}
|
||||
}
|
||||
return UnknownProfile
|
||||
}
|
||||
|
||||
func newProfile() *Profile {
|
||||
return &Profile{
|
||||
TagTable: emptyTagTable(),
|
||||
}
|
||||
}
|
||||
97
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/profilereader.go
generated
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
package icc
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
|
||||
"github.com/kovidgoyal/go-parallel"
|
||||
)
|
||||
|
||||
var _ = fmt.Println
|
||||
var _ = os.Stderr
|
||||
|
||||
type ProfileReader struct {
|
||||
reader io.Reader
|
||||
}
|
||||
|
||||
func (pr *ProfileReader) ReadProfile() (p *Profile, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
p = nil
|
||||
err = parallel.Format_stacktrace_on_panic(r, 1)
|
||||
}
|
||||
}()
|
||||
|
||||
profile := newProfile()
|
||||
|
||||
err = pr.readHeader(&profile.Header)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to reader header from ICC profile: %w", err)
|
||||
}
|
||||
|
||||
err = pr.readTagTable(&profile.TagTable)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read tag table from ICC profile: %w", err)
|
||||
}
|
||||
|
||||
return profile, nil
|
||||
}
|
||||
|
||||
func (pr *ProfileReader) readHeader(header *Header) (err error) {
|
||||
var data [128]byte
|
||||
if _, err = io.ReadFull(pr.reader, data[:]); err == nil {
|
||||
var n int
|
||||
n, err = binary.Decode(data[:], binary.BigEndian, header)
|
||||
if err == nil {
|
||||
if header.FileSignature != ProfileFileSignature {
|
||||
return fmt.Errorf("ICC header has invalid signature: %s", header.FileSignature)
|
||||
}
|
||||
if n != len(data) {
|
||||
return fmt.Errorf("decoding header consumed %d instead of %d bytes", n, len(data))
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (pr *ProfileReader) readTagTable(tagTable *TagTable) (err error) {
|
||||
var tagCount uint32
|
||||
if err = binary.Read(pr.reader, binary.BigEndian, &tagCount); err != nil {
|
||||
return
|
||||
}
|
||||
type tagIndexEntry struct {
|
||||
Sig uint32
|
||||
Offset uint32
|
||||
Size uint32
|
||||
}
|
||||
endOfTagData := uint32(0)
|
||||
tag_indices := make([]tagIndexEntry, tagCount)
|
||||
if err = binary.Read(pr.reader, binary.BigEndian, tag_indices); err != nil {
|
||||
return fmt.Errorf("failed to read tag indices from ICC profile: %w", err)
|
||||
}
|
||||
for _, t := range tag_indices {
|
||||
endOfTagData = max(endOfTagData, t.Offset+t.Size)
|
||||
}
|
||||
tagDataOffset := 132 + tagCount*12
|
||||
if endOfTagData > tagDataOffset {
|
||||
tagData := make([]byte, endOfTagData-tagDataOffset)
|
||||
if _, err = io.ReadFull(pr.reader, tagData); err != nil {
|
||||
return fmt.Errorf("failed to read tag data from ICC profile: %w", err)
|
||||
}
|
||||
for _, t := range tag_indices {
|
||||
startOffset := t.Offset - tagDataOffset
|
||||
endOffset := startOffset + t.Size
|
||||
tagTable.add(Signature(t.Sig), tagData[startOffset:endOffset])
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewProfileReader(r io.Reader) *ProfileReader {
|
||||
return &ProfileReader{
|
||||
reader: r,
|
||||
}
|
||||
}
|
||||
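ReadProfile expects the raw ICC byte stream (the 128-byte header followed by the tag table), which is what ICCProfileData returns or what a standalone .icc file contains. A usage sketch with a hypothetical profile file:

package main

import (
	"fmt"
	"os"

	"github.com/kovidgoyal/imaging/prism/meta/icc"
)

func main() {
	f, err := os.Open("display.icc") // hypothetical profile file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	profile, err := icc.NewProfileReader(f).ReadProfile()
	if err != nil {
		panic(err)
	}
	fmt.Println(profile.Header.DeviceClass, profile.Header.DataColorSpace)
	if desc, err := profile.Description(); err == nil {
		fmt.Println("description:", desc)
	}
}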
13
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/profileversion.go
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
package icc
|
||||
|
||||
import "fmt"
|
||||
|
||||
type Version struct {
|
||||
Major byte
|
||||
MinorAndRev byte
|
||||
Reserved1, Reserved2 byte
|
||||
}
|
||||
|
||||
func (pv Version) String() string {
|
||||
return fmt.Sprintf("%d.%d.%d", pv.Major, pv.MinorAndRev>>4, pv.MinorAndRev&3)
|
||||
}
|
||||
27
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/renderingintent.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
package icc
|
||||
|
||||
import "fmt"
|
||||
|
||||
const (
|
||||
PerceptualRenderingIntent RenderingIntent = 0
|
||||
RelativeColorimetricRenderingIntent RenderingIntent = 1
|
||||
SaturationRenderingIntent RenderingIntent = 2
|
||||
AbsoluteColorimetricRenderingIntent RenderingIntent = 3
|
||||
)
|
||||
|
||||
type RenderingIntent uint32
|
||||
|
||||
func (ri RenderingIntent) String() string {
|
||||
switch ri {
|
||||
case PerceptualRenderingIntent:
|
||||
return "Perceptual"
|
||||
case RelativeColorimetricRenderingIntent:
|
||||
return "Relative colorimetric"
|
||||
case SaturationRenderingIntent:
|
||||
return "Saturation"
|
||||
case AbsoluteColorimetricRenderingIntent:
|
||||
return "Absolute colorimetric"
|
||||
default:
|
||||
return fmt.Sprintf("Unknown (%d)", ri)
|
||||
}
|
||||
}
|
||||
209
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/signature.go
generated
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
package icc
|
||||
|
||||
type Signature uint32
|
||||
|
||||
const (
|
||||
ProfileFileSignature Signature = 0x61637370 // 'acsp'
|
||||
TextTagSignature Signature = 0x74657874 // 'text'
|
||||
SignateTagSignature Signature = 0x73696720 // 'sig '
|
||||
|
||||
DescSignature Signature = 0x64657363 // 'desc'
|
||||
MultiLocalisedUnicodeSignature Signature = 0x6D6C7563 // 'mluc'
|
||||
DeviceManufacturerDescriptionSignature Signature = 0x646d6e64 // 'dmnd'
|
||||
DeviceModelDescriptionSignature Signature = 0x646d6464 // 'dmdd'
|
||||
|
||||
AdobeManufacturerSignature Signature = 0x41444245 // 'ADBE'
|
||||
AppleManufacturerSignature Signature = 0x6170706c // 'appl'
|
||||
AppleUpperManufacturerSignature Signature = 0x4150504c // 'APPL'
|
||||
IECManufacturerSignature Signature = 0x49454320 // 'IEC '
|
||||
|
||||
AdobeRGBModelSignature Signature = 0x52474220 // 'RGB '
|
||||
SRGBModelSignature Signature = 0x73524742 // 'sRGB'
|
||||
PhotoProModelSignature Signature = 0x50525452 // 'PRTR'
|
||||
DisplayP3ModelSignature Signature = 0x70332020 // 'p3 '
|
||||
|
||||
ChromaticityTypeSignature Signature = 0x6368726D /* 'chrm' */
|
||||
ColorantOrderTypeSignature Signature = 0x636C726F /* 'clro' */
|
||||
ColorantTableTypeSignature Signature = 0x636C7274 /* 'clrt' */
|
||||
CrdInfoTypeSignature Signature = 0x63726469 /* 'crdi' Removed in V4 */
|
||||
CurveTypeSignature Signature = 0x63757276 /* 'curv' */
|
||||
DataTypeSignature Signature = 0x64617461 /* 'data' */
|
||||
DictTypeSignature Signature = 0x64696374 /* 'dict' */
|
||||
DateTimeTypeSignature Signature = 0x6474696D /* 'dtim' */
|
||||
DeviceSettingsTypeSignature Signature = 0x64657673 /* 'devs' Removed in V4 */
|
||||
Lut16TypeSignature Signature = 0x6d667432 /* 'mft2' */
|
||||
Lut8TypeSignature Signature = 0x6d667431 /* 'mft1' */
|
||||
LutAtoBTypeSignature Signature = 0x6d414220 /* 'mAB ' */
|
||||
LutBtoATypeSignature Signature = 0x6d424120 /* 'mBA ' */
|
||||
MeasurementTypeSignature Signature = 0x6D656173 /* 'meas' */
|
||||
MultiLocalizedUnicodeTypeSignature Signature = 0x6D6C7563 /* 'mluc' */
|
||||
MultiProcessElementTypeSignature Signature = 0x6D706574 /* 'mpet' */
|
||||
NamedColorTypeSignature Signature = 0x6E636f6C /* 'ncol' OBSOLETE use ncl2 */
|
||||
NamedColor2TypeSignature Signature = 0x6E636C32 /* 'ncl2' */
|
||||
ParametricCurveTypeSignature Signature = 0x70617261 /* 'para' */
|
||||
ProfileSequenceDescTypeSignature Signature = 0x70736571 /* 'pseq' */
|
||||
ProfileSequceIdTypeSignature Signature = 0x70736964 /* 'psid' */
|
||||
ResponseCurveSet16TypeSignature Signature = 0x72637332 /* 'rcs2' */
|
||||
S15Fixed16ArrayTypeSignature Signature = 0x73663332 /* 'sf32' */
|
||||
ScreeningTypeSignature Signature = 0x7363726E /* 'scrn' Removed in V4 */
|
||||
SignatureTypeSignature Signature = 0x73696720 /* 'sig ' */
|
||||
TextTypeSignature Signature = 0x74657874 /* 'text' */
|
||||
TextDescriptionTypeSignature Signature = 0x64657363 /* 'desc' Removed in V4 */
|
||||
U16Fixed16ArrayTypeSignature Signature = 0x75663332 /* 'uf32' */
|
||||
UcrBgTypeSignature Signature = 0x62666420 /* 'bfd ' Removed in V4 */
|
||||
UInt16ArrayTypeSignature Signature = 0x75693136 /* 'ui16' */
|
||||
UInt32ArrayTypeSignature Signature = 0x75693332 /* 'ui32' */
|
||||
UInt64ArrayTypeSignature Signature = 0x75693634 /* 'ui64' */
|
||||
UInt8ArrayTypeSignature Signature = 0x75693038 /* 'ui08' */
|
||||
ViewingConditionsTypeSignature Signature = 0x76696577 /* 'view' */
|
||||
XYZTypeSignature Signature = 0x58595A20 /* 'XYZ ' */
|
||||
XYZArrayTypeSignature Signature = 0x58595A20 /* 'XYZ ' */
|
||||
|
||||
XYZSignature Signature = 0x58595A20 /* 'XYZ ' */
|
||||
LabSignature Signature = 0x4C616220 /* 'Lab ' */
|
||||
LUVSignature Signature = 0x4C757620 /* 'Luv ' */
|
||||
YCbrSignature Signature = 0x59436272 /* 'YCbr' */
|
||||
YxySignature Signature = 0x59787920 /* 'Yxy ' */
|
||||
RGBSignature Signature = 0x52474220 /* 'RGB ' */
|
||||
GraySignature Signature = 0x47524159 /* 'GRAY' */
|
||||
HSVSignature Signature = 0x48535620 /* 'HSV ' */
|
||||
HLSSignature Signature = 0x484C5320 /* 'HLS ' */
|
||||
CMYKSignature Signature = 0x434D594B /* 'CMYK' */
|
||||
CMYSignature Signature = 0x434D5920 /* 'CMY ' */
|
||||
|
||||
MCH2Signature Signature = 0x32434C52 /* '2CLR' */
|
||||
MCH3Signature Signature = 0x33434C52 /* '3CLR' */
|
||||
MCH4Signature Signature = 0x34434C52 /* '4CLR' */
|
||||
MCH5Signature Signature = 0x35434C52 /* '5CLR' */
|
||||
MCH6Signature Signature = 0x36434C52 /* '6CLR' */
|
||||
MCH7Signature Signature = 0x37434C52 /* '7CLR' */
|
||||
MCH8Signature Signature = 0x38434C52 /* '8CLR' */
|
||||
MCH9Signature Signature = 0x39434C52 /* '9CLR' */
|
||||
MCHASignature Signature = 0x41434C52 /* 'ACLR' */
|
||||
MCHBSignature Signature = 0x42434C52 /* 'BCLR' */
|
||||
MCHCSignature Signature = 0x43434C52 /* 'CCLR' */
|
||||
MCHDSignature Signature = 0x44434C52 /* 'DCLR' */
|
||||
MCHESignature Signature = 0x45434C52 /* 'ECLR' */
|
||||
MCHFSignature Signature = 0x46434C52 /* 'FCLR' */
|
||||
NamedSignature Signature = 0x6e6d636c /* 'nmcl' */
|
||||
|
||||
Color2Signature Signature = 0x32434C52 /* '2CLR' */
|
||||
Color3Signature Signature = 0x33434C52 /* '3CLR' */
|
||||
Color4Signature Signature = 0x34434C52 /* '4CLR' */
|
||||
Color5Signature Signature = 0x35434C52 /* '5CLR' */
|
||||
Color6Signature Signature = 0x36434C52 /* '6CLR' */
|
||||
Color7Signature Signature = 0x37434C52 /* '7CLR' */
|
||||
Color8Signature Signature = 0x38434C52 /* '8CLR' */
|
||||
Color9Signature Signature = 0x39434C52 /* '9CLR' */
|
||||
Color10Signature Signature = 0x41434C52 /* 'ACLR' */
|
||||
Color11Signature Signature = 0x42434C52 /* 'BCLR' */
|
||||
Color12Signature Signature = 0x43434C52 /* 'CCLR' */
|
||||
Color13Signature Signature = 0x44434C52 /* 'DCLR' */
|
||||
Color14Signature Signature = 0x45434C52 /* 'ECLR' */
|
||||
Color15Signature Signature = 0x46434C52 /* 'FCLR' */
|
||||
|
||||
AToB0TagSignature Signature = 0x41324230 /* 'A2B0' */
|
||||
AToB1TagSignature Signature = 0x41324231 /* 'A2B1' */
|
||||
AToB2TagSignature Signature = 0x41324232 /* 'A2B2' */
|
||||
BlueColorantTagSignature Signature = 0x6258595A /* 'bXYZ' */
|
||||
BlueMatrixColumnTagSignature Signature = 0x6258595A /* 'bXYZ' */
|
||||
BlueTRCTagSignature Signature = 0x62545243 /* 'bTRC' */
|
||||
BToA0TagSignature Signature = 0x42324130 /* 'B2A0' */
|
||||
BToA1TagSignature Signature = 0x42324131 /* 'B2A1' */
|
||||
BToA2TagSignature Signature = 0x42324132 /* 'B2A2' */
|
||||
CalibrationDateTimeTagSignature Signature = 0x63616C74 /* 'calt' */
|
||||
CharTargetTagSignature Signature = 0x74617267 /* 'targ' */
|
||||
ChromaticAdaptationTagSignature Signature = 0x63686164 /* 'chad' */
|
||||
ChromaticityTagSignature Signature = 0x6368726D /* 'chrm' */
|
||||
ColorantOrderTagSignature Signature = 0x636C726F /* 'clro' */
|
||||
ColorantTableTagSignature Signature = 0x636C7274 /* 'clrt' */
|
||||
ColorantTableOutTagSignature Signature = 0x636C6F74 /* 'clot' */
|
||||
ColorimetricIntentImageStateTagSignature Signature = 0x63696973 /* 'ciis' */
|
||||
CopyrightTagSignature Signature = 0x63707274 /* 'cprt' */
|
||||
CrdInfoTagSignature Signature = 0x63726469 /* 'crdi' Removed in V4 */
|
||||
DataTagSignature Signature = 0x64617461 /* 'data' Removed in V4 */
|
||||
DateTimeTagSignature Signature = 0x6474696D /* 'dtim' Removed in V4 */
|
||||
DeviceMfgDescTagSignature Signature = 0x646D6E64 /* 'dmnd' */
|
||||
DeviceModelDescTagSignature Signature = 0x646D6464 /* 'dmdd' */
|
||||
DeviceSettingsTagSignature Signature = 0x64657673 /* 'devs' Removed in V4 */
|
||||
DToB0TagSignature Signature = 0x44324230 /* 'D2B0' */
|
||||
DToB1TagSignature Signature = 0x44324231 /* 'D2B1' */
|
||||
DToB2TagSignature Signature = 0x44324232 /* 'D2B2' */
|
||||
DToB3TagSignature Signature = 0x44324233 /* 'D2B3' */
|
||||
BToD0TagSignature Signature = 0x42324430 /* 'B2D0' */
|
||||
BToD1TagSignature Signature = 0x42324431 /* 'B2D1' */
|
||||
BToD2TagSignature Signature = 0x42324432 /* 'B2D2' */
|
||||
BToD3TagSignature Signature = 0x42324433 /* 'B2D3' */
|
||||
GamutTagSignature Signature = 0x67616D74 /* 'gamt' */
|
||||
GrayTRCTagSignature Signature = 0x6b545243 /* 'kTRC' */
|
||||
GreenColorantTagSignature Signature = 0x6758595A /* 'gXYZ' */
|
||||
GreenMatrixColumnTagSignature Signature = 0x6758595A /* 'gXYZ' */
|
||||
GreenTRCTagSignature Signature = 0x67545243 /* 'gTRC' */
|
||||
LuminanceTagSignature Signature = 0x6C756d69 /* 'lumi' */
|
||||
MeasurementTagSignature Signature = 0x6D656173 /* 'meas' */
|
||||
MediaBlackPointTagSignature Signature = 0x626B7074 /* 'bkpt' */
|
||||
MediaWhitePointTagSignature Signature = 0x77747074 /* 'wtpt' */
|
||||
MetaDataTagSignature Signature = 0x6D657461 /* 'meta' */
|
||||
NamedColorTagSignature Signature = 0x6E636f6C /* 'ncol' OBSOLETE use ncl2 */
|
||||
NamedColor2TagSignature Signature = 0x6E636C32 /* 'ncl2' */
|
||||
OutputResponseTagSignature Signature = 0x72657370 /* 'resp' */
|
||||
PerceptualRenderingIntentGamutTagSignature Signature = 0x72696730 /* 'rig0' */
|
||||
Preview0TagSignature Signature = 0x70726530 /* 'pre0' */
|
||||
Preview1TagSignature Signature = 0x70726531 /* 'pre1' */
|
||||
Preview2TagSignature Signature = 0x70726532 /* 'pre2' */
|
||||
PrintConditionTagSignature Signature = 0x7074636e /* 'ptcn' */
|
||||
ProfileDescriptionTagSignature Signature = 0x64657363 /* 'desc' */
|
||||
ProfileSequenceDescTagSignature Signature = 0x70736571 /* 'pseq' */
|
||||
ProfileSequceIdTagSignature Signature = 0x70736964 /* 'psid' */
|
||||
Ps2CRD0TagSignature Signature = 0x70736430 /* 'psd0' Removed in V4 */
|
||||
Ps2CRD1TagSignature Signature = 0x70736431 /* 'psd1' Removed in V4 */
|
||||
Ps2CRD2TagSignature Signature = 0x70736432 /* 'psd2' Removed in V4 */
|
||||
Ps2CRD3TagSignature Signature = 0x70736433 /* 'psd3' Removed in V4 */
|
||||
Ps2CSATagSignature Signature = 0x70733273 /* 'ps2s' Removed in V4 */
|
||||
Ps2RenderingIntentTagSignature Signature = 0x70733269 /* 'ps2i' Removed in V4 */
|
||||
RedColorantTagSignature Signature = 0x7258595A /* 'rXYZ' */
|
||||
RedMatrixColumnTagSignature Signature = 0x7258595A /* 'rXYZ' */
|
||||
RedTRCTagSignature Signature = 0x72545243 /* 'rTRC' */
|
||||
SaturationRenderingIntentGamutTagSignature Signature = 0x72696732 /* 'rig2' */
|
||||
ScreeningDescTagSignature Signature = 0x73637264 /* 'scrd' Removed in V4 */
|
||||
ScreeningTagSignature Signature = 0x7363726E /* 'scrn' Removed in V4 */
|
||||
TechnologyTagSignature Signature = 0x74656368 /* 'tech' */
|
||||
UcrBgTagSignature Signature = 0x62666420 /* 'bfd ' Removed in V4 */
|
||||
ViewingCondDescTagSignature Signature = 0x76756564 /* 'vued' */
|
||||
ViewingConditionsTagSignature Signature = 0x76696577 /* 'view' */
|
||||
|
||||
CurveSetElemTypeSignature Signature = 0x63767374 /* 'cvst' */
|
||||
MatrixElemTypeSignature Signature = 0x6D617466 /* 'matf' */
|
||||
CLutElemTypeSignature Signature = 0x636C7574 /* 'clut' */
|
||||
BAcsElemTypeSignature Signature = 0x62414353 /* 'bACS' */
|
||||
EAcsElemTypeSignature Signature = 0x65414353 /* 'eACS' */
|
||||
)
|
||||
|
||||
func maskNull(b byte) byte {
|
||||
switch b {
|
||||
case 0:
|
||||
return ' '
|
||||
default:
|
||||
return b
|
||||
}
|
||||
}
|
||||
|
||||
func signature(b []byte) Signature {
|
||||
return Signature(uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]))
|
||||
}
|
||||
|
||||
func SignatureFromString(sig string) Signature {
|
||||
var b []byte = []byte{0x20, 0x20, 0x20, 0x20}
|
||||
copy(b, sig)
|
||||
return signature(b)
|
||||
}
|
||||
|
||||
func (s Signature) String() string {
|
||||
v := []byte{
|
||||
(maskNull(byte((s >> 24) & 0xff))),
|
||||
(maskNull(byte((s >> 16) & 0xff))),
|
||||
(maskNull(byte((s >> 8) & 0xff))),
|
||||
(maskNull(byte(s & 0xff))),
|
||||
}
|
||||
return "'" + string(v) + "'"
|
||||
}
|
||||
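A Signature is just four ASCII bytes packed big-endian into a uint32, so 'acsp' is 0x61637370 and short strings are padded with trailing spaces. A quick round-trip check using the exported helpers:

package main

import (
	"fmt"

	"github.com/kovidgoyal/imaging/prism/meta/icc"
)

func main() {
	s := icc.SignatureFromString("acsp")
	fmt.Printf("%#08x %s\n", uint32(s), s)                          // 0x61637370 'acsp'
	fmt.Println(s == icc.ProfileFileSignature)                      // true
	fmt.Println(icc.SignatureFromString("XYZ") == icc.XYZSignature) // true, padded to 'XYZ '
}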
183
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/tag_description.go
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
package icc
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"unicode/utf16"
|
||||
)
|
||||
|
||||
var _ = fmt.Print
|
||||
|
||||
func parse_text_tag(data []byte) (any, error) {
|
||||
var tag_type Signature
|
||||
_, err := binary.Decode(data, binary.BigEndian, &tag_type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch tag_type {
|
||||
case TextTagSignature:
|
||||
return textDecoder(data)
|
||||
case DescSignature:
|
||||
return descDecoder(data)
|
||||
default:
|
||||
return mlucDecoder(data)
|
||||
}
|
||||
}
|
||||
|
||||
type TextTag interface {
|
||||
BestGuessValue() string
|
||||
}
|
||||
|
||||
type DescriptionTag struct {
|
||||
ASCII string
|
||||
Unicode string
|
||||
Script string
|
||||
}
|
||||
|
||||
func (d DescriptionTag) BestGuessValue() string {
|
||||
if d.ASCII != "" {
|
||||
return d.ASCII
|
||||
}
|
||||
return d.Unicode
|
||||
}
|
||||
|
||||
var _ TextTag = (*DescriptionTag)(nil)
|
||||
|
||||
func descDecoder(raw []byte) (any, error) {
|
||||
if len(raw) < 12 {
|
||||
return nil, errors.New("desc tag too short")
|
||||
}
|
||||
asciiLen := int(binary.BigEndian.Uint32(raw[8:12]))
|
||||
if asciiLen < 1 || 12+asciiLen > len(raw) {
|
||||
return nil, errors.New("invalid ASCII length in desc tag")
|
||||
}
|
||||
ascii := raw[12 : 12+asciiLen]
|
||||
if i := bytes.IndexByte(ascii, 0); i >= 0 {
|
||||
ascii = ascii[:i]
|
||||
}
|
||||
|
||||
offset := 12 + asciiLen
|
||||
if len(raw) < offset+4 {
|
||||
return &DescriptionTag{ASCII: string(ascii)}, nil // ASCII-only, no Unicode
|
||||
}
|
||||
|
||||
unicodeCount := int(binary.BigEndian.Uint32(raw[offset : offset+4]))
|
||||
offset += 4
|
||||
if len(raw) < offset+(unicodeCount*2) {
|
||||
return nil, errors.New("desc tag truncated: missing UTF-16 data")
|
||||
}
|
||||
unicodeData := raw[offset : offset+(unicodeCount*2)]
|
||||
offset += unicodeCount * 2
|
||||
unicode := decodeUTF16BE(unicodeData)
|
||||
|
||||
if len(raw) <= offset {
|
||||
return &DescriptionTag{
|
||||
ASCII: string(ascii),
|
||||
Unicode: unicode,
|
||||
}, nil
|
||||
}
|
||||
|
||||
scriptCount := int(raw[offset])
|
||||
offset++
|
||||
if len(raw) < offset+scriptCount {
|
||||
return nil, errors.New("desc tag truncated: missing ScriptCode data")
|
||||
}
|
||||
script := string(raw[offset : offset+scriptCount])
|
||||
|
||||
return &DescriptionTag{
|
||||
ASCII: string(ascii),
|
||||
Unicode: unicode,
|
||||
Script: script,
|
||||
}, nil
|
||||
}
|
||||
|
||||
type PlainText struct {
|
||||
val string
|
||||
}
|
||||
|
||||
var _ TextTag = (*PlainText)(nil)
|
||||
|
||||
func (p PlainText) BestGuessValue() string { return p.val }
|
||||
|
||||
func textDecoder(raw []byte) (any, error) {
|
||||
if len(raw) < 8 {
|
||||
return nil, errors.New("text tag too short")
|
||||
}
|
||||
text := raw[8:]
|
||||
text = bytes.TrimRight(text, "\x00")
|
||||
return &PlainText{string(text)}, nil
|
||||
}
|
||||
|
||||
type MultiLocalizedTag struct {
|
||||
Strings []LocalizedString
|
||||
}
|
||||
|
||||
func (p MultiLocalizedTag) BestGuessValue() string {
|
||||
for _, t := range p.Strings {
|
||||
if t.Value != "" && (t.Language == "en" || t.Language == "eng") {
|
||||
return t.Value
|
||||
}
|
||||
}
|
||||
for _, t := range p.Strings {
|
||||
if t.Value != "" {
|
||||
return t.Value
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
type LocalizedString struct {
|
||||
Language string // e.g. "en"
|
||||
Country string // e.g. "US"
|
||||
Value string
|
||||
}
|
||||
|
||||
func mlucDecoder(raw []byte) (any, error) {
|
||||
if len(raw) < 16 {
|
||||
return nil, errors.New("mluc tag too short")
|
||||
}
|
||||
count := int(binary.BigEndian.Uint32(raw[8:12]))
|
||||
recordSize := int(binary.BigEndian.Uint32(raw[12:16]))
|
||||
if recordSize != 12 {
|
||||
return nil, fmt.Errorf("unexpected mluc record size: %d", recordSize)
|
||||
}
|
||||
if len(raw) < 16+(count*recordSize) {
|
||||
return nil, fmt.Errorf("mluc tag too small for %d records", count)
|
||||
}
|
||||
tag := &MultiLocalizedTag{Strings: make([]LocalizedString, 0, count)}
|
||||
for i := 0; i < count; i++ {
|
||||
base := 16 + i*recordSize
|
||||
langCode := string(raw[base : base+2])
|
||||
countryCode := string(raw[base+2 : base+4])
|
||||
strLen := int(binary.BigEndian.Uint32(raw[base+4 : base+8]))
|
||||
strOffset := int(binary.BigEndian.Uint32(raw[base+8 : base+12]))
|
||||
|
||||
if strOffset+strLen > len(raw) || strLen%2 != 0 {
|
||||
return nil, fmt.Errorf("invalid string offset/length in mluc record %d", i)
|
||||
}
|
||||
|
||||
strData := raw[strOffset : strOffset+strLen]
|
||||
decoded := decodeUTF16BE(strData)
|
||||
tag.Strings = append(tag.Strings, LocalizedString{
|
||||
Language: langCode,
|
||||
Country: countryCode,
|
||||
Value: decoded,
|
||||
})
|
||||
}
|
||||
return tag, nil
|
||||
}
|
||||
|
||||
func decodeUTF16BE(data []byte) string {
|
||||
codeUnits := make([]uint16, len(data)/2)
|
||||
_, _ = binary.Decode(data, binary.BigEndian, codeUnits)
|
||||
return string(utf16.Decode(codeUnits))
|
||||
}
|
||||
|
||||
func sigDecoder(raw []byte) (any, error) {
|
||||
if len(raw) < 12 {
|
||||
return nil, errors.New("sig tag too short")
|
||||
}
|
||||
return signature(raw[8:12]), nil
|
||||
}
|
||||
140
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/tags_clut.go
generated
vendored
Normal file
@@ -0,0 +1,140 @@
|
||||
package icc
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// CLUTTag represents a color lookup table tag (TagColorLookupTable)
|
||||
type CLUTTag struct {
|
||||
GridPoints []uint8 // e.g., [17,17,17] for 3D CLUT
|
||||
InputChannels int
|
||||
OutputChannels int
|
||||
Values []float64 // flattened [in1, in2, ..., out1, out2, ...]
|
||||
}
|
||||
|
||||
var _ ChannelTransformer = (*CLUTTag)(nil)
|
||||
|
||||
// section 10.12.3 (CLUT) in ICC.1-2022-05.pdf
|
||||
func embeddedClutDecoder(raw []byte, InputChannels, OutputChannels int) (any, error) {
|
||||
if len(raw) < 20 {
|
||||
return nil, errors.New("clut tag too short")
|
||||
}
|
||||
gridPoints := make([]uint8, InputChannels)
|
||||
copy(gridPoints, raw[:InputChannels])
|
||||
bytes_per_channel := raw[16]
|
||||
raw = raw[20:]
|
||||
// expected size: (product of grid points) * output channels * bytes_per_channel
|
||||
expected_num_of_values := expectedValues(gridPoints, OutputChannels)
|
||||
values := make([]float64, expected_num_of_values)
|
||||
if len(values)*int(bytes_per_channel) > len(raw) {
|
||||
return nil, fmt.Errorf("CLUT unexpected body length: expected %d, got %d", expected_num_of_values*int(bytes_per_channel), len(raw))
|
||||
}
|
||||
|
||||
switch bytes_per_channel {
|
||||
case 1:
|
||||
for i, b := range raw[:len(values)] {
|
||||
values[i] = float64(b) / 255
|
||||
}
|
||||
case 2:
|
||||
for i := range len(values) {
|
||||
values[i] = float64(binary.BigEndian.Uint16(raw[i*2:i*2+2])) / 65535
|
||||
}
|
||||
}
|
||||
ans := &CLUTTag{
|
||||
GridPoints: gridPoints,
|
||||
InputChannels: InputChannels,
|
||||
OutputChannels: OutputChannels,
|
||||
Values: values,
|
||||
}
|
||||
if ans.InputChannels > 6 {
|
||||
return nil, fmt.Errorf("unsupported num of CLUT input channels: %d", ans.InputChannels)
|
||||
}
|
||||
return ans, nil
|
||||
}
|
||||
|
||||
func expectedValues(gridPoints []uint8, outputChannels int) int {
|
||||
expectedPoints := 1
|
||||
for _, g := range gridPoints {
|
||||
expectedPoints *= int(g)
|
||||
}
|
||||
return expectedPoints * outputChannels
|
||||
}
|
||||
|
||||
func (c *CLUTTag) WorkspaceSize() int { return 16 }
|
||||
|
||||
func (c *CLUTTag) IsSuitableFor(num_input_channels, num_output_channels int) bool {
|
||||
return num_input_channels == int(c.InputChannels) && num_output_channels == c.OutputChannels
|
||||
}
|
||||
|
||||
func (c *CLUTTag) Transform(output, workspace []float64, inputs ...float64) error {
|
||||
return c.Lookup(output, workspace, inputs)
|
||||
}
|
||||
|
||||
func (c *CLUTTag) Lookup(output, workspace, inputs []float64) error {
|
||||
// clamp input values to 0-1...
|
||||
clamped := workspace[:len(inputs)]
|
||||
for i, v := range inputs {
|
||||
clamped[i] = clamp01(v)
|
||||
}
|
||||
// find the grid positions and interpolation factors...
|
||||
gridFrac := workspace[len(clamped) : 2*len(clamped)]
|
||||
var buf [4]int
|
||||
gridPos := buf[:]
|
||||
for i, v := range clamped {
|
||||
nPoints := int(c.GridPoints[i])
|
||||
if nPoints < 2 {
|
||||
return fmt.Errorf("CLUT input channel %d has invalid grid points: %d", i, nPoints)
|
||||
}
|
||||
pos := v * float64(nPoints-1)
|
||||
gridPos[i] = int(pos)
|
||||
if gridPos[i] >= nPoints-1 {
|
||||
gridPos[i] = nPoints - 2 // clamp
|
||||
gridFrac[i] = 1.0
|
||||
} else {
|
||||
gridFrac[i] = pos - float64(gridPos[i])
|
||||
}
|
||||
}
|
||||
// perform multi-dimensional interpolation (recursive)...
|
||||
return c.triLinearInterpolate(output[:c.OutputChannels], gridPos, gridFrac)
|
||||
}
|
||||
|
||||
func (c *CLUTTag) triLinearInterpolate(out []float64, gridPos []int, gridFrac []float64) error {
|
||||
numCorners := 1 << c.InputChannels // 2^inputs
|
||||
for o := range c.OutputChannels {
|
||||
out[o] = 0
|
||||
}
|
||||
// walk all corners of the hypercube
|
||||
for corner := range numCorners {
|
||||
weight := 1.0
|
||||
idx := 0
|
||||
stride := 1
|
||||
for dim := c.InputChannels - 1; dim >= 0; dim-- {
|
||||
bit := (corner >> dim) & 1
|
||||
pos := gridPos[dim] + bit
|
||||
if pos >= int(c.GridPoints[dim]) {
|
||||
return fmt.Errorf("CLUT corner position out of bounds at dimension %d", dim)
|
||||
}
|
||||
idx += pos * stride
|
||||
stride *= int(c.GridPoints[dim])
|
||||
if bit == 0 {
|
||||
weight *= 1 - gridFrac[dim]
|
||||
} else {
|
||||
weight *= gridFrac[dim]
|
||||
}
|
||||
}
|
||||
base := idx * c.OutputChannels
|
||||
if base+c.OutputChannels > len(c.Values) {
|
||||
return errors.New("CLUT value index out of bounds")
|
||||
}
|
||||
for o := range c.OutputChannels {
|
||||
out[o] += weight * c.Values[base+o]
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func clamp01(v float64) float64 {
|
||||
return max(0, min(v, 1))
|
||||
}
|
||||
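The hypercube walk in triLinearInterpolate is plain multilinear interpolation: each corner contributes with a weight that is the product, over input dimensions, of either the fractional position or one minus it. With one input channel, one output channel and two grid points it degenerates to ordinary linear interpolation, which a tiny hand-built table shows; constructing CLUTTag directly is assumed acceptable for illustration:

package main

import (
	"fmt"

	"github.com/kovidgoyal/imaging/prism/meta/icc"
)

func main() {
	// 1-D lookup table: value 0.0 at grid point 0 and 1.0 at grid point 1.
	clut := &icc.CLUTTag{
		GridPoints:     []uint8{2},
		InputChannels:  1,
		OutputChannels: 1,
		Values:         []float64{0, 1},
	}
	out := make([]float64, clut.OutputChannels)
	workspace := make([]float64, clut.WorkspaceSize())
	if err := clut.Transform(out, workspace, 0.25); err != nil {
		panic(err)
	}
	fmt.Println(out[0]) // 0.25: linear interpolation between the two grid points
}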
103
vendor/github.com/kovidgoyal/imaging/prism/meta/icc/tagtable.go
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
package icc
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
)
|
||||
|
||||
type not_found struct {
|
||||
sig Signature
|
||||
}
|
||||
|
||||
func (e *not_found) Error() string {
|
||||
return fmt.Sprintf("no tag for signature: %s found in this ICC profile", e.sig)
|
||||
}
|
||||
|
||||
type unsupported struct {
|
||||
sig Signature
|
||||
}
|
||||
|
||||
func (e *unsupported) Error() string {
|
||||
return fmt.Sprintf("the tag: %s is not supported", e.sig)
|
||||
}
|
||||
|
||||
func parse_tag(sig Signature, data []byte) (result any, err error) {
|
||||
if len(data) == 0 {
|
||||
return nil, ¬_found{sig}
|
||||
}
|
||||
switch sig {
|
||||
default:
|
||||
return nil, &unsupported{sig}
|
||||
case DescSignature, DeviceManufacturerDescriptionSignature, DeviceModelDescriptionSignature:
|
||||
return parse_text_tag(data)
|
||||
case SignateTagSignature:
|
||||
return sigDecoder(data)
|
||||
}
|
||||
}
|
||||
|
||||
type parsed_tag struct {
|
||||
tag any
|
||||
err error
|
||||
}
|
||||
|
||||
type TagTable struct {
|
||||
entries map[Signature][]byte
|
||||
lock sync.Mutex
|
||||
parsed map[Signature]parsed_tag
|
||||
}
|
||||
|
||||
func (t *TagTable) add(sig Signature, data []byte) {
|
||||
t.entries[sig] = data
|
||||
}
|
||||
|
||||
func (t *TagTable) get_parsed(sig Signature) (ans any, err error) {
|
||||
t.lock.Lock()
|
||||
defer t.lock.Unlock()
|
||||
existing, found := t.parsed[sig]
|
||||
if found {
|
||||
return existing.tag, existing.err
|
||||
}
|
||||
if t.parsed == nil {
|
||||
t.parsed = make(map[Signature]parsed_tag)
|
||||
}
|
||||
defer func() {
|
||||
t.parsed[sig] = parsed_tag{ans, err}
|
||||
}()
|
||||
return parse_tag(sig, t.entries[sig])
|
||||
}
|
||||
|
||||
func (t *TagTable) getDescription(s Signature) (string, error) {
|
||||
q, err := t.get_parsed(s)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("could not get description for %s with error: %w", s, err)
|
||||
}
|
||||
if t, ok := q.(TextTag); ok {
|
||||
return t.BestGuessValue(), nil
|
||||
} else {
|
||||
return "", fmt.Errorf("tag for %s is not a text tag", s)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *TagTable) getProfileDescription() (string, error) {
|
||||
return t.getDescription(DescSignature)
|
||||
}
|
||||
|
||||
func (t *TagTable) getDeviceManufacturerDescription() (string, error) {
|
||||
return t.getDescription(DeviceManufacturerDescriptionSignature)
|
||||
}
|
||||
|
||||
func (t *TagTable) getDeviceModelDescription() (string, error) {
|
||||
return t.getDescription(DeviceModelDescriptionSignature)
|
||||
}
|
||||
|
||||
func emptyTagTable() TagTable {
|
||||
return TagTable{
|
||||
entries: make(map[Signature][]byte),
|
||||
}
|
||||
}
|
||||
|
||||
type ChannelTransformer interface {
|
||||
Transform(output, workspace []float64, input ...float64) error
|
||||
IsSuitableFor(num_input_channels int, num_output_channels int) bool
|
||||
WorkspaceSize() int
|
||||
}
|
||||
3
vendor/github.com/kovidgoyal/imaging/prism/meta/imageformat.go
generated
vendored
Normal file
@@ -0,0 +1,3 @@
package meta

type ImageFormat string
2
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
// Package jpegmeta provides support for working with embedded JPEG metadata.
package jpegmeta
166
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/jpegmeta.go
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
package jpegmeta
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/kovidgoyal/go-parallel"
|
||||
"github.com/kovidgoyal/imaging/prism/meta"
|
||||
"github.com/kovidgoyal/imaging/streams"
|
||||
)
|
||||
|
||||
// Format specifies the image format handled by this package
|
||||
var Format = meta.ImageFormat("JPEG")
|
||||
|
||||
const exifSignature = "Exif\x00\x00"
|
||||
|
||||
var iccProfileIdentifier = []byte("ICC_PROFILE\x00")
|
||||
|
||||
// Load loads the metadata for a JPEG image stream.
|
||||
//
|
||||
// Only as much of the stream is consumed as necessary to extract the metadata;
|
||||
// the returned stream contains a buffered copy of the consumed data such that
|
||||
// reading from it will produce the same results as fully reading the input
|
||||
// stream. This provides a convenient way to load the full image after loading
|
||||
// the metadata.
|
||||
//
|
||||
// An error is returned if basic metadata could not be extracted. The returned
|
||||
// stream still provides the full image data.
|
||||
func Load(r io.Reader) (md *meta.Data, imgStream io.Reader, err error) {
|
||||
imgStream, err = streams.CallbackWithSeekable(r, func(r io.Reader) (err error) {
|
||||
md, err = ExtractMetadata(r)
|
||||
return
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Same as Load() except that no new stream is provided
|
||||
func ExtractMetadata(r io.Reader) (md *meta.Data, err error) {
|
||||
metadataExtracted := false
|
||||
md = &meta.Data{Format: Format}
|
||||
segReader := NewSegmentReader(r)
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if !metadataExtracted {
|
||||
md = nil
|
||||
}
|
||||
err = parallel.Format_stacktrace_on_panic(r, 1)
|
||||
}
|
||||
}()
|
||||
|
||||
var iccProfileChunks [][]byte
|
||||
var iccProfileChunksExtracted int
|
||||
var exif []byte
|
||||
|
||||
allMetadataExtracted := func() bool {
|
||||
return metadataExtracted &&
|
||||
iccProfileChunks != nil &&
|
||||
iccProfileChunksExtracted == len(iccProfileChunks) &&
|
||||
exif != nil
|
||||
}
|
||||
|
||||
soiSegment, err := segReader.ReadSegment()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if soiSegment.Marker.Type != markerTypeStartOfImage {
|
||||
return nil, fmt.Errorf("stream does not begin with start-of-image")
|
||||
}
|
||||
|
||||
parseSegments:
|
||||
for {
|
||||
segment, err := segReader.ReadSegment()
|
||||
if err != nil {
|
||||
if err == io.EOF {
|
||||
return nil, fmt.Errorf("unexpected EOF")
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch segment.Marker.Type {
|
||||
|
||||
case markerTypeStartOfFrameBaseline,
|
||||
markerTypeStartOfFrameProgressive:
|
||||
md.BitsPerComponent = uint32(segment.Data[0])
|
||||
md.PixelHeight = uint32(segment.Data[1])<<8 | uint32(segment.Data[2])
|
||||
md.PixelWidth = uint32(segment.Data[3])<<8 | uint32(segment.Data[4])
|
||||
metadataExtracted = true
|
||||
|
||||
if allMetadataExtracted() {
|
||||
break parseSegments
|
||||
}
|
||||
|
||||
case markerTypeStartOfScan,
|
||||
markerTypeEndOfImage:
|
||||
break parseSegments
|
||||
|
||||
case markerTypeApp1:
|
||||
if bytes.HasPrefix(segment.Data, []byte(exifSignature)) {
|
||||
exif = segment.Data
|
||||
}
|
||||
case markerTypeApp2:
|
||||
if len(segment.Data) < len(iccProfileIdentifier)+2 {
|
||||
continue
|
||||
}
|
||||
|
||||
for i := range iccProfileIdentifier {
|
||||
if segment.Data[i] != iccProfileIdentifier[i] {
|
||||
continue parseSegments
|
||||
}
|
||||
}
|
||||
|
||||
iccData, iccErr := md.ICCProfileData()
|
||||
if iccData != nil || iccErr != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
chunkTotal := segment.Data[len(iccProfileIdentifier)+1]
|
||||
if iccProfileChunks == nil {
|
||||
iccProfileChunks = make([][]byte, chunkTotal)
|
||||
} else if int(chunkTotal) != len(iccProfileChunks) {
|
||||
md.SetICCProfileError(fmt.Errorf("inconsistent ICC profile chunk count"))
|
||||
continue
|
||||
}
|
||||
|
||||
chunkNum := segment.Data[len(iccProfileIdentifier)]
|
||||
if chunkNum == 0 || int(chunkNum) > len(iccProfileChunks) {
|
||||
md.SetICCProfileError(fmt.Errorf("invalid ICC profile chunk number"))
|
||||
continue
|
||||
}
|
||||
if iccProfileChunks[chunkNum-1] != nil {
|
||||
md.SetICCProfileError(fmt.Errorf("duplicated ICC profile chunk"))
|
||||
continue
|
||||
}
|
||||
iccProfileChunksExtracted++
|
||||
iccProfileChunks[chunkNum-1] = segment.Data[len(iccProfileIdentifier)+2:]
|
||||
|
||||
if allMetadataExtracted() {
|
||||
break parseSegments
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !metadataExtracted {
|
||||
return nil, fmt.Errorf("no metadata found")
|
||||
}
|
||||
md.ExifData = exif
|
||||
|
||||
// Incomplete or missing ICC profile
|
||||
if len(iccProfileChunks) != iccProfileChunksExtracted {
|
||||
_, iccErr := md.ICCProfileData()
|
||||
if iccErr == nil {
|
||||
md.SetICCProfileError(fmt.Errorf("incomplete ICC profile data"))
|
||||
}
|
||||
return md, nil
|
||||
}
|
||||
|
||||
iccProfileData := bytes.Buffer{}
|
||||
for i := range iccProfileChunks {
|
||||
iccProfileData.Write(iccProfileChunks[i])
|
||||
}
|
||||
md.SetICCProfileData(iccProfileData.Bytes())
|
||||
|
||||
return md, nil
|
||||
}
|
||||
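Since Load only consumes the leading JPEG segments and hands back a stream that replays them, a caller can read the metadata and then still decode the full image from the returned reader. A hedged usage sketch follows; the input file name is hypothetical, and the meta.Data fields are the ones populated above.

package main

import (
	"fmt"
	"image/jpeg"
	"os"

	"github.com/kovidgoyal/imaging/prism/meta/jpegmeta"
)

func main() {
	f, err := os.Open("photo.jpg") // hypothetical input file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Load consumes only the leading segments; imgStream replays them,
	// so the full image can still be decoded afterwards.
	md, imgStream, err := jpegmeta.Load(f)
	if err != nil {
		panic(err)
	}
	fmt.Println(md.PixelWidth, md.PixelHeight, md.BitsPerComponent)

	img, err := jpeg.Decode(imgStream)
	if err != nil {
		panic(err)
	}
	fmt.Println(img.Bounds())
}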
89
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/marker.go
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
package jpegmeta
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/kovidgoyal/imaging/streams"
|
||||
)
|
||||
|
||||
var invalidMarker = marker{Type: markerTypeInvalid}
|
||||
|
||||
type marker struct {
|
||||
Type markerType
|
||||
DataLength int
|
||||
}
|
||||
|
||||
func makeMarker(mType byte, r io.Reader) (marker, error) {
|
||||
var length uint16
|
||||
switch mType {
|
||||
|
||||
case
|
||||
byte(markerTypeRestart0),
|
||||
byte(markerTypeRestart1),
|
||||
byte(markerTypeRestart2),
|
||||
byte(markerTypeRestart3),
|
||||
byte(markerTypeRestart4),
|
||||
byte(markerTypeRestart5),
|
||||
byte(markerTypeRestart6),
|
||||
byte(markerTypeRestart7),
|
||||
byte(markerTypeStartOfImage),
|
||||
byte(markerTypeEndOfImage):
|
||||
|
||||
length = 2
|
||||
|
||||
case byte(markerTypeStartOfFrameBaseline),
|
||||
byte(markerTypeStartOfFrameProgressive),
|
||||
byte(markerTypeDefineHuffmanTable),
|
||||
byte(markerTypeStartOfScan),
|
||||
byte(markerTypeDefineQuantisationTable),
|
||||
byte(markerTypeDefineRestartInterval),
|
||||
byte(markerTypeApp0),
|
||||
byte(markerTypeApp1),
|
||||
byte(markerTypeApp2),
|
||||
byte(markerTypeApp3),
|
||||
byte(markerTypeApp4),
|
||||
byte(markerTypeApp5),
|
||||
byte(markerTypeApp6),
|
||||
byte(markerTypeApp7),
|
||||
byte(markerTypeApp8),
|
||||
byte(markerTypeApp9),
|
||||
byte(markerTypeApp10),
|
||||
byte(markerTypeApp11),
|
||||
byte(markerTypeApp12),
|
||||
byte(markerTypeApp13),
|
||||
byte(markerTypeApp14),
|
||||
byte(markerTypeApp15),
|
||||
byte(markerTypeComment):
|
||||
|
||||
var err error
|
||||
if err = binary.Read(r, binary.BigEndian, &length); err != nil {
|
||||
return invalidMarker, err
|
||||
}
|
||||
|
||||
default:
|
||||
return invalidMarker, fmt.Errorf("unrecognised marker type %0x", mType)
|
||||
}
|
||||
|
||||
return marker{
|
||||
Type: markerType(mType),
|
||||
DataLength: int(length) - 2,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func readMarker(r io.Reader) (marker, error) {
|
||||
b, err := streams.ReadByte(r)
|
||||
if err != nil {
|
||||
return invalidMarker, err
|
||||
}
|
||||
|
||||
if b != 0xff {
|
||||
return invalidMarker, fmt.Errorf("invalid marker identifier %0x", b)
|
||||
}
|
||||
if b, err = streams.ReadByte(r); err != nil {
|
||||
return invalidMarker, err
|
||||
}
|
||||
|
||||
return makeMarker(b, r)
|
||||
}
|
||||
115
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/markertype.go
generated
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
package jpegmeta
|
||||
|
||||
import "fmt"
|
||||
|
||||
type markerType int
|
||||
|
||||
const (
|
||||
markerTypeInvalid markerType = 0x00
|
||||
markerTypeStartOfFrameBaseline markerType = 0xc0
|
||||
markerTypeStartOfFrameProgressive markerType = 0xc2
|
||||
markerTypeDefineHuffmanTable markerType = 0xc4
|
||||
markerTypeRestart0 markerType = 0xd0
|
||||
markerTypeRestart1 markerType = 0xd1
|
||||
markerTypeRestart2 markerType = 0xd2
|
||||
markerTypeRestart3 markerType = 0xd3
|
||||
markerTypeRestart4 markerType = 0xd4
|
||||
markerTypeRestart5 markerType = 0xd5
|
||||
markerTypeRestart6 markerType = 0xd6
|
||||
markerTypeRestart7 markerType = 0xd7
|
||||
markerTypeStartOfImage markerType = 0xd8
|
||||
markerTypeEndOfImage markerType = 0xd9
|
||||
markerTypeStartOfScan markerType = 0xda
|
||||
markerTypeDefineQuantisationTable markerType = 0xdb
|
||||
markerTypeDefineRestartInterval markerType = 0xdd
|
||||
markerTypeApp0 markerType = 0xe0
|
||||
markerTypeApp1 markerType = 0xe1
|
||||
markerTypeApp2 markerType = 0xe2
|
||||
markerTypeApp3 markerType = 0xe3
|
||||
markerTypeApp4 markerType = 0xe4
|
||||
markerTypeApp5 markerType = 0xe5
|
||||
markerTypeApp6 markerType = 0xe6
|
||||
markerTypeApp7 markerType = 0xe7
|
||||
markerTypeApp8 markerType = 0xe8
|
||||
markerTypeApp9 markerType = 0xe9
|
||||
markerTypeApp10 markerType = 0xea
|
||||
markerTypeApp11 markerType = 0xeb
|
||||
markerTypeApp12 markerType = 0xec
|
||||
markerTypeApp13 markerType = 0xed
|
||||
markerTypeApp14 markerType = 0xee
|
||||
markerTypeApp15 markerType = 0xef
|
||||
markerTypeComment markerType = 0xfe
|
||||
)
|
||||
|
||||
func (mt markerType) String() string {
|
||||
switch mt {
|
||||
case markerTypeStartOfFrameBaseline:
|
||||
return "SOF0"
|
||||
case markerTypeStartOfFrameProgressive:
|
||||
return "SOF2"
|
||||
case markerTypeDefineHuffmanTable:
|
||||
return "DHT"
|
||||
case markerTypeRestart0:
|
||||
return "RST0"
|
||||
case markerTypeRestart1:
|
||||
return "RST1"
|
||||
case markerTypeRestart2:
|
||||
return "RST2"
|
||||
case markerTypeRestart3:
|
||||
return "RST3"
|
||||
case markerTypeRestart4:
|
||||
return "RST4"
|
||||
case markerTypeRestart5:
|
||||
return "RST5"
|
||||
case markerTypeRestart6:
|
||||
return "RST6"
|
||||
case markerTypeRestart7:
|
||||
return "RST7"
|
||||
case markerTypeStartOfImage:
|
||||
return "SOI"
|
||||
case markerTypeEndOfImage:
|
||||
return "EOI"
|
||||
case markerTypeStartOfScan:
|
||||
return "SOS"
|
||||
case markerTypeDefineQuantisationTable:
|
||||
return "DQT"
|
||||
case markerTypeDefineRestartInterval:
|
||||
return "DRI"
|
||||
case markerTypeApp0:
|
||||
return "APP0"
|
||||
case markerTypeApp1:
|
||||
return "APP1"
|
||||
case markerTypeApp2:
|
||||
return "APP2"
|
||||
case markerTypeApp3:
|
||||
return "APP3"
|
||||
case markerTypeApp4:
|
||||
return "APP4"
|
||||
case markerTypeApp5:
|
||||
return "APP5"
|
||||
case markerTypeApp6:
|
||||
return "APP6"
|
||||
case markerTypeApp7:
|
||||
return "APP7"
|
||||
case markerTypeApp8:
|
||||
return "APP8"
|
||||
case markerTypeApp9:
|
||||
return "APP9"
|
||||
case markerTypeApp10:
|
||||
return "APP10"
|
||||
case markerTypeApp11:
|
||||
return "APP11"
|
||||
case markerTypeApp12:
|
||||
return "APP12"
|
||||
case markerTypeApp13:
|
||||
return "APP13"
|
||||
case markerTypeApp14:
|
||||
return "APP14"
|
||||
case markerTypeApp15:
|
||||
return "APP15"
|
||||
case markerTypeComment:
|
||||
return "COM"
|
||||
default:
|
||||
return fmt.Sprintf("Unknown (%0x)", byte(mt))
|
||||
}
|
||||
}
|
||||
38
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/segment.go
generated
vendored
Normal file
@@ -0,0 +1,38 @@
package jpegmeta

import (
	"io"
)

var invalidSegment = segment{Marker: invalidMarker}

type segment struct {
	Marker marker
	Data   []byte
}

func makeSegment(markerType byte, r io.Reader) (segment, error) {
	m, err := makeMarker(markerType, r)
	return segment{Marker: m}, err
}

func readSegment(r io.Reader) (segment, error) {
	m, err := readMarker(r)
	if err != nil {
		return invalidSegment, err
	}

	seg := segment{
		Marker: m,
	}
	if m.DataLength > 0 {
		seg.Data = make([]byte, m.DataLength)

		_, err := io.ReadFull(r, seg.Data)
		if err != nil {
			return invalidSegment, err
		}
	}

	return seg, nil
}
56
vendor/github.com/kovidgoyal/imaging/prism/meta/jpegmeta/segmentreader.go
generated
vendored
Normal file
@@ -0,0 +1,56 @@
package jpegmeta

import (
	"io"

	"github.com/kovidgoyal/imaging/streams"
)

type segmentReader struct {
	reader             io.Reader
	inEntropyCodedData bool
}

func (sr *segmentReader) ReadSegment() (segment, error) {
	if sr.inEntropyCodedData {
		for {
			b, err := streams.ReadByte(sr.reader)
			if err != nil {
				return segment{}, err
			}

			if b == 0xFF {
				if b, err = streams.ReadByte(sr.reader); err != nil {
					return segment{}, err
				}

				if b != 0x00 {
					seg, err := makeSegment(b, sr.reader)
					if err != nil {
						return segment{}, err
					}

					sr.inEntropyCodedData = seg.Marker.Type == markerTypeStartOfScan ||
						(seg.Marker.Type >= markerTypeRestart0 && seg.Marker.Type <= markerTypeRestart7)

					return seg, err
				}
			}
		}
	}

	seg, err := readSegment(sr.reader)
	if err != nil {
		return seg, err
	}

	sr.inEntropyCodedData = seg.Marker.Type == markerTypeStartOfScan

	return seg, nil
}

func NewSegmentReader(r io.Reader) *segmentReader {
	return &segmentReader{
		reader: r,
	}
}
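The segment reader above alternates between plain segment parsing and scanning entropy-coded data for the next 0xFF marker, tracking that state in inEntropyCodedData. Here is a small illustrative loop over the exported NewSegmentReader API; the segment and marker struct types are unexported, but their exported fields and the markerType Stringer are reachable through the returned values, and the input file name is hypothetical.

package main

import (
	"fmt"
	"os"

	"github.com/kovidgoyal/imaging/prism/meta/jpegmeta"
)

func main() {
	f, err := os.Open("photo.jpg") // hypothetical input file
	if err != nil {
		panic(err)
	}
	defer f.Close()

	sr := jpegmeta.NewSegmentReader(f)
	for {
		seg, err := sr.ReadSegment()
		if err != nil {
			fmt.Println("stopping:", err)
			return
		}
		// markerType implements Stringer, so this prints SOI, APP0, DQT, SOS, ...
		fmt.Printf("%s (%d bytes of data)\n", seg.Marker.Type, len(seg.Data))
		if seg.Marker.Type.String() == "EOI" {
			return
		}
	}
}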
21
vendor/github.com/kovidgoyal/imaging/prism/meta/pngmeta/chunkheader.go
generated
vendored
Normal file
@@ -0,0 +1,21 @@
package pngmeta

import (
	"encoding/binary"
	"fmt"
	"io"
)

type chunkHeader struct {
	Length    uint32
	ChunkType [4]byte
}

func (ch chunkHeader) String() string {
	return fmt.Sprintf("%c%c%c%c(%d)", ch.ChunkType[0], ch.ChunkType[1], ch.ChunkType[2], ch.ChunkType[3], ch.Length)
}

func readChunkHeader(r io.Reader) (ch chunkHeader, err error) {
	err = binary.Read(r, binary.BigEndian, &ch)
	return
}
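readChunkHeader relies on encoding/binary filling the fixed-size header struct: a big-endian 4-byte length followed by the 4-byte chunk type (the WebP counterpart later in this change has the same shape but is little-endian with the fields swapped). A standalone sketch of that layout, using a local copy of the struct rather than the unexported one above:

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// pngChunkHeader mirrors the layout readChunkHeader expects:
// a 4-byte big-endian length followed by a 4-byte chunk type.
type pngChunkHeader struct {
	Length    uint32
	ChunkType [4]byte
}

func main() {
	// First 8 bytes of a typical IHDR chunk: length 13, type "IHDR".
	raw := []byte{0x00, 0x00, 0x00, 0x0D, 'I', 'H', 'D', 'R'}

	var ch pngChunkHeader
	if err := binary.Read(bytes.NewReader(raw), binary.BigEndian, &ch); err != nil {
		panic(err)
	}
	fmt.Printf("%s length=%d\n", ch.ChunkType[:], ch.Length) // IHDR length=13
}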
6
vendor/github.com/kovidgoyal/imaging/prism/meta/pngmeta/chunktypes.go
generated
vendored
Normal file
@@ -0,0 +1,6 @@
package pngmeta

var chunkTypeiCCP = [4]byte{'i', 'C', 'C', 'P'}
var chunkTypeIDAT = [4]byte{'I', 'D', 'A', 'T'}
var chunkTypeIEND = [4]byte{'I', 'E', 'N', 'D'}
var chunkTypeIHDR = [4]byte{'I', 'H', 'D', 'R'}
2
vendor/github.com/kovidgoyal/imaging/prism/meta/pngmeta/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
// Package pngmeta provides support for working with embedded PNG metadata.
package pngmeta
163
vendor/github.com/kovidgoyal/imaging/prism/meta/pngmeta/pngmeta.go
generated
vendored
Normal file
@@ -0,0 +1,163 @@
|
||||
package pngmeta
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"compress/zlib"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/kovidgoyal/imaging/prism/meta"
|
||||
"github.com/kovidgoyal/imaging/streams"
|
||||
)
|
||||
|
||||
// Format specifies the image format handled by this package
|
||||
var Format = meta.ImageFormat("PNG")
|
||||
|
||||
var pngSignature = [8]byte{0x89, 'P', 'N', 'G', 0x0D, 0x0A, 0x1A, 0x0A}
|
||||
|
||||
// Load loads the metadata for a PNG image stream.
|
||||
//
|
||||
// Only as much of the stream is consumed as necessary to extract the metadata;
|
||||
// the returned stream contains a buffered copy of the consumed data such that
|
||||
// reading from it will produce the same results as fully reading the input
|
||||
// stream. This provides a convenient way to load the full image after loading
|
||||
// the metadata.
|
||||
//
|
||||
// An error is returned if basic metadata could not be extracted. The returned
|
||||
// stream still provides the full image data.
|
||||
func Load(r io.Reader) (md *meta.Data, imgStream io.Reader, err error) {
|
||||
imgStream, err = streams.CallbackWithSeekable(r, func(r io.Reader) (err error) {
|
||||
md, err = ExtractMetadata(r)
|
||||
return
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
func read_chunk(r io.Reader, length uint32) (ans []byte, err error) {
|
||||
ans = make([]byte, length+4)
|
||||
_, err = io.ReadFull(r, ans)
|
||||
ans = ans[:len(ans)-4] // we don't care about the chunk CRC
|
||||
return
|
||||
}
|
||||
|
||||
func skip_chunk(r io.Reader, length uint32) (err error) {
|
||||
return streams.Skip(r, int64(length)+4)
|
||||
}
|
||||
|
||||
// Same as Load() except that no new stream is provided
|
||||
func ExtractMetadata(r io.Reader) (md *meta.Data, err error) {
|
||||
metadataExtracted := false
|
||||
md = &meta.Data{Format: Format}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if !metadataExtracted {
|
||||
md = nil
|
||||
}
|
||||
err = fmt.Errorf("panic while extracting image metadata: %v", r)
|
||||
}
|
||||
}()
|
||||
|
||||
allMetadataExtracted := func() bool {
|
||||
iccData, iccErr := md.ICCProfileData()
|
||||
return metadataExtracted && (iccData != nil || iccErr != nil)
|
||||
}
|
||||
|
||||
pngSig := [8]byte{}
|
||||
if _, err := io.ReadFull(r, pngSig[:]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if pngSig != pngSignature {
|
||||
return nil, fmt.Errorf("invalid PNG signature")
|
||||
}
|
||||
var chunk []byte
|
||||
|
||||
decode := func(target any) error {
|
||||
if n, err := binary.Decode(chunk, binary.BigEndian, target); err == nil {
|
||||
chunk = chunk[n:]
|
||||
return nil
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
parseChunks:
|
||||
for {
|
||||
ch, err := readChunkHeader(r)
|
||||
if err != nil {
|
||||
if errors.Is(err, io.EOF) {
|
||||
break
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
switch ch.ChunkType {
|
||||
|
||||
case chunkTypeIHDR:
|
||||
if chunk, err = read_chunk(r, ch.Length); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = decode(&md.PixelWidth); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err = decode(&md.PixelHeight); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
md.BitsPerComponent = uint32(chunk[0])
|
||||
metadataExtracted = true
|
||||
if allMetadataExtracted() {
|
||||
break parseChunks
|
||||
}
|
||||
|
||||
case chunkTypeiCCP:
|
||||
if chunk, err = read_chunk(r, ch.Length); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
idx := bytes.IndexByte(chunk, 0)
|
||||
if idx < 0 || idx > 80 {
|
||||
return nil, fmt.Errorf("null terminator not found reading ICC profile name")
|
||||
}
|
||||
chunk = chunk[idx+1:]
|
||||
if len(chunk) < 1 {
|
||||
return nil, fmt.Errorf("incomplete ICCP chunk in PNG file")
|
||||
}
|
||||
if compressionMethod := chunk[0]; compressionMethod != 0x00 {
|
||||
return nil, fmt.Errorf("unknown compression method (%d)", compressionMethod)
|
||||
}
|
||||
chunk = chunk[1:]
|
||||
// Decompress ICC profile data
|
||||
zReader, err := zlib.NewReader(bytes.NewReader(chunk))
|
||||
if err != nil {
|
||||
md.SetICCProfileError(err)
|
||||
break
|
||||
}
|
||||
defer zReader.Close()
|
||||
profileData := &bytes.Buffer{}
|
||||
_, err = io.Copy(profileData, zReader)
|
||||
if err == nil {
|
||||
md.SetICCProfileData(profileData.Bytes())
|
||||
if allMetadataExtracted() {
|
||||
break parseChunks
|
||||
}
|
||||
} else {
|
||||
md.SetICCProfileError(err)
|
||||
}
|
||||
|
||||
case chunkTypeIDAT, chunkTypeIEND:
|
||||
break parseChunks
|
||||
|
||||
default:
|
||||
if err = skip_chunk(r, ch.Length); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !metadataExtracted {
|
||||
return nil, fmt.Errorf("no metadata found")
|
||||
}
|
||||
|
||||
return md, nil
|
||||
}
|
||||
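The iCCP branch above expects a NUL-terminated profile name, a single compression-method byte that must be zero, and then zlib-compressed profile bytes. A standalone round-trip of that payload layout, with the profile name and bytes made up for illustration:

package main

import (
	"bytes"
	"compress/zlib"
	"fmt"
	"io"
)

func main() {
	// Build a minimal iCCP chunk payload the way the parser above expects it:
	// profile name, NUL terminator, compression method 0, zlib-compressed data.
	profile := []byte("fake ICC profile bytes") // placeholder, not a real profile
	var compressed bytes.Buffer
	zw := zlib.NewWriter(&compressed)
	zw.Write(profile)
	zw.Close()

	payload := append([]byte("my-profile\x00\x00"), compressed.Bytes()...)

	// Parse it back: name up to the NUL, then the method byte, then the data.
	idx := bytes.IndexByte(payload, 0)
	name := string(payload[:idx])
	method := payload[idx+1]
	zr, err := zlib.NewReader(bytes.NewReader(payload[idx+2:]))
	if err != nil {
		panic(err)
	}
	defer zr.Close()
	out, _ := io.ReadAll(zr)
	fmt.Println(name, method, string(out)) // my-profile 0 fake ICC profile bytes
}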
21
vendor/github.com/kovidgoyal/imaging/prism/meta/webpmeta/chunkheader.go
generated
vendored
Normal file
@@ -0,0 +1,21 @@
package webpmeta

import (
	"encoding/binary"
	"fmt"
	"io"
)

type chunkHeader struct {
	ChunkType [4]byte
	Length    uint32
}

func (ch chunkHeader) String() string {
	return fmt.Sprintf("%c%c%c%c(%d)", ch.ChunkType[0], ch.ChunkType[1], ch.ChunkType[2], ch.ChunkType[3], ch.Length)
}

func readChunkHeader(r io.Reader) (ch chunkHeader, err error) {
	err = binary.Read(r, binary.LittleEndian, &ch)
	return
}
10
vendor/github.com/kovidgoyal/imaging/prism/meta/webpmeta/chunktypes.go
generated
vendored
Normal file
@@ -0,0 +1,10 @@
package webpmeta

var (
	chunkTypeRIFF = [4]byte{'R', 'I', 'F', 'F'}
	chunkTypeWEBP = [4]byte{'W', 'E', 'B', 'P'}
	chunkTypeVP8  = [4]byte{'V', 'P', '8', ' '}
	chunkTypeVP8L = [4]byte{'V', 'P', '8', 'L'}
	chunkTypeVP8X = [4]byte{'V', 'P', '8', 'X'}
	chunkTypeICCP = [4]byte{'I', 'C', 'C', 'P'}
)
2
vendor/github.com/kovidgoyal/imaging/prism/meta/webpmeta/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
// Package webpmeta provides support for working with embedded WebP metadata.
package webpmeta
213
vendor/github.com/kovidgoyal/imaging/prism/meta/webpmeta/webpmeta.go
generated
vendored
Normal file
@@ -0,0 +1,213 @@
|
||||
package webpmeta
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/kovidgoyal/imaging/prism/meta"
|
||||
"github.com/kovidgoyal/imaging/streams"
|
||||
)
|
||||
|
||||
// Format specifies the image format handled by this package
|
||||
var Format = meta.ImageFormat("WebP")
|
||||
|
||||
// Signature is FourCC bytes in the RIFF chunk, "RIFF????WEBP"
|
||||
var webpSignature = [4]byte{'W', 'E', 'B', 'P'}
|
||||
|
||||
type webpFormat int
|
||||
|
||||
const (
|
||||
webpFormatSimple = webpFormat(iota)
|
||||
webpFormatLossless
|
||||
webpFormatExtended
|
||||
)
|
||||
|
||||
// Bits per component is fixed in WebP
|
||||
const bitsPerComponent = 8
|
||||
|
||||
// Load loads the metadata for a WebP image stream.
|
||||
//
|
||||
// Only as much of the stream is consumed as necessary to extract the metadata;
|
||||
// the returned stream contains a buffered copy of the consumed data such that
|
||||
// reading from it will produce the same results as fully reading the input
|
||||
// stream. This provides a convenient way to load the full image after loading
|
||||
// the metadata.
|
||||
//
|
||||
// An error is returned if basic metadata could not be extracted. The returned
|
||||
// stream still provides the full image data.
|
||||
func Load(r io.Reader) (md *meta.Data, imgStream io.Reader, err error) {
|
||||
imgStream, err = streams.CallbackWithSeekable(r, func(r io.Reader) (err error) {
|
||||
md, err = ExtractMetadata(r)
|
||||
return
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
// Same as Load() except that no new stream is provided
|
||||
func ExtractMetadata(r io.Reader) (md *meta.Data, err error) {
|
||||
md = &meta.Data{Format: Format}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
err = fmt.Errorf("panic while extracting image metadata: %v", r)
|
||||
}
|
||||
}()
|
||||
|
||||
if err := verifySignature(r); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
format, chunkLen, err := readWebPFormat(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = parseFormat(r, md, format, chunkLen)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return md, nil
|
||||
}
|
||||
|
||||
func parseFormat(r io.Reader, md *meta.Data, format webpFormat, chunkLen uint32) error {
|
||||
switch format {
|
||||
case webpFormatExtended:
|
||||
return parseWebpExtended(r, md, chunkLen)
|
||||
case webpFormatSimple:
|
||||
return parseWebpSimple(r, md, chunkLen)
|
||||
case webpFormatLossless:
|
||||
return parseWebpLossless(r, md, chunkLen)
|
||||
default:
|
||||
return errors.New("unknown WebP format")
|
||||
}
|
||||
}
|
||||
|
||||
func parseWebpSimple(r io.Reader, md *meta.Data, chunkLen uint32) error {
|
||||
var buf [10]byte
|
||||
b := buf[:]
|
||||
if _, err := io.ReadFull(r, b); err != nil {
|
||||
return err
|
||||
}
|
||||
b = b[3:]
|
||||
if b[0] != 0x9d || b[1] != 0x01 || b[2] != 0x2a {
|
||||
return errors.New("corrupted WebP VP8 frame")
|
||||
}
|
||||
md.PixelWidth = uint32(b[4]&((1<<6)-1))<<8 | uint32(b[3])
|
||||
md.PixelHeight = uint32(b[6]&((1<<6)-1))<<8 | uint32(b[5])
|
||||
md.BitsPerComponent = bitsPerComponent
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseWebpLossless(r io.Reader, md *meta.Data, chunkLen uint32) error {
|
||||
var b [5]byte
|
||||
if _, err := io.ReadFull(r, b[:]); err != nil {
|
||||
return err
|
||||
}
|
||||
if b[0] != 0x2f {
|
||||
return errors.New("corrupted lossless WebP")
|
||||
}
|
||||
// Next 28 bits are width-1 and height-1.
|
||||
w := uint32(b[1])
|
||||
w |= uint32(b[2]&((1<<6)-1)) << 8
|
||||
w &= 0x3FFF
|
||||
|
||||
h := uint32((b[2] >> 6) & ((1 << 2) - 1))
|
||||
h |= uint32(b[3]) << 2
|
||||
h |= uint32(b[4]&((1<<4)-1)) << 10
|
||||
h &= 0x3FFF
|
||||
|
||||
md.PixelWidth = w + 1
|
||||
md.PixelHeight = h + 1
|
||||
md.BitsPerComponent = bitsPerComponent
|
||||
return nil
|
||||
}
|
||||
|
||||
func parseWebpExtended(r io.Reader, md *meta.Data, chunkLen uint32) error {
|
||||
if chunkLen != 10 {
|
||||
return fmt.Errorf("unexpected VP8X chunk length: %d", chunkLen)
|
||||
}
|
||||
var hb [10]byte
|
||||
h := hb[:]
|
||||
if _, err := io.ReadFull(r, h); err != nil {
|
||||
return err
|
||||
}
|
||||
hasProfile := h[0]&(1<<5) != 0
|
||||
h = h[4:]
|
||||
w := uint32(h[0]) | uint32(h[1])<<8 | uint32(h[2])<<16
|
||||
ht := uint32(h[3]) | uint32(h[4])<<8 | uint32(h[5])<<16
|
||||
md.PixelWidth = w + 1
|
||||
md.PixelHeight = ht + 1
|
||||
md.BitsPerComponent = bitsPerComponent
|
||||
|
||||
if hasProfile {
|
||||
data, err := readICCP(r, chunkLen)
|
||||
if err != nil {
|
||||
md.SetICCProfileError(err)
|
||||
} else {
|
||||
md.SetICCProfileData(data)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readICCP(r io.Reader, chunkLen uint32) ([]byte, error) {
|
||||
// Skip to the end of the chunk.
|
||||
if err := skip(r, chunkLen-10); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// ICCP _must_ be the next chunk.
|
||||
ch, err := readChunkHeader(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ch.ChunkType != chunkTypeICCP {
|
||||
return nil, errors.New("no expected ICCP chunk")
|
||||
}
|
||||
|
||||
// Extract ICCP.
|
||||
data := make([]byte, ch.Length)
|
||||
if _, err := io.ReadFull(r, data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func verifySignature(r io.Reader) error {
|
||||
ch, err := readChunkHeader(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if ch.ChunkType != chunkTypeRIFF {
|
||||
return errors.New("missing RIFF header")
|
||||
}
|
||||
var fourcc [4]byte
|
||||
if _, err := io.ReadFull(r, fourcc[:]); err != nil {
|
||||
return err
|
||||
}
|
||||
if fourcc != webpSignature {
|
||||
return errors.New("not a WEBP file")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readWebPFormat(r io.Reader) (format webpFormat, length uint32, err error) {
|
||||
ch, err := readChunkHeader(r)
|
||||
if err != nil {
|
||||
return 0, 0, err
|
||||
}
|
||||
switch ch.ChunkType {
|
||||
case chunkTypeVP8:
|
||||
return webpFormatSimple, ch.Length, nil
|
||||
case chunkTypeVP8L:
|
||||
return webpFormatLossless, ch.Length, nil
|
||||
case chunkTypeVP8X:
|
||||
return webpFormatExtended, ch.Length, nil
|
||||
default:
|
||||
return 0, 0, fmt.Errorf("unexpected WEBP format: %s", string(ch.ChunkType[:]))
|
||||
}
|
||||
}
|
||||
|
||||
func skip(r io.Reader, length uint32) error {
|
||||
return streams.Skip(r, int64(length))
|
||||
}
|
||||
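parseWebpLossless unpacks width-1 and height-1 from the 28 bits that follow the 0x2f VP8L signature byte. Below is a standalone worked example of the same bit layout; the header bytes are constructed by hand for an 800x600 image, so this is an illustration, not the library function itself.

package main

import "fmt"

// vp8lDimensions mirrors the bit unpacking done above: after the 0x2f
// signature byte, width-1 occupies the first 14 bits (low bits first)
// and height-1 the next 14 bits.
func vp8lDimensions(b [5]byte) (w, h uint32) {
	w = uint32(b[1]) | uint32(b[2]&0x3F)<<8
	w &= 0x3FFF

	h = uint32(b[2]>>6) | uint32(b[3])<<2 | uint32(b[4]&0x0F)<<10
	h &= 0x3FFF
	return w + 1, h + 1
}

func main() {
	// Hand-built header bytes for an 800x600 lossless WebP:
	// width-1 = 799 = 0x31F, height-1 = 599 = 0x257.
	b := [5]byte{0x2F, 0x1F, 0xC3, 0x95, 0x00}
	w, h := vp8lDimensions(b)
	fmt.Println(w, h) // 800 600
}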
7
vendor/github.com/kovidgoyal/imaging/publish.py
generated
vendored
@@ -5,6 +5,9 @@ import os
|
||||
import subprocess
|
||||
|
||||
|
||||
VERSION = "1.7.2"
|
||||
|
||||
|
||||
def run(*args: str):
|
||||
cp = subprocess.run(args)
|
||||
if cp.returncode != 0:
|
||||
@@ -12,14 +15,14 @@ def run(*args: str):
|
||||
|
||||
|
||||
def main():
|
||||
version = input('Enter the version to publish: ')
|
||||
version = VERSION
|
||||
try:
|
||||
ans = input(f'Publish version \033[91m{version}\033[m (y/n): ')
|
||||
except KeyboardInterrupt:
|
||||
ans = 'n'
|
||||
if ans.lower() != 'y':
|
||||
return
|
||||
os.environ['GITHUB_TOKEN'] = open(os.path.join(os.environ['PENV'], 'github-token')).read().strip()
|
||||
os.environ['GITHUB_TOKEN'] = open(os.path.join(os.environ['PENV'], 'github-token')).read().strip().partition(':')[2]
|
||||
run('git', 'tag', '-a', 'v' + version, '-m', f'version {version}')
|
||||
run('git', 'push')
|
||||
run('goreleaser', 'release', '--clean')
|
||||
|
||||
48
vendor/github.com/kovidgoyal/imaging/resize.go
generated
vendored
@@ -61,7 +61,6 @@ func precomputeWeights(dstSize, srcSize int, filter ResampleFilter) [][]indexWei
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Resize(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Resize(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
if dstW < 0 || dstH < 0 {
|
||||
@@ -110,10 +109,10 @@ func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, width, src.h))
|
||||
weights := precomputeWeights(width, src.w, filter)
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, src.w*4)
|
||||
for y := range ys {
|
||||
src.scan(0, y, src.w, y+1, scanLine)
|
||||
for y := start; y < limit; y++ {
|
||||
src.Scan(0, y, src.w, y+1, scanLine)
|
||||
j0 := y * dst.Stride
|
||||
for x := range weights {
|
||||
var r, g, b, a float64
|
||||
@@ -137,7 +136,9 @@ func resizeHorizontal(img image.Image, width int, filter ResampleFilter) *image.
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -145,10 +146,10 @@ func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.N
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, height))
|
||||
weights := precomputeWeights(height, src.h, filter)
|
||||
parallel(0, src.w, func(xs <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, src.h*4)
|
||||
for x := range xs {
|
||||
src.scan(x, 0, x+1, src.h, scanLine)
|
||||
for x := start; x < limit; x++ {
|
||||
src.Scan(x, 0, x+1, src.h, scanLine)
|
||||
for y := range weights {
|
||||
var r, g, b, a float64
|
||||
for _, w := range weights[y] {
|
||||
@@ -171,7 +172,9 @@ func resizeVertical(img image.Image, height int, filter ResampleFilter) *image.N
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, src.w); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -183,34 +186,37 @@ func resizeNearest(img image.Image, width, height int) *image.NRGBA {
|
||||
|
||||
if dx > 1 && dy > 1 {
|
||||
src := newScanner(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
for x := range width {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
src.scan(srcX, srcY, srcX+1, srcY+1, dst.Pix[dstOff:dstOff+4])
|
||||
src.Scan(srcX, srcY, srcX+1, srcY+1, dst.Pix[dstOff:dstOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, height); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
} else {
|
||||
src := toNRGBA(img)
|
||||
parallel(0, height, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
srcY := int((float64(y) + 0.5) * dy)
|
||||
srcOff0 := srcY * src.Stride
|
||||
dstOff := y * dst.Stride
|
||||
for x := 0; x < width; x++ {
|
||||
for x := range width {
|
||||
srcX := int((float64(x) + 0.5) * dx)
|
||||
srcOff := srcOff0 + srcX*4
|
||||
copy(dst.Pix[dstOff:dstOff+4], src.Pix[srcOff:srcOff+4])
|
||||
dstOff += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, height); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -220,7 +226,6 @@ func resizeNearest(img image.Image, width, height int) *image.NRGBA {
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fit(srcImage, 800, 600, imaging.Lanczos)
|
||||
//
|
||||
func Fit(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
maxW, maxH := width, height
|
||||
|
||||
@@ -261,7 +266,6 @@ func Fit(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Fill(srcImage, 800, 600, imaging.Center, imaging.Lanczos)
|
||||
//
|
||||
func Fill(img image.Image, width, height int, anchor Anchor, filter ResampleFilter) *image.NRGBA {
|
||||
dstW, dstH := width, height
|
||||
|
||||
@@ -340,7 +344,6 @@ func resizeAndCrop(img image.Image, width, height int, anchor Anchor, filter Res
|
||||
// Example:
|
||||
//
|
||||
// dstImage := imaging.Thumbnail(srcImage, 100, 100, imaging.Lanczos)
|
||||
//
|
||||
func Thumbnail(img image.Image, width, height int, filter ResampleFilter) *image.NRGBA {
|
||||
return Fill(img, width, height, Center, filter)
|
||||
}
|
||||
@@ -367,7 +370,6 @@ func Thumbnail(img image.Image, width, height int, filter ResampleFilter) *image
|
||||
//
|
||||
// - NearestNeighbor
|
||||
// Fastest resampling filter, no antialiasing.
|
||||
//
|
||||
type ResampleFilter struct {
|
||||
Support float64
|
||||
Kernel func(float64) float64
|
||||
|
||||
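The hunks above swap the old channel-based parallel helper for run_in_parallel_over_range, whose callback receives a contiguous [start, limit) chunk of rows or columns instead of individual indices from a channel. Here is a rough standalone stand-in for that chunking pattern; the real helper lives in github.com/kovidgoyal/go-parallel and additionally converts panics into errors, while this sketch only shows the range splitting.

package main

import (
	"fmt"
	"runtime"
	"sync"
)

// runOverRange splits [start, limit) into one contiguous chunk per worker
// and calls f with the bounds of each chunk.
func runOverRange(numProcs int, f func(start, limit int), start, limit int) {
	if numProcs <= 0 {
		numProcs = runtime.GOMAXPROCS(0)
	}
	count := limit - start
	if count <= 0 {
		return
	}
	if numProcs > count {
		numProcs = count
	}
	chunk := (count + numProcs - 1) / numProcs

	var wg sync.WaitGroup
	for lo := start; lo < limit; lo += chunk {
		hi := min(lo+chunk, limit)
		wg.Add(1)
		go func(lo, hi int) {
			defer wg.Done()
			f(lo, hi)
		}(lo, hi)
	}
	wg.Wait()
}

func main() {
	rows := make([]int, 10)
	runOverRange(0, func(start, limit int) {
		for y := start; y < limit; y++ {
			rows[y] = y * y // process one scanline per iteration
		}
	}, 0, len(rows))
	fmt.Println(rows)
}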
41
vendor/github.com/kovidgoyal/imaging/scanner.go
generated
vendored
@@ -11,6 +11,10 @@ type scanner struct {
|
||||
palette []color.NRGBA
|
||||
}
|
||||
|
||||
func (s scanner) Bytes_per_channel() int { return 1 }
|
||||
func (s scanner) Num_of_channels() int { return 4 }
|
||||
func (s scanner) Bounds() image.Rectangle { return s.image.Bounds() }
|
||||
|
||||
func newScanner(img image.Image) *scanner {
|
||||
s := &scanner{
|
||||
image: img,
|
||||
@@ -27,8 +31,32 @@ func newScanner(img image.Image) *scanner {
|
||||
}
|
||||
|
||||
// scan scans the given rectangular region of the image into dst.
|
||||
func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
func (s *scanner) Scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
switch img := s.image.(type) {
|
||||
case *NRGB:
|
||||
j := 0
|
||||
if x2 == x1+1 {
|
||||
i := y1*img.Stride + x1*3
|
||||
for y := y1; y < y2; y++ {
|
||||
d := dst[j : j+4 : j+4]
|
||||
s := img.Pix[i : i+3 : i+3]
|
||||
d[0] = s[0]
|
||||
d[1] = s[1]
|
||||
d[2] = s[2]
|
||||
d[3] = 255
|
||||
j += 4
|
||||
i += img.Stride
|
||||
}
|
||||
} else {
|
||||
d := dst
|
||||
for y := y1; y < y2; y++ {
|
||||
s := img.Pix[y*img.Stride+x1*3:]
|
||||
for range x2 - x1 {
|
||||
d[0], d[1], d[2], d[3] = s[0], s[1], s[2], 255
|
||||
d, s = d[4:], s[3:]
|
||||
}
|
||||
}
|
||||
}
|
||||
case *image.NRGBA:
|
||||
size := (x2 - x1) * 4
|
||||
j := 0
|
||||
@@ -283,3 +311,14 @@ func (s *scanner) scan(x1, y1, x2, y2 int, dst []uint8) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type Scanner interface {
|
||||
Scan(x1, y1, x2, y2 int, dst []uint8)
|
||||
Bytes_per_channel() int
|
||||
Num_of_channels() int
|
||||
Bounds() image.Rectangle
|
||||
}
|
||||
|
||||
func NewNRGBAScanner(source_image image.Image) Scanner {
|
||||
return newScanner(source_image)
|
||||
}
|
||||
|
||||
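scanner.go now exposes the row scanner through an exported Scanner interface and the NewNRGBAScanner constructor. A small usage sketch built only on the names visible in this diff:

package main

import (
	"fmt"
	"image"
	"image/color"

	"github.com/kovidgoyal/imaging"
)

func main() {
	// Build a tiny 2x2 test image.
	img := image.NewNRGBA(image.Rect(0, 0, 2, 2))
	img.SetNRGBA(0, 0, color.NRGBA{R: 255, A: 255})
	img.SetNRGBA(1, 0, color.NRGBA{G: 255, A: 255})

	s := imaging.NewNRGBAScanner(img)
	b := s.Bounds()

	// Scan the first row into a flat byte slice, 4 channels of 1 byte each.
	row := make([]uint8, b.Dx()*s.Num_of_channels()*s.Bytes_per_channel())
	s.Scan(b.Min.X, b.Min.Y, b.Max.X, b.Min.Y+1, row)
	fmt.Println(row) // [255 0 0 255 0 255 0 255]
}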
133
vendor/github.com/kovidgoyal/imaging/streams/api.go
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
package streams
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// BufferedReadSeeker wraps an io.ReadSeeker to provide buffering.
|
||||
// It implements the io.ReadSeeker interface.
|
||||
type BufferedReadSeeker struct {
|
||||
reader *bufio.Reader
|
||||
seeker io.ReadSeeker
|
||||
}
|
||||
|
||||
// NewBufferedReadSeeker creates a new BufferedReadSeeker with a default buffer size.
|
||||
func NewBufferedReadSeeker(rs io.ReadSeeker) *BufferedReadSeeker {
|
||||
return &BufferedReadSeeker{
|
||||
reader: bufio.NewReader(rs),
|
||||
seeker: rs,
|
||||
}
|
||||
}
|
||||
|
||||
// Read reads data into p. It reads from the underlying buffered reader.
|
||||
func (brs *BufferedReadSeeker) Read(p []byte) (n int, err error) {
|
||||
return brs.reader.Read(p)
|
||||
}
|
||||
|
||||
// Seek sets the offset for the next Read. It is optimized to use the
|
||||
// buffer for seeks that land within the buffered data range.
|
||||
func (brs *BufferedReadSeeker) Seek(offset int64, whence int) (int64, error) {
|
||||
// Determine the current position (where the next Read would start)
|
||||
underlyingPos, err := brs.seeker.Seek(0, io.SeekCurrent)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
// The position of the stream as seen by clients
|
||||
logicalPos := underlyingPos - int64(brs.reader.Buffered())
|
||||
|
||||
// 2. Calculate the absolute target position for the seek
|
||||
var absTargetPos int64
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
absTargetPos = offset
|
||||
case io.SeekCurrent:
|
||||
absTargetPos = logicalPos + offset
|
||||
case io.SeekEnd:
|
||||
// Seeking from the end requires a fallback, as we don't know the end
|
||||
// position without invalidating the buffer's state relative to the seeker.
|
||||
return brs.fallbackSeek(offset, whence)
|
||||
default:
|
||||
return 0, fmt.Errorf("invalid whence: %d", whence)
|
||||
}
|
||||
|
||||
// 3. Check if the target position is within the current buffer
|
||||
if absTargetPos >= logicalPos && absTargetPos < underlyingPos {
|
||||
// The target is within the buffer. Calculate how many bytes to discard.
|
||||
bytesToDiscard := absTargetPos - logicalPos
|
||||
_, err := brs.reader.Discard(int(bytesToDiscard))
|
||||
if err != nil {
|
||||
// This is unlikely, but if Discard fails, fall back to a full seek
|
||||
return brs.fallbackSeek(offset, whence)
|
||||
}
|
||||
return absTargetPos, nil
|
||||
}
|
||||
|
||||
// 4. If the target is outside the buffer, perform a fallback seek
|
||||
return brs.fallbackSeek(absTargetPos, io.SeekStart)
|
||||
}
|
||||
|
||||
// fallbackSeek performs a seek on the underlying seeker and resets the buffer.
|
||||
func (brs *BufferedReadSeeker) fallbackSeek(offset int64, whence int) (int64, error) {
|
||||
newOffset, err := brs.seeker.Seek(offset, whence)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
brs.reader.Reset(brs.seeker)
|
||||
return newOffset, nil
|
||||
}
|
||||
|
||||
// Run the callback function with a buffered reader that supports Seek() and
|
||||
// Read(). Return an io.Reader that represents all content from the original
|
||||
// io.Reader.
|
||||
func CallbackWithSeekable(r io.Reader, callback func(io.Reader) error) (stream io.Reader, err error) {
|
||||
switch s := r.(type) {
|
||||
case io.ReadSeeker:
|
||||
pos, err := s.Seek(0, io.SeekCurrent)
|
||||
if err == nil {
|
||||
defer func() {
|
||||
_, serr := s.Seek(pos, io.SeekStart)
|
||||
if err == nil {
|
||||
err = serr
|
||||
}
|
||||
}()
|
||||
// Add buffering to s for efficiency
|
||||
bs := s
|
||||
switch r.(type) {
|
||||
case *BufferedReadSeeker, *bytes.Reader:
|
||||
default:
|
||||
bs = NewBufferedReadSeeker(s)
|
||||
}
|
||||
err = callback(bs)
|
||||
return s, err
|
||||
}
|
||||
case *bytes.Buffer:
|
||||
err = callback(bytes.NewReader(s.Bytes()))
|
||||
return s, err
|
||||
}
|
||||
rewindBuffer := &bytes.Buffer{}
|
||||
tee := io.TeeReader(r, rewindBuffer)
|
||||
err = callback(bufio.NewReader(tee))
|
||||
return io.MultiReader(rewindBuffer, r), err
|
||||
}
|
||||
|
||||
// Skip reading the specified number of bytes efficiently
|
||||
func Skip(r io.Reader, amt int64) (err error) {
|
||||
if s, ok := r.(io.Seeker); ok {
|
||||
if _, serr := s.Seek(amt, io.SeekCurrent); serr == nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
_, err = io.CopyN(io.Discard, r, amt)
|
||||
return
|
||||
}
|
||||
|
||||
// Read a single byte from the reader
|
||||
func ReadByte(r io.Reader) (ans byte, err error) {
|
||||
var v [1]byte
|
||||
_, err = io.ReadFull(r, v[:])
|
||||
ans = v[0]
|
||||
return
|
||||
}
|
||||
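BufferedReadSeeker's Seek first computes the logical position (the underlying position minus what bufio still holds) and serves forward seeks that land inside the buffer with a cheap Discard instead of seeking the wrapped reader. A hedged usage sketch over an in-memory reader:

package main

import (
	"fmt"
	"io"
	"strings"

	"github.com/kovidgoyal/imaging/streams"
)

func main() {
	rs := strings.NewReader("abcdefghijklmnopqrstuvwxyz")
	brs := streams.NewBufferedReadSeeker(rs)

	head := make([]byte, 3)
	if _, err := io.ReadFull(brs, head); err != nil {
		panic(err)
	}
	fmt.Println(string(head)) // abc

	// Seek forward by 5 bytes from the current logical position. The target
	// lies inside bufio's already-filled buffer, so it is served by
	// discarding buffered bytes rather than seeking the underlying reader.
	if _, err := brs.Seek(5, io.SeekCurrent); err != nil {
		panic(err)
	}
	next := make([]byte, 3)
	if _, err := io.ReadFull(brs, next); err != nil {
		panic(err)
	}
	fmt.Println(string(next)) // ijk
}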
41
vendor/github.com/kovidgoyal/imaging/tools.go
generated
vendored
@@ -30,12 +30,14 @@ func Clone(img image.Image) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, src.w, src.h))
|
||||
size := src.w * 4
|
||||
parallel(0, src.h, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := y * dst.Stride
|
||||
src.scan(0, y, src.w, y+1, dst.Pix[i:i+size])
|
||||
src.Scan(0, y, src.w, y+1, dst.Pix[i:i+size])
|
||||
}
|
||||
})
|
||||
}, 0, src.h); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -103,12 +105,14 @@ func Crop(img image.Image, rect image.Rectangle) *image.NRGBA {
|
||||
src := newScanner(img)
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, r.Dx(), r.Dy()))
|
||||
rowSize := r.Dx() * 4
|
||||
parallel(r.Min.Y, r.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
i := (y - r.Min.Y) * dst.Stride
|
||||
src.scan(r.Min.X, y, r.Max.X, y+1, dst.Pix[i:i+rowSize])
|
||||
src.Scan(r.Min.X, y, r.Max.X, y+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
}, r.Min.Y, r.Max.Y); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -142,17 +146,19 @@ func Paste(background, img image.Image, pos image.Point) *image.NRGBA {
|
||||
}
|
||||
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
for y := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for y := start; y < limit; y++ {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
i1 := y*dst.Stride + interRect.Min.X*4
|
||||
i2 := i1 + interRect.Dx()*4
|
||||
src.scan(x1, y1, x2, y2, dst.Pix[i1:i2])
|
||||
src.Scan(x1, y1, x2, y2, dst.Pix[i1:i2])
|
||||
}
|
||||
})
|
||||
}, interRect.Min.Y, interRect.Max.Y); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -184,7 +190,6 @@ func PasteCenter(background, img image.Image) *image.NRGBA {
|
||||
//
|
||||
// // Blend two opaque images of the same size.
|
||||
// dstImage := imaging.Overlay(imageOne, imageTwo, image.Pt(0, 0), 0.5)
|
||||
//
|
||||
func Overlay(background, img image.Image, pos image.Point, opacity float64) *image.NRGBA {
|
||||
opacity = math.Min(math.Max(opacity, 0.0), 1.0) // Ensure 0.0 <= opacity <= 1.0.
|
||||
dst := Clone(background)
|
||||
@@ -195,14 +200,14 @@ func Overlay(background, img image.Image, pos image.Point, opacity float64) *ima
|
||||
return dst
|
||||
}
|
||||
src := newScanner(img)
|
||||
parallel(interRect.Min.Y, interRect.Max.Y, func(ys <-chan int) {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
scanLine := make([]uint8, interRect.Dx()*4)
|
||||
for y := range ys {
|
||||
for y := start; y < limit; y++ {
|
||||
x1 := interRect.Min.X - pasteRect.Min.X
|
||||
x2 := interRect.Max.X - pasteRect.Min.X
|
||||
y1 := y - pasteRect.Min.Y
|
||||
y2 := y1 + 1
|
||||
src.scan(x1, y1, x2, y2, scanLine)
|
||||
src.Scan(x1, y1, x2, y2, scanLine)
|
||||
i := y*dst.Stride + interRect.Min.X*4
|
||||
j := 0
|
||||
for x := interRect.Min.X; x < interRect.Max.X; x++ {
|
||||
@@ -233,7 +238,9 @@ func Overlay(background, img image.Image, pos image.Point, opacity float64) *ima
|
||||
j += 4
|
||||
}
|
||||
}
|
||||
})
|
||||
}, interRect.Min.Y, interRect.Max.Y); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
|
||||
82
vendor/github.com/kovidgoyal/imaging/transform.go
generated
vendored
@@ -13,14 +13,16 @@ func FlipH(img image.Image) *image.NRGBA {
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstY
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
src.Scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -31,13 +33,15 @@ func FlipV(img image.Image) *image.NRGBA {
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
src.Scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -48,13 +52,15 @@ func Transpose(img image.Image) *image.NRGBA {
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
src.Scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -65,14 +71,16 @@ func Transverse(img image.Image) *image.NRGBA {
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
src.Scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -83,13 +91,15 @@ func Rotate90(img image.Image) *image.NRGBA {
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstH - dstY - 1
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
src.Scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -100,14 +110,16 @@ func Rotate180(img image.Image) *image.NRGBA {
|
||||
dstH := src.h
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcY := dstH - dstY - 1
|
||||
src.scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
src.Scan(0, srcY, src.w, srcY+1, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -118,14 +130,16 @@ func Rotate270(img image.Image) *image.NRGBA {
|
||||
dstH := src.w
|
||||
rowSize := dstW * 4
|
||||
dst := image.NewNRGBA(image.Rect(0, 0, dstW, dstH))
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
i := dstY * dst.Stride
|
||||
srcX := dstY
|
||||
src.scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
src.Scan(srcX, 0, srcX+1, src.h, dst.Pix[i:i+rowSize])
|
||||
reverse(dst.Pix[i : i+rowSize])
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
@@ -164,15 +178,17 @@ func Rotate(img image.Image, angle float64, bgColor color.Color) *image.NRGBA {
|
||||
bgColorNRGBA := color.NRGBAModel.Convert(bgColor).(color.NRGBA)
|
||||
sin, cos := math.Sincos(math.Pi * angle / 180)
|
||||
|
||||
parallel(0, dstH, func(ys <-chan int) {
|
||||
for dstY := range ys {
|
||||
for dstX := 0; dstX < dstW; dstX++ {
|
||||
if err := run_in_parallel_over_range(0, func(start, limit int) {
|
||||
for dstY := start; dstY < limit; dstY++ {
|
||||
for dstX := range dstW {
|
||||
xf, yf := rotatePoint(float64(dstX)-dstXOff, float64(dstY)-dstYOff, sin, cos)
|
||||
xf, yf = xf+srcXOff, yf+srcYOff
|
||||
interpolatePoint(dst, dstX, dstY, src, xf, yf, bgColorNRGBA)
|
||||
}
|
||||
}
|
||||
})
|
||||
}, 0, dstH); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
@@ -239,7 +255,7 @@ func interpolatePoint(dst *image.NRGBA, dstX, dstY int, src *image.NRGBA, xf, yf
|
||||
}
|
||||
|
||||
var r, g, b, a float64
|
||||
for i := 0; i < 4; i++ {
|
||||
for i := range 4 {
|
||||
p := points[i]
|
||||
w := weights[i]
|
||||
if p.In(bounds) {
|
||||
|
||||
46
vendor/github.com/kovidgoyal/imaging/utils.go
generated
vendored
@@ -4,49 +4,29 @@ import (
|
||||
"image"
|
||||
"math"
|
||||
"runtime"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"github.com/kovidgoyal/go-parallel"
|
||||
)
|
||||
|
||||
var maxProcs int64
|
||||
var max_procs atomic.Int64
|
||||
|
||||
// SetMaxProcs limits the number of concurrent processing goroutines to the given value.
|
||||
// A value <= 0 clears the limit.
|
||||
func SetMaxProcs(value int) {
|
||||
atomic.StoreInt64(&maxProcs, int64(value))
|
||||
max_procs.Store(int64(value))
|
||||
}
|
||||
|
||||
// parallel processes the data in separate goroutines.
|
||||
func parallel(start, stop int, fn func(<-chan int)) {
|
||||
count := stop - start
|
||||
if count < 1 {
|
||||
return
|
||||
// Run the specified function in parallel over chunks from the specified range.
|
||||
// If the function panics, it is turned into a regular error.
|
||||
func run_in_parallel_over_range(num_procs int, f func(int, int), start, limit int) (err error) {
|
||||
if num_procs <= 0 {
|
||||
num_procs = runtime.GOMAXPROCS(0)
|
||||
if mp := int(max_procs.Load()); mp > 0 {
|
||||
num_procs = min(num_procs, mp)
|
||||
}
|
||||
}
|
||||
|
||||
procs := runtime.GOMAXPROCS(0)
|
||||
limit := int(atomic.LoadInt64(&maxProcs))
|
||||
if procs > limit && limit > 0 {
|
||||
procs = limit
|
||||
}
|
||||
if procs > count {
|
||||
procs = count
|
||||
}
|
||||
|
||||
c := make(chan int, count)
|
||||
for i := start; i < stop; i++ {
|
||||
c <- i
|
||||
}
|
||||
close(c)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for i := 0; i < procs; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
fn(c)
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
return parallel.Run_in_parallel_over_range(num_procs, f, start, limit)
|
||||
}
|
||||
|
||||
// absint returns the absolute value of i.
|
||||
|
||||
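SetMaxProcs now stores the cap in an atomic.Int64, and run_in_parallel_over_range merges it with GOMAXPROCS before delegating to go-parallel. A short usage sketch of the exported knob; the image sizes are arbitrary.

package main

import (
	"fmt"
	"image"

	"github.com/kovidgoyal/imaging"
)

func main() {
	// Cap the number of goroutines the package spawns for per-row work.
	// A value <= 0 removes the cap and GOMAXPROCS is used instead.
	imaging.SetMaxProcs(2)

	src := image.NewNRGBA(image.Rect(0, 0, 1920, 1080))
	dst := imaging.Resize(src, 800, 600, imaging.Lanczos)
	fmt.Println(dst.Bounds().Dx(), dst.Bounds().Dy()) // 800 600
}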
24
vendor/github.com/rwcarlsen/goexif/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
|
||||
Copyright (c) 2012, Robert Carlsen & Contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
4
vendor/github.com/rwcarlsen/goexif/exif/README.md
generated
vendored
Normal file
@@ -0,0 +1,4 @@

To regenerate the regression test data, run `go generate` inside the exif
package directory and commit the changes to *regress_expected_test.go*.

655 vendor/github.com/rwcarlsen/goexif/exif/exif.go generated vendored Normal file
@@ -0,0 +1,655 @@
|
||||
// Package exif implements decoding of EXIF data as defined in the EXIF 2.2
|
||||
// specification (http://www.exif.org/Exif2-2.PDF).
|
||||
package exif
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/rwcarlsen/goexif/tiff"
|
||||
)
|
||||
|
||||
const (
|
||||
jpeg_APP1 = 0xE1
|
||||
|
||||
exifPointer = 0x8769
|
||||
gpsPointer = 0x8825
|
||||
interopPointer = 0xA005
|
||||
)
|
||||
|
||||
// A decodeError is returned when the image cannot be decoded as a tiff image.
|
||||
type decodeError struct {
|
||||
cause error
|
||||
}
|
||||
|
||||
func (de decodeError) Error() string {
|
||||
return fmt.Sprintf("exif: decode failed (%v) ", de.cause.Error())
|
||||
}
|
||||
|
||||
// IsShortReadTagValueError identifies an ErrShortReadTagValue error.
|
||||
func IsShortReadTagValueError(err error) bool {
|
||||
de, ok := err.(decodeError)
|
||||
if ok {
|
||||
return de.cause == tiff.ErrShortReadTagValue
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// A TagNotPresentError is returned when the requested field is not
|
||||
// present in the EXIF.
|
||||
type TagNotPresentError FieldName
|
||||
|
||||
func (tag TagNotPresentError) Error() string {
|
||||
return fmt.Sprintf("exif: tag %q is not present", string(tag))
|
||||
}
|
||||
|
||||
func IsTagNotPresentError(err error) bool {
|
||||
_, ok := err.(TagNotPresentError)
|
||||
return ok
|
||||
}
|
||||
|
||||
// Parser allows the registration of custom parsing and field loading
|
||||
// in the Decode function.
|
||||
type Parser interface {
|
||||
// Parse should read data from x and insert parsed fields into x via
|
||||
// LoadTags.
|
||||
Parse(x *Exif) error
|
||||
}
|
||||
|
||||
var parsers []Parser
|
||||
|
||||
func init() {
|
||||
RegisterParsers(&parser{})
|
||||
}
|
||||
|
||||
// RegisterParsers registers one or more parsers to be automatically called
|
||||
// when decoding EXIF data via the Decode function.
|
||||
func RegisterParsers(ps ...Parser) {
|
||||
parsers = append(parsers, ps...)
|
||||
}
|
||||
|
||||
type parser struct{}
|
||||
|
||||
type tiffErrors map[tiffError]string
|
||||
|
||||
func (te tiffErrors) Error() string {
|
||||
var allErrors []string
|
||||
for k, v := range te {
|
||||
allErrors = append(allErrors, fmt.Sprintf("%s: %v\n", stagePrefix[k], v))
|
||||
}
|
||||
return strings.Join(allErrors, "\n")
|
||||
}
|
||||
|
||||
// IsCriticalError, given the error returned by Decode, reports whether the
|
||||
// returned *Exif may contain usable information.
|
||||
func IsCriticalError(err error) bool {
|
||||
_, ok := err.(tiffErrors)
|
||||
return !ok
|
||||
}
|
||||
|
||||
// IsExifError reports whether the error happened while decoding the EXIF
|
||||
// sub-IFD.
|
||||
func IsExifError(err error) bool {
|
||||
if te, ok := err.(tiffErrors); ok {
|
||||
_, isExif := te[loadExif]
|
||||
return isExif
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsGPSError reports whether the error happened while decoding the GPS sub-IFD.
|
||||
func IsGPSError(err error) bool {
|
||||
if te, ok := err.(tiffErrors); ok {
|
||||
_, isGPS := te[loadExif]
|
||||
return isGPS
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// IsInteroperabilityError reports whether the error happened while decoding the
|
||||
// Interoperability sub-IFD.
|
||||
func IsInteroperabilityError(err error) bool {
|
||||
if te, ok := err.(tiffErrors); ok {
|
||||
_, isInterop := te[loadInteroperability]
|
||||
return isInterop
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type tiffError int
|
||||
|
||||
const (
|
||||
loadExif tiffError = iota
|
||||
loadGPS
|
||||
loadInteroperability
|
||||
)
|
||||
|
||||
var stagePrefix = map[tiffError]string{
|
||||
loadExif: "loading EXIF sub-IFD",
|
||||
loadGPS: "loading GPS sub-IFD",
|
||||
loadInteroperability: "loading Interoperability sub-IFD",
|
||||
}
|
||||
|
||||
// Parse reads data from the tiff data in x and populates the tags
|
||||
// in x. If parsing a sub-IFD fails, the error is recorded and
|
||||
// parsing continues with the remaining sub-IFDs.
|
||||
func (p *parser) Parse(x *Exif) error {
|
||||
if len(x.Tiff.Dirs) == 0 {
|
||||
return errors.New("Invalid exif data")
|
||||
}
|
||||
x.LoadTags(x.Tiff.Dirs[0], exifFields, false)
|
||||
|
||||
// thumbnails
|
||||
if len(x.Tiff.Dirs) >= 2 {
|
||||
x.LoadTags(x.Tiff.Dirs[1], thumbnailFields, false)
|
||||
}
|
||||
|
||||
te := make(tiffErrors)
|
||||
|
||||
// recurse into exif, gps, and interop sub-IFDs
|
||||
if err := loadSubDir(x, ExifIFDPointer, exifFields); err != nil {
|
||||
te[loadExif] = err.Error()
|
||||
}
|
||||
if err := loadSubDir(x, GPSInfoIFDPointer, gpsFields); err != nil {
|
||||
te[loadGPS] = err.Error()
|
||||
}
|
||||
|
||||
if err := loadSubDir(x, InteroperabilityIFDPointer, interopFields); err != nil {
|
||||
te[loadInteroperability] = err.Error()
|
||||
}
|
||||
if len(te) > 0 {
|
||||
return te
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func loadSubDir(x *Exif, ptr FieldName, fieldMap map[uint16]FieldName) error {
|
||||
r := bytes.NewReader(x.Raw)
|
||||
|
||||
tag, err := x.Get(ptr)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
offset, err := tag.Int64(0)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
_, err = r.Seek(offset, 0)
|
||||
if err != nil {
|
||||
return fmt.Errorf("exif: seek to sub-IFD %s failed: %v", ptr, err)
|
||||
}
|
||||
subDir, _, err := tiff.DecodeDir(r, x.Tiff.Order)
|
||||
if err != nil {
|
||||
return fmt.Errorf("exif: sub-IFD %s decode failed: %v", ptr, err)
|
||||
}
|
||||
x.LoadTags(subDir, fieldMap, false)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Exif provides access to decoded EXIF metadata fields and values.
|
||||
type Exif struct {
|
||||
Tiff *tiff.Tiff
|
||||
main map[FieldName]*tiff.Tag
|
||||
Raw []byte
|
||||
}
|
||||
|
||||
// Decode parses EXIF data from r (a TIFF, JPEG, or raw EXIF block)
|
||||
// and returns a queryable Exif object. After the EXIF data section is
// read and the TIFF structure is decoded, each registered parser is
|
||||
// called (in order of registration). If one parser returns an error,
|
||||
// decoding terminates and the remaining parsers are not called.
|
||||
//
|
||||
// The error can be inspected with functions such as IsCriticalError
|
||||
// to determine whether the returned object might still be usable.
|
||||
func Decode(r io.Reader) (*Exif, error) {
|
||||
|
||||
// EXIF data in JPEG is stored in the APP1 marker. EXIF data uses the TIFF
|
||||
// format to store data.
|
||||
// If we're parsing a TIFF image, we don't need to strip away any data.
|
||||
// If we're parsing a JPEG image, we need to strip away the JPEG APP1
|
||||
// marker and also the EXIF header.
|
||||
|
||||
header := make([]byte, 4)
|
||||
n, err := io.ReadFull(r, header)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("exif: error reading 4 byte header, got %d, %v", n, err)
|
||||
}
|
||||
|
||||
var isTiff bool
|
||||
var isRawExif bool
|
||||
var assumeJPEG bool
|
||||
switch string(header) {
|
||||
case "II*\x00":
|
||||
// TIFF - Little endian (Intel)
|
||||
isTiff = true
|
||||
case "MM\x00*":
|
||||
// TIFF - Big endian (Motorola)
|
||||
isTiff = true
|
||||
case "Exif":
|
||||
isRawExif = true
|
||||
default:
|
||||
// Not TIFF, assume JPEG
|
||||
assumeJPEG = true
|
||||
}
|
||||
|
||||
// Put the header bytes back into the reader.
|
||||
r = io.MultiReader(bytes.NewReader(header), r)
|
||||
var (
|
||||
er *bytes.Reader
|
||||
tif *tiff.Tiff
|
||||
sec *appSec
|
||||
)
|
||||
|
||||
switch {
|
||||
case isRawExif:
|
||||
var header [6]byte
|
||||
if _, err := io.ReadFull(r, header[:]); err != nil {
|
||||
return nil, fmt.Errorf("exif: unexpected raw exif header read error")
|
||||
}
|
||||
if got, want := string(header[:]), "Exif\x00\x00"; got != want {
|
||||
return nil, fmt.Errorf("exif: unexpected raw exif header; got %q, want %q", got, want)
|
||||
}
|
||||
fallthrough
|
||||
case isTiff:
|
||||
// Functions below need the IFDs from the TIFF data to be stored in a
|
||||
// *bytes.Reader. We use TeeReader to get a copy of the bytes as a
|
||||
// side-effect of tiff.Decode() doing its work.
|
||||
b := &bytes.Buffer{}
|
||||
tr := io.TeeReader(r, b)
|
||||
tif, err = tiff.Decode(tr)
|
||||
er = bytes.NewReader(b.Bytes())
|
||||
case assumeJPEG:
|
||||
// Locate the JPEG APP1 header.
|
||||
sec, err = newAppSec(jpeg_APP1, r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Strip away EXIF header.
|
||||
er, err = sec.exifReader()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tif, err = tiff.Decode(er)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, decodeError{cause: err}
|
||||
}
|
||||
|
||||
er.Seek(0, 0)
|
||||
raw, err := ioutil.ReadAll(er)
|
||||
if err != nil {
|
||||
return nil, decodeError{cause: err}
|
||||
}
|
||||
|
||||
// build an exif structure from the tiff
|
||||
x := &Exif{
|
||||
main: map[FieldName]*tiff.Tag{},
|
||||
Tiff: tif,
|
||||
Raw: raw,
|
||||
}
|
||||
|
||||
for i, p := range parsers {
|
||||
if err := p.Parse(x); err != nil {
|
||||
if _, ok := err.(tiffErrors); ok {
|
||||
return x, err
|
||||
}
|
||||
// This should never happen, as Parse always returns a tiffError
|
||||
// for now, but that could change.
|
||||
return x, fmt.Errorf("exif: parser %v failed (%v)", i, err)
|
||||
}
|
||||
}
|
||||
|
||||
return x, nil
|
||||
}
|
||||
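A minimal usage sketch for this package, assuming a local file photo.jpg (the name is illustrative); it decodes the EXIF block and reads a few fields with the accessors defined in this file.

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/rwcarlsen/goexif/exif"
)

func main() {
	f, err := os.Open("photo.jpg") // hypothetical input file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	x, err := exif.Decode(f)
	if err != nil && exif.IsCriticalError(err) {
		// On a critical error the returned *Exif is not usable.
		log.Fatal(err)
	}

	if model, err := x.Get(exif.Model); err == nil {
		s, _ := model.StringVal()
		fmt.Println("camera:", s)
	}
	if t, err := x.DateTime(); err == nil {
		fmt.Println("taken:", t)
	}
	if lat, long, err := x.LatLong(); err == nil {
		fmt.Printf("position: %.5f, %.5f\n", lat, long)
	}
}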
|
||||
// LoadTags loads tags into the available fields from the tiff Directory
|
||||
// using the given tagid-fieldname mapping. Used to load makernote and
|
||||
// other meta-data. If showMissing is true, tags in d that are not in the
|
||||
// fieldMap will be loaded with the FieldName UnknownPrefix followed by the
|
||||
// tag ID (in hex format).
|
||||
func (x *Exif) LoadTags(d *tiff.Dir, fieldMap map[uint16]FieldName, showMissing bool) {
|
||||
for _, tag := range d.Tags {
|
||||
name := fieldMap[tag.Id]
|
||||
if name == "" {
|
||||
if !showMissing {
|
||||
continue
|
||||
}
|
||||
name = FieldName(fmt.Sprintf("%v%x", UnknownPrefix, tag.Id))
|
||||
}
|
||||
x.main[name] = tag
|
||||
}
|
||||
}
|
||||
|
||||
// Get retrieves the EXIF tag for the given field name.
|
||||
//
|
||||
// If the tag is not known or not present, an error is returned. If the
|
||||
// tag name is known, the error will be a TagNotPresentError.
|
||||
func (x *Exif) Get(name FieldName) (*tiff.Tag, error) {
|
||||
if tg, ok := x.main[name]; ok {
|
||||
return tg, nil
|
||||
}
|
||||
return nil, TagNotPresentError(name)
|
||||
}
|
||||
|
||||
// Walker is the interface used to traverse all fields of an Exif object.
|
||||
type Walker interface {
|
||||
// Walk is called for each non-nil EXIF field. Returning a non-nil
|
||||
// error aborts the walk/traversal.
|
||||
Walk(name FieldName, tag *tiff.Tag) error
|
||||
}
|
||||
|
||||
// Walk calls the Walk method of w with the name and tag for every non-nil
|
||||
// EXIF field. If w aborts the walk with an error, that error is returned.
|
||||
func (x *Exif) Walk(w Walker) error {
|
||||
for name, tag := range x.main {
|
||||
if err := w.Walk(name, tag); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
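As an illustration of the Walker interface, a hypothetical printWalker that lists every decoded field might look like this; it is not part of the package, and fmt and tiff are already imported above.

// printWalker implements Walker by printing every field name and tag value.
type printWalker struct{}

func (printWalker) Walk(name FieldName, tag *tiff.Tag) error {
	fmt.Printf("%s: %s\n", name, tag)
	return nil
}

// Typical use, assuming x is a decoded *Exif:
//
//	if err := x.Walk(printWalker{}); err != nil {
//	    // a Walker aborts the traversal by returning a non-nil error
//	}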
|
||||
// DateTime returns the EXIF's "DateTimeOriginal" field, which
|
||||
// is the creation time of the photo. If not found, it tries
|
||||
// the "DateTime" (which is meant as the modtime) instead.
|
||||
// The error will be TagNotPresentErr if none of those tags
|
||||
// were found, or a generic error if the tag value was
|
||||
// not a string, or the error returned by time.Parse.
|
||||
//
|
||||
// If the EXIF lacks timezone information or GPS time, the returned
|
||||
// time's Location will be time.Local.
|
||||
func (x *Exif) DateTime() (time.Time, error) {
|
||||
var dt time.Time
|
||||
tag, err := x.Get(DateTimeOriginal)
|
||||
if err != nil {
|
||||
tag, err = x.Get(DateTime)
|
||||
if err != nil {
|
||||
return dt, err
|
||||
}
|
||||
}
|
||||
if tag.Format() != tiff.StringVal {
|
||||
return dt, errors.New("DateTime[Original] not in string format")
|
||||
}
|
||||
exifTimeLayout := "2006:01:02 15:04:05"
|
||||
dateStr := strings.TrimRight(string(tag.Val), "\x00")
|
||||
// TODO(bradfitz,mpl): look for timezone offset, GPS time, etc.
|
||||
timeZone := time.Local
|
||||
if tz, _ := x.TimeZone(); tz != nil {
|
||||
timeZone = tz
|
||||
}
|
||||
return time.ParseInLocation(exifTimeLayout, dateStr, timeZone)
|
||||
}
|
||||
|
||||
func (x *Exif) TimeZone() (*time.Location, error) {
|
||||
// TODO: parse more timezone fields (e.g. Nikon WorldTime).
|
||||
timeInfo, err := x.Get("Canon.TimeInfo")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if timeInfo.Count < 2 {
|
||||
return nil, errors.New("Canon.TimeInfo does not contain timezone")
|
||||
}
|
||||
offsetMinutes, err := timeInfo.Int(1)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return time.FixedZone("", offsetMinutes*60), nil
|
||||
}
|
||||
|
||||
func ratFloat(num, dem int64) float64 {
|
||||
return float64(num) / float64(dem)
|
||||
}
|
||||
|
||||
// Tries to parse a Geo degrees value from a string as it was found in some
|
||||
// EXIF data.
|
||||
// Supported formats so far:
|
||||
// - "52,00000,50,00000,34,01180" ==> 52 deg 50'34.0118"
|
||||
// Probably due to locale the comma is used as decimal mark as well as the
|
||||
// separator of three floats (degrees, minutes, seconds)
|
||||
// http://en.wikipedia.org/wiki/Decimal_mark#Hindu.E2.80.93Arabic_numeral_system
|
||||
// - "52.0,50.0,34.01180" ==> 52deg50'34.0118"
|
||||
// - "52,50,34.01180" ==> 52deg50'34.0118"
|
||||
func parseTagDegreesString(s string) (float64, error) {
|
||||
const unparsableErrorFmt = "Unknown coordinate format: %s"
|
||||
isSplitRune := func(c rune) bool {
|
||||
return c == ',' || c == ';'
|
||||
}
|
||||
parts := strings.FieldsFunc(s, isSplitRune)
|
||||
var degrees, minutes, seconds float64
|
||||
var err error
|
||||
switch len(parts) {
|
||||
case 6:
|
||||
degrees, err = strconv.ParseFloat(parts[0]+"."+parts[1], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
minutes, err = strconv.ParseFloat(parts[2]+"."+parts[3], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
minutes = math.Copysign(minutes, degrees)
|
||||
seconds, err = strconv.ParseFloat(parts[4]+"."+parts[5], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
seconds = math.Copysign(seconds, degrees)
|
||||
case 3:
|
||||
degrees, err = strconv.ParseFloat(parts[0], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
minutes, err = strconv.ParseFloat(parts[1], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
minutes = math.Copysign(minutes, degrees)
|
||||
seconds, err = strconv.ParseFloat(parts[2], 64)
|
||||
if err != nil {
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
seconds = math.Copysign(seconds, degrees)
|
||||
default:
|
||||
return 0.0, fmt.Errorf(unparsableErrorFmt, s)
|
||||
}
|
||||
return degrees + minutes/60.0 + seconds/3600.0, nil
|
||||
}
|
||||
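A worked example of the conversion: the supported form "52,50,34.01180" means 52deg50'34.0118", i.e. 52 + 50/60 + 34.0118/3600 ≈ 52.84278 decimal degrees. A sketch of how this could be checked from a hypothetical _test.go file in this package:

func Example_parseTagDegreesString() {
	// degrees, minutes, seconds combined into decimal degrees
	deg, _ := parseTagDegreesString("52,50,34.01180")
	fmt.Printf("%.5f\n", deg)
	// Output: 52.84278
}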
|
||||
func parse3Rat2(tag *tiff.Tag) ([3]float64, error) {
|
||||
v := [3]float64{}
|
||||
for i := range v {
|
||||
num, den, err := tag.Rat2(i)
|
||||
if err != nil {
|
||||
return v, err
|
||||
}
|
||||
v[i] = ratFloat(num, den)
|
||||
if tag.Count < uint32(i+2) {
|
||||
break
|
||||
}
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
func tagDegrees(tag *tiff.Tag) (float64, error) {
|
||||
switch tag.Format() {
|
||||
case tiff.RatVal:
|
||||
// The usual case, according to the Exif spec
|
||||
// (http://www.kodak.com/global/plugins/acrobat/en/service/digCam/exifStandard2.pdf,
|
||||
// sec 4.6.6, p. 52 et seq.)
|
||||
v, err := parse3Rat2(tag)
|
||||
if err != nil {
|
||||
return 0.0, err
|
||||
}
|
||||
return v[0] + v[1]/60 + v[2]/3600.0, nil
|
||||
case tiff.StringVal:
|
||||
// Encountered this weird case with a panorama picture taken with a HTC phone
|
||||
s, err := tag.StringVal()
|
||||
if err != nil {
|
||||
return 0.0, err
|
||||
}
|
||||
return parseTagDegreesString(s)
|
||||
default:
|
||||
// don't know how to parse value, give up
|
||||
return 0.0, fmt.Errorf("Malformed EXIF Tag Degrees")
|
||||
}
|
||||
}
|
||||
|
||||
// LatLong returns the latitude and longitude of the photo, or an error
// if the GPS fields are missing or cannot be parsed.
|
||||
func (x *Exif) LatLong() (lat, long float64, err error) {
|
||||
// All calls of x.Get might return a TagNotPresentError
|
||||
longTag, err := x.Get(FieldName("GPSLongitude"))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
ewTag, err := x.Get(FieldName("GPSLongitudeRef"))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
latTag, err := x.Get(FieldName("GPSLatitude"))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
nsTag, err := x.Get(FieldName("GPSLatitudeRef"))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if long, err = tagDegrees(longTag); err != nil {
|
||||
return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err)
|
||||
}
|
||||
if lat, err = tagDegrees(latTag); err != nil {
|
||||
return 0, 0, fmt.Errorf("Cannot parse latitude: %v", err)
|
||||
}
|
||||
ew, err := ewTag.StringVal()
|
||||
if err == nil && ew == "W" {
|
||||
long *= -1.0
|
||||
} else if err != nil {
|
||||
return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err)
|
||||
}
|
||||
ns, err := nsTag.StringVal()
|
||||
if err == nil && ns == "S" {
|
||||
lat *= -1.0
|
||||
} else if err != nil {
|
||||
return 0, 0, fmt.Errorf("Cannot parse longitude: %v", err)
|
||||
}
|
||||
return lat, long, nil
|
||||
}
|
||||
|
||||
// String returns a pretty text representation of the decoded exif data.
|
||||
func (x *Exif) String() string {
|
||||
var buf bytes.Buffer
|
||||
for name, tag := range x.main {
|
||||
fmt.Fprintf(&buf, "%s: %s\n", name, tag)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// JpegThumbnail returns the jpeg thumbnail if it exists. If it doesn't exist,
|
||||
// TagNotPresentError will be returned
|
||||
func (x *Exif) JpegThumbnail() ([]byte, error) {
|
||||
offset, err := x.Get(ThumbJPEGInterchangeFormat)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
start, err := offset.Int(0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
length, err := x.Get(ThumbJPEGInterchangeFormatLength)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
l, err := length.Int(0)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return x.Raw[start : start+l], nil
|
||||
}
|
||||
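A sketch of pulling the embedded thumbnail out of a photo and writing it to disk; the file names are illustrative and error handling is minimal.

package main

import (
	"log"
	"os"

	"github.com/rwcarlsen/goexif/exif"
)

func main() {
	f, err := os.Open("photo.jpg") // hypothetical input file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	x, err := exif.Decode(f)
	if err != nil && exif.IsCriticalError(err) {
		log.Fatal(err)
	}

	thumb, err := x.JpegThumbnail()
	if err != nil {
		// A TagNotPresentError here just means the photo carries no embedded thumbnail.
		log.Fatal(err)
	}
	if err := os.WriteFile("thumb.jpg", thumb, 0o644); err != nil {
		log.Fatal(err)
	}
}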
|
||||
// MarshalJson implements the encoding/json.Marshaler interface providing output of
|
||||
// all EXIF fields present (names and values).
|
||||
func (x Exif) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(x.main)
|
||||
}
|
||||
|
||||
type appSec struct {
|
||||
marker byte
|
||||
data []byte
|
||||
}
|
||||
|
||||
// newAppSec finds marker in r and returns the corresponding application data
|
||||
// section.
|
||||
func newAppSec(marker byte, r io.Reader) (*appSec, error) {
|
||||
br := bufio.NewReader(r)
|
||||
app := &appSec{marker: marker}
|
||||
var dataLen int
|
||||
|
||||
// seek to marker
|
||||
for dataLen == 0 {
|
||||
if _, err := br.ReadBytes(0xFF); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
c, err := br.ReadByte()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if c != marker {
|
||||
continue
|
||||
}
|
||||
|
||||
dataLenBytes := make([]byte, 2)
|
||||
for k := range dataLenBytes {
|
||||
c, err := br.ReadByte()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dataLenBytes[k] = c
|
||||
}
|
||||
dataLen = int(binary.BigEndian.Uint16(dataLenBytes)) - 2
|
||||
}
|
||||
|
||||
// read section data
|
||||
nread := 0
|
||||
for nread < dataLen {
|
||||
s := make([]byte, dataLen-nread)
|
||||
n, err := br.Read(s)
|
||||
nread += n
|
||||
if err != nil && nread < dataLen {
|
||||
return nil, err
|
||||
}
|
||||
app.data = append(app.data, s[:n]...)
|
||||
}
|
||||
return app, nil
|
||||
}
|
||||
|
||||
// reader returns a reader on this appSec.
|
||||
func (app *appSec) reader() *bytes.Reader {
|
||||
return bytes.NewReader(app.data)
|
||||
}
|
||||
|
||||
// exifReader returns a reader on this appSec with the read cursor advanced to
|
||||
// the start of the exif's tiff encoded portion.
|
||||
func (app *appSec) exifReader() (*bytes.Reader, error) {
|
||||
if len(app.data) < 6 {
|
||||
return nil, errors.New("exif: failed to find exif intro marker")
|
||||
}
|
||||
|
||||
// read/check for exif special mark
|
||||
exif := app.data[:6]
|
||||
if !bytes.Equal(exif, append([]byte("Exif"), 0x00, 0x00)) {
|
||||
return nil, errors.New("exif: failed to find exif intro marker")
|
||||
}
|
||||
return bytes.NewReader(app.data[6:]), nil
|
||||
}
|
||||
309 vendor/github.com/rwcarlsen/goexif/exif/fields.go generated vendored Normal file
@@ -0,0 +1,309 @@
|
||||
package exif
|
||||
|
||||
type FieldName string
|
||||
|
||||
// UnknownPrefix is used as the first part of field names for decoded tags for
|
||||
// which there is no known/supported EXIF field.
|
||||
const UnknownPrefix = "UnknownTag_"
|
||||
|
||||
// Primary EXIF fields
|
||||
const (
|
||||
ImageWidth FieldName = "ImageWidth"
|
||||
ImageLength FieldName = "ImageLength" // Image height called Length by EXIF spec
|
||||
BitsPerSample FieldName = "BitsPerSample"
|
||||
Compression FieldName = "Compression"
|
||||
PhotometricInterpretation FieldName = "PhotometricInterpretation"
|
||||
Orientation FieldName = "Orientation"
|
||||
SamplesPerPixel FieldName = "SamplesPerPixel"
|
||||
PlanarConfiguration FieldName = "PlanarConfiguration"
|
||||
YCbCrSubSampling FieldName = "YCbCrSubSampling"
|
||||
YCbCrPositioning FieldName = "YCbCrPositioning"
|
||||
XResolution FieldName = "XResolution"
|
||||
YResolution FieldName = "YResolution"
|
||||
ResolutionUnit FieldName = "ResolutionUnit"
|
||||
DateTime FieldName = "DateTime"
|
||||
ImageDescription FieldName = "ImageDescription"
|
||||
Make FieldName = "Make"
|
||||
Model FieldName = "Model"
|
||||
Software FieldName = "Software"
|
||||
Artist FieldName = "Artist"
|
||||
Copyright FieldName = "Copyright"
|
||||
ExifIFDPointer FieldName = "ExifIFDPointer"
|
||||
GPSInfoIFDPointer FieldName = "GPSInfoIFDPointer"
|
||||
InteroperabilityIFDPointer FieldName = "InteroperabilityIFDPointer"
|
||||
ExifVersion FieldName = "ExifVersion"
|
||||
FlashpixVersion FieldName = "FlashpixVersion"
|
||||
ColorSpace FieldName = "ColorSpace"
|
||||
ComponentsConfiguration FieldName = "ComponentsConfiguration"
|
||||
CompressedBitsPerPixel FieldName = "CompressedBitsPerPixel"
|
||||
PixelXDimension FieldName = "PixelXDimension"
|
||||
PixelYDimension FieldName = "PixelYDimension"
|
||||
MakerNote FieldName = "MakerNote"
|
||||
UserComment FieldName = "UserComment"
|
||||
RelatedSoundFile FieldName = "RelatedSoundFile"
|
||||
DateTimeOriginal FieldName = "DateTimeOriginal"
|
||||
DateTimeDigitized FieldName = "DateTimeDigitized"
|
||||
SubSecTime FieldName = "SubSecTime"
|
||||
SubSecTimeOriginal FieldName = "SubSecTimeOriginal"
|
||||
SubSecTimeDigitized FieldName = "SubSecTimeDigitized"
|
||||
ImageUniqueID FieldName = "ImageUniqueID"
|
||||
ExposureTime FieldName = "ExposureTime"
|
||||
FNumber FieldName = "FNumber"
|
||||
ExposureProgram FieldName = "ExposureProgram"
|
||||
SpectralSensitivity FieldName = "SpectralSensitivity"
|
||||
ISOSpeedRatings FieldName = "ISOSpeedRatings"
|
||||
OECF FieldName = "OECF"
|
||||
ShutterSpeedValue FieldName = "ShutterSpeedValue"
|
||||
ApertureValue FieldName = "ApertureValue"
|
||||
BrightnessValue FieldName = "BrightnessValue"
|
||||
ExposureBiasValue FieldName = "ExposureBiasValue"
|
||||
MaxApertureValue FieldName = "MaxApertureValue"
|
||||
SubjectDistance FieldName = "SubjectDistance"
|
||||
MeteringMode FieldName = "MeteringMode"
|
||||
LightSource FieldName = "LightSource"
|
||||
Flash FieldName = "Flash"
|
||||
FocalLength FieldName = "FocalLength"
|
||||
SubjectArea FieldName = "SubjectArea"
|
||||
FlashEnergy FieldName = "FlashEnergy"
|
||||
SpatialFrequencyResponse FieldName = "SpatialFrequencyResponse"
|
||||
FocalPlaneXResolution FieldName = "FocalPlaneXResolution"
|
||||
FocalPlaneYResolution FieldName = "FocalPlaneYResolution"
|
||||
FocalPlaneResolutionUnit FieldName = "FocalPlaneResolutionUnit"
|
||||
SubjectLocation FieldName = "SubjectLocation"
|
||||
ExposureIndex FieldName = "ExposureIndex"
|
||||
SensingMethod FieldName = "SensingMethod"
|
||||
FileSource FieldName = "FileSource"
|
||||
SceneType FieldName = "SceneType"
|
||||
CFAPattern FieldName = "CFAPattern"
|
||||
CustomRendered FieldName = "CustomRendered"
|
||||
ExposureMode FieldName = "ExposureMode"
|
||||
WhiteBalance FieldName = "WhiteBalance"
|
||||
DigitalZoomRatio FieldName = "DigitalZoomRatio"
|
||||
FocalLengthIn35mmFilm FieldName = "FocalLengthIn35mmFilm"
|
||||
SceneCaptureType FieldName = "SceneCaptureType"
|
||||
GainControl FieldName = "GainControl"
|
||||
Contrast FieldName = "Contrast"
|
||||
Saturation FieldName = "Saturation"
|
||||
Sharpness FieldName = "Sharpness"
|
||||
DeviceSettingDescription FieldName = "DeviceSettingDescription"
|
||||
SubjectDistanceRange FieldName = "SubjectDistanceRange"
|
||||
LensMake FieldName = "LensMake"
|
||||
LensModel FieldName = "LensModel"
|
||||
)
|
||||
|
||||
// Windows-specific tags
|
||||
const (
|
||||
XPTitle FieldName = "XPTitle"
|
||||
XPComment FieldName = "XPComment"
|
||||
XPAuthor FieldName = "XPAuthor"
|
||||
XPKeywords FieldName = "XPKeywords"
|
||||
XPSubject FieldName = "XPSubject"
|
||||
)
|
||||
|
||||
// thumbnail fields
|
||||
const (
|
||||
ThumbJPEGInterchangeFormat FieldName = "ThumbJPEGInterchangeFormat" // offset to thumb jpeg SOI
|
||||
ThumbJPEGInterchangeFormatLength FieldName = "ThumbJPEGInterchangeFormatLength" // byte length of thumb
|
||||
)
|
||||
|
||||
// GPS fields
|
||||
const (
|
||||
GPSVersionID FieldName = "GPSVersionID"
|
||||
GPSLatitudeRef FieldName = "GPSLatitudeRef"
|
||||
GPSLatitude FieldName = "GPSLatitude"
|
||||
GPSLongitudeRef FieldName = "GPSLongitudeRef"
|
||||
GPSLongitude FieldName = "GPSLongitude"
|
||||
GPSAltitudeRef FieldName = "GPSAltitudeRef"
|
||||
GPSAltitude FieldName = "GPSAltitude"
|
||||
GPSTimeStamp FieldName = "GPSTimeStamp"
|
||||
GPSSatelites FieldName = "GPSSatelites"
|
||||
GPSStatus FieldName = "GPSStatus"
|
||||
GPSMeasureMode FieldName = "GPSMeasureMode"
|
||||
GPSDOP FieldName = "GPSDOP"
|
||||
GPSSpeedRef FieldName = "GPSSpeedRef"
|
||||
GPSSpeed FieldName = "GPSSpeed"
|
||||
GPSTrackRef FieldName = "GPSTrackRef"
|
||||
GPSTrack FieldName = "GPSTrack"
|
||||
GPSImgDirectionRef FieldName = "GPSImgDirectionRef"
|
||||
GPSImgDirection FieldName = "GPSImgDirection"
|
||||
GPSMapDatum FieldName = "GPSMapDatum"
|
||||
GPSDestLatitudeRef FieldName = "GPSDestLatitudeRef"
|
||||
GPSDestLatitude FieldName = "GPSDestLatitude"
|
||||
GPSDestLongitudeRef FieldName = "GPSDestLongitudeRef"
|
||||
GPSDestLongitude FieldName = "GPSDestLongitude"
|
||||
GPSDestBearingRef FieldName = "GPSDestBearingRef"
|
||||
GPSDestBearing FieldName = "GPSDestBearing"
|
||||
GPSDestDistanceRef FieldName = "GPSDestDistanceRef"
|
||||
GPSDestDistance FieldName = "GPSDestDistance"
|
||||
GPSProcessingMethod FieldName = "GPSProcessingMethod"
|
||||
GPSAreaInformation FieldName = "GPSAreaInformation"
|
||||
GPSDateStamp FieldName = "GPSDateStamp"
|
||||
GPSDifferential FieldName = "GPSDifferential"
|
||||
)
|
||||
|
||||
// interoperability fields
|
||||
const (
|
||||
InteroperabilityIndex FieldName = "InteroperabilityIndex"
|
||||
)
|
||||
|
||||
var exifFields = map[uint16]FieldName{
|
||||
/////////////////////////////////////
|
||||
////////// IFD 0 ////////////////////
|
||||
/////////////////////////////////////
|
||||
|
||||
// image data structure for the thumbnail
|
||||
0x0100: ImageWidth,
|
||||
0x0101: ImageLength,
|
||||
0x0102: BitsPerSample,
|
||||
0x0103: Compression,
|
||||
0x0106: PhotometricInterpretation,
|
||||
0x0112: Orientation,
|
||||
0x0115: SamplesPerPixel,
|
||||
0x011C: PlanarConfiguration,
|
||||
0x0212: YCbCrSubSampling,
|
||||
0x0213: YCbCrPositioning,
|
||||
0x011A: XResolution,
|
||||
0x011B: YResolution,
|
||||
0x0128: ResolutionUnit,
|
||||
|
||||
// Other tags
|
||||
0x0132: DateTime,
|
||||
0x010E: ImageDescription,
|
||||
0x010F: Make,
|
||||
0x0110: Model,
|
||||
0x0131: Software,
|
||||
0x013B: Artist,
|
||||
0x8298: Copyright,
|
||||
|
||||
// Windows-specific tags
|
||||
0x9c9b: XPTitle,
|
||||
0x9c9c: XPComment,
|
||||
0x9c9d: XPAuthor,
|
||||
0x9c9e: XPKeywords,
|
||||
0x9c9f: XPSubject,
|
||||
|
||||
// private tags
|
||||
exifPointer: ExifIFDPointer,
|
||||
|
||||
/////////////////////////////////////
|
||||
////////// Exif sub IFD /////////////
|
||||
/////////////////////////////////////
|
||||
|
||||
gpsPointer: GPSInfoIFDPointer,
|
||||
interopPointer: InteroperabilityIFDPointer,
|
||||
|
||||
0x9000: ExifVersion,
|
||||
0xA000: FlashpixVersion,
|
||||
|
||||
0xA001: ColorSpace,
|
||||
|
||||
0x9101: ComponentsConfiguration,
|
||||
0x9102: CompressedBitsPerPixel,
|
||||
0xA002: PixelXDimension,
|
||||
0xA003: PixelYDimension,
|
||||
|
||||
0x927C: MakerNote,
|
||||
0x9286: UserComment,
|
||||
|
||||
0xA004: RelatedSoundFile,
|
||||
0x9003: DateTimeOriginal,
|
||||
0x9004: DateTimeDigitized,
|
||||
0x9290: SubSecTime,
|
||||
0x9291: SubSecTimeOriginal,
|
||||
0x9292: SubSecTimeDigitized,
|
||||
|
||||
0xA420: ImageUniqueID,
|
||||
|
||||
// picture conditions
|
||||
0x829A: ExposureTime,
|
||||
0x829D: FNumber,
|
||||
0x8822: ExposureProgram,
|
||||
0x8824: SpectralSensitivity,
|
||||
0x8827: ISOSpeedRatings,
|
||||
0x8828: OECF,
|
||||
0x9201: ShutterSpeedValue,
|
||||
0x9202: ApertureValue,
|
||||
0x9203: BrightnessValue,
|
||||
0x9204: ExposureBiasValue,
|
||||
0x9205: MaxApertureValue,
|
||||
0x9206: SubjectDistance,
|
||||
0x9207: MeteringMode,
|
||||
0x9208: LightSource,
|
||||
0x9209: Flash,
|
||||
0x920A: FocalLength,
|
||||
0x9214: SubjectArea,
|
||||
0xA20B: FlashEnergy,
|
||||
0xA20C: SpatialFrequencyResponse,
|
||||
0xA20E: FocalPlaneXResolution,
|
||||
0xA20F: FocalPlaneYResolution,
|
||||
0xA210: FocalPlaneResolutionUnit,
|
||||
0xA214: SubjectLocation,
|
||||
0xA215: ExposureIndex,
|
||||
0xA217: SensingMethod,
|
||||
0xA300: FileSource,
|
||||
0xA301: SceneType,
|
||||
0xA302: CFAPattern,
|
||||
0xA401: CustomRendered,
|
||||
0xA402: ExposureMode,
|
||||
0xA403: WhiteBalance,
|
||||
0xA404: DigitalZoomRatio,
|
||||
0xA405: FocalLengthIn35mmFilm,
|
||||
0xA406: SceneCaptureType,
|
||||
0xA407: GainControl,
|
||||
0xA408: Contrast,
|
||||
0xA409: Saturation,
|
||||
0xA40A: Sharpness,
|
||||
0xA40B: DeviceSettingDescription,
|
||||
0xA40C: SubjectDistanceRange,
|
||||
0xA433: LensMake,
|
||||
0xA434: LensModel,
|
||||
}
|
||||
|
||||
var gpsFields = map[uint16]FieldName{
|
||||
/////////////////////////////////////
|
||||
//// GPS sub-IFD ////////////////////
|
||||
/////////////////////////////////////
|
||||
0x0: GPSVersionID,
|
||||
0x1: GPSLatitudeRef,
|
||||
0x2: GPSLatitude,
|
||||
0x3: GPSLongitudeRef,
|
||||
0x4: GPSLongitude,
|
||||
0x5: GPSAltitudeRef,
|
||||
0x6: GPSAltitude,
|
||||
0x7: GPSTimeStamp,
|
||||
0x8: GPSSatelites,
|
||||
0x9: GPSStatus,
|
||||
0xA: GPSMeasureMode,
|
||||
0xB: GPSDOP,
|
||||
0xC: GPSSpeedRef,
|
||||
0xD: GPSSpeed,
|
||||
0xE: GPSTrackRef,
|
||||
0xF: GPSTrack,
|
||||
0x10: GPSImgDirectionRef,
|
||||
0x11: GPSImgDirection,
|
||||
0x12: GPSMapDatum,
|
||||
0x13: GPSDestLatitudeRef,
|
||||
0x14: GPSDestLatitude,
|
||||
0x15: GPSDestLongitudeRef,
|
||||
0x16: GPSDestLongitude,
|
||||
0x17: GPSDestBearingRef,
|
||||
0x18: GPSDestBearing,
|
||||
0x19: GPSDestDistanceRef,
|
||||
0x1A: GPSDestDistance,
|
||||
0x1B: GPSProcessingMethod,
|
||||
0x1C: GPSAreaInformation,
|
||||
0x1D: GPSDateStamp,
|
||||
0x1E: GPSDifferential,
|
||||
}
|
||||
|
||||
var interopFields = map[uint16]FieldName{
|
||||
/////////////////////////////////////
|
||||
//// Interoperability sub-IFD ///////
|
||||
/////////////////////////////////////
|
||||
0x1: InteroperabilityIndex,
|
||||
}
|
||||
|
||||
var thumbnailFields = map[uint16]FieldName{
|
||||
0x0201: ThumbJPEGInterchangeFormat,
|
||||
0x0202: ThumbJPEGInterchangeFormatLength,
|
||||
}
|
||||
BIN vendor/github.com/rwcarlsen/goexif/exif/sample1.jpg generated vendored Normal file (binary file not shown; 79 KiB)
BIN vendor/github.com/rwcarlsen/goexif/tiff/sample1.tif generated vendored Normal file (binary file not shown)
445 vendor/github.com/rwcarlsen/goexif/tiff/tag.go generated vendored Normal file
@@ -0,0 +1,445 @@
|
||||
package tiff
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/big"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Format specifies the Go type equivalent used to represent the basic
|
||||
// tiff data types.
|
||||
type Format int
|
||||
|
||||
const (
|
||||
IntVal Format = iota
|
||||
FloatVal
|
||||
RatVal
|
||||
StringVal
|
||||
UndefVal
|
||||
OtherVal
|
||||
)
|
||||
|
||||
var ErrShortReadTagValue = errors.New("tiff: short read of tag value")
|
||||
|
||||
var formatNames = map[Format]string{
|
||||
IntVal: "int",
|
||||
FloatVal: "float",
|
||||
RatVal: "rational",
|
||||
StringVal: "string",
|
||||
UndefVal: "undefined",
|
||||
OtherVal: "other",
|
||||
}
|
||||
|
||||
// DataType represents the basic tiff tag data types.
|
||||
type DataType uint16
|
||||
|
||||
const (
|
||||
DTByte DataType = 1
|
||||
DTAscii DataType = 2
|
||||
DTShort DataType = 3
|
||||
DTLong DataType = 4
|
||||
DTRational DataType = 5
|
||||
DTSByte DataType = 6
|
||||
DTUndefined DataType = 7
|
||||
DTSShort DataType = 8
|
||||
DTSLong DataType = 9
|
||||
DTSRational DataType = 10
|
||||
DTFloat DataType = 11
|
||||
DTDouble DataType = 12
|
||||
)
|
||||
|
||||
var typeNames = map[DataType]string{
|
||||
DTByte: "byte",
|
||||
DTAscii: "ascii",
|
||||
DTShort: "short",
|
||||
DTLong: "long",
|
||||
DTRational: "rational",
|
||||
DTSByte: "signed byte",
|
||||
DTUndefined: "undefined",
|
||||
DTSShort: "signed short",
|
||||
DTSLong: "signed long",
|
||||
DTSRational: "signed rational",
|
||||
DTFloat: "float",
|
||||
DTDouble: "double",
|
||||
}
|
||||
|
||||
// typeSize specifies the size in bytes of each type.
|
||||
var typeSize = map[DataType]uint32{
|
||||
DTByte: 1,
|
||||
DTAscii: 1,
|
||||
DTShort: 2,
|
||||
DTLong: 4,
|
||||
DTRational: 8,
|
||||
DTSByte: 1,
|
||||
DTUndefined: 1,
|
||||
DTSShort: 2,
|
||||
DTSLong: 4,
|
||||
DTSRational: 8,
|
||||
DTFloat: 4,
|
||||
DTDouble: 8,
|
||||
}
|
||||
|
||||
// Tag reflects the parsed content of a tiff IFD tag.
|
||||
type Tag struct {
|
||||
// Id is the 2-byte tiff tag identifier.
|
||||
Id uint16
|
||||
// Type is an integer (1 through 12) indicating the tag value's data type.
|
||||
Type DataType
|
||||
// Count is the number of type Type stored in the tag's value (i.e. the
|
||||
// tag's value is an array of type Type and length Count).
|
||||
Count uint32
|
||||
// Val holds the bytes that represent the tag's value.
|
||||
Val []byte
|
||||
// ValOffset holds byte offset of the tag value w.r.t. the beginning of the
|
||||
// reader it was decoded from. Zero if the tag value fit inside the offset
|
||||
// field.
|
||||
ValOffset uint32
|
||||
|
||||
order binary.ByteOrder
|
||||
intVals []int64
|
||||
floatVals []float64
|
||||
ratVals [][]int64
|
||||
strVal string
|
||||
format Format
|
||||
}
|
||||
|
||||
// DecodeTag parses a tiff-encoded IFD tag from r and returns a Tag object. The
|
||||
// first read from r should be the first byte of the tag. ReadAt offsets should
|
||||
// generally be relative to the beginning of the tiff structure (not relative
|
||||
// to the beginning of the tag).
|
||||
func DecodeTag(r ReadAtReader, order binary.ByteOrder) (*Tag, error) {
|
||||
t := new(Tag)
|
||||
t.order = order
|
||||
|
||||
err := binary.Read(r, order, &t.Id)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: tag id read failed: " + err.Error())
|
||||
}
|
||||
|
||||
err = binary.Read(r, order, &t.Type)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: tag type read failed: " + err.Error())
|
||||
}
|
||||
|
||||
err = binary.Read(r, order, &t.Count)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: tag component count read failed: " + err.Error())
|
||||
}
|
||||
|
||||
// There seems to be a relatively common corrupt tag which has a Count of
|
||||
// MaxUint32. This is probably not a valid value, so return early.
|
||||
if t.Count == 1<<32-1 {
|
||||
return t, errors.New("invalid Count offset in tag")
|
||||
}
|
||||
|
||||
valLen := typeSize[t.Type] * t.Count
|
||||
if valLen == 0 {
|
||||
return t, errors.New("zero length tag value")
|
||||
}
|
||||
|
||||
if valLen > 4 {
|
||||
binary.Read(r, order, &t.ValOffset)
|
||||
|
||||
// Use a bytes.Buffer so we don't allocate a huge slice if the tag
|
||||
// is corrupt.
|
||||
var buff bytes.Buffer
|
||||
sr := io.NewSectionReader(r, int64(t.ValOffset), int64(valLen))
|
||||
n, err := io.Copy(&buff, sr)
|
||||
if err != nil {
|
||||
return t, errors.New("tiff: tag value read failed: " + err.Error())
|
||||
} else if n != int64(valLen) {
|
||||
return t, ErrShortReadTagValue
|
||||
}
|
||||
t.Val = buff.Bytes()
|
||||
|
||||
} else {
|
||||
val := make([]byte, valLen)
|
||||
if _, err = io.ReadFull(r, val); err != nil {
|
||||
return t, errors.New("tiff: tag offset read failed: " + err.Error())
|
||||
}
|
||||
// ignore padding.
|
||||
if _, err = io.ReadFull(r, make([]byte, 4-valLen)); err != nil {
|
||||
return t, errors.New("tiff: tag offset read failed: " + err.Error())
|
||||
}
|
||||
|
||||
t.Val = val
|
||||
}
|
||||
|
||||
return t, t.convertVals()
|
||||
}
|
||||
|
||||
func (t *Tag) convertVals() error {
|
||||
r := bytes.NewReader(t.Val)
|
||||
|
||||
switch t.Type {
|
||||
case DTAscii:
|
||||
if len(t.Val) <= 0 {
|
||||
break
|
||||
}
|
||||
nullPos := bytes.IndexByte(t.Val, 0)
|
||||
if nullPos == -1 {
|
||||
t.strVal = string(t.Val)
|
||||
} else {
|
||||
// ignore all trailing NULL bytes, in case of a broken t.Count
|
||||
t.strVal = string(t.Val[:nullPos])
|
||||
}
|
||||
case DTByte:
|
||||
var v uint8
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTShort:
|
||||
var v uint16
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTLong:
|
||||
var v uint32
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTSByte:
|
||||
var v int8
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTSShort:
|
||||
var v int16
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTSLong:
|
||||
var v int32
|
||||
t.intVals = make([]int64, int(t.Count))
|
||||
for i := range t.intVals {
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.intVals[i] = int64(v)
|
||||
}
|
||||
case DTRational:
|
||||
t.ratVals = make([][]int64, int(t.Count))
|
||||
for i := range t.ratVals {
|
||||
var n, d uint32
|
||||
err := binary.Read(r, t.order, &n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = binary.Read(r, t.order, &d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.ratVals[i] = []int64{int64(n), int64(d)}
|
||||
}
|
||||
case DTSRational:
|
||||
t.ratVals = make([][]int64, int(t.Count))
|
||||
for i := range t.ratVals {
|
||||
var n, d int32
|
||||
err := binary.Read(r, t.order, &n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = binary.Read(r, t.order, &d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.ratVals[i] = []int64{int64(n), int64(d)}
|
||||
}
|
||||
case DTFloat: // float32
|
||||
t.floatVals = make([]float64, int(t.Count))
|
||||
for i := range t.floatVals {
|
||||
var v float32
|
||||
err := binary.Read(r, t.order, &v)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.floatVals[i] = float64(v)
|
||||
}
|
||||
case DTDouble:
|
||||
t.floatVals = make([]float64, int(t.Count))
|
||||
for i := range t.floatVals {
|
||||
var u float64
|
||||
err := binary.Read(r, t.order, &u)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
t.floatVals[i] = u
|
||||
}
|
||||
}
|
||||
|
||||
switch t.Type {
|
||||
case DTByte, DTShort, DTLong, DTSByte, DTSShort, DTSLong:
|
||||
t.format = IntVal
|
||||
case DTRational, DTSRational:
|
||||
t.format = RatVal
|
||||
case DTFloat, DTDouble:
|
||||
t.format = FloatVal
|
||||
case DTAscii:
|
||||
t.format = StringVal
|
||||
case DTUndefined:
|
||||
t.format = UndefVal
|
||||
default:
|
||||
t.format = OtherVal
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Format returns a value indicating which method can be called to retrieve the
|
||||
// tag's value properly typed (e.g. integer, rational, etc.).
|
||||
func (t *Tag) Format() Format { return t.format }
|
||||
|
||||
func (t *Tag) typeErr(to Format) error {
|
||||
return &wrongFmtErr{typeNames[t.Type], formatNames[to]}
|
||||
}
|
||||
|
||||
// Rat returns the tag's i'th value as a rational number. It returns a nil and
|
||||
// an error if this tag's Format is not RatVal. It panics for zero denominators
|
||||
// or if i is out of range.
|
||||
func (t *Tag) Rat(i int) (*big.Rat, error) {
|
||||
n, d, err := t.Rat2(i)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return big.NewRat(n, d), nil
|
||||
}
|
||||
|
||||
// Rat2 returns the tag's i'th value as a rational number represented by a
|
||||
// numerator-denominator pair. It returns an error if the tag's Format is not
|
||||
// RatVal. It panics if i is out of range.
|
||||
func (t *Tag) Rat2(i int) (num, den int64, err error) {
|
||||
if t.format != RatVal {
|
||||
return 0, 0, t.typeErr(RatVal)
|
||||
}
|
||||
return t.ratVals[i][0], t.ratVals[i][1], nil
|
||||
}
|
||||
|
||||
// Int64 returns the tag's i'th value as an integer. It returns an error if the
|
||||
// tag's Format is not IntVal. It panics if i is out of range.
|
||||
func (t *Tag) Int64(i int) (int64, error) {
|
||||
if t.format != IntVal {
|
||||
return 0, t.typeErr(IntVal)
|
||||
}
|
||||
return t.intVals[i], nil
|
||||
}
|
||||
|
||||
// Int returns the tag's i'th value as an integer. It returns an error if the
|
||||
// tag's Format is not IntVal. It panics if i is out of range.
|
||||
func (t *Tag) Int(i int) (int, error) {
|
||||
if t.format != IntVal {
|
||||
return 0, t.typeErr(IntVal)
|
||||
}
|
||||
return int(t.intVals[i]), nil
|
||||
}
|
||||
|
||||
// Float returns the tag's i'th value as a float. It returns an error if the
|
||||
// tag's Format is not IntVal. It panics if i is out of range.
|
||||
func (t *Tag) Float(i int) (float64, error) {
|
||||
if t.format != FloatVal {
|
||||
return 0, t.typeErr(FloatVal)
|
||||
}
|
||||
return t.floatVals[i], nil
|
||||
}
|
||||
|
||||
// StringVal returns the tag's value as a string. It returns an error if the
|
||||
// tag's Format is not StringVal. It panics if i is out of range.
|
||||
func (t *Tag) StringVal() (string, error) {
|
||||
if t.format != StringVal {
|
||||
return "", t.typeErr(StringVal)
|
||||
}
|
||||
return t.strVal, nil
|
||||
}
|
||||
|
||||
// String returns a nicely formatted version of the tag.
|
||||
func (t *Tag) String() string {
|
||||
data, err := t.MarshalJSON()
|
||||
if err != nil {
|
||||
return "ERROR: " + err.Error()
|
||||
}
|
||||
|
||||
if t.Count == 1 {
|
||||
return strings.Trim(fmt.Sprintf("%s", data), "[]")
|
||||
}
|
||||
return fmt.Sprintf("%s", data)
|
||||
}
|
||||
|
||||
func (t *Tag) MarshalJSON() ([]byte, error) {
|
||||
switch t.format {
|
||||
case StringVal, UndefVal:
|
||||
return nullString(t.Val), nil
|
||||
case OtherVal:
|
||||
return []byte(fmt.Sprintf("unknown tag type '%v'", t.Type)), nil
|
||||
}
|
||||
|
||||
rv := []string{}
|
||||
for i := 0; i < int(t.Count); i++ {
|
||||
switch t.format {
|
||||
case RatVal:
|
||||
n, d, _ := t.Rat2(i)
|
||||
rv = append(rv, fmt.Sprintf(`"%v/%v"`, n, d))
|
||||
case FloatVal:
|
||||
v, _ := t.Float(i)
|
||||
rv = append(rv, fmt.Sprintf("%v", v))
|
||||
case IntVal:
|
||||
v, _ := t.Int(i)
|
||||
rv = append(rv, fmt.Sprintf("%v", v))
|
||||
}
|
||||
}
|
||||
return []byte(fmt.Sprintf(`[%s]`, strings.Join(rv, ","))), nil
|
||||
}
|
||||
|
||||
func nullString(in []byte) []byte {
|
||||
rv := bytes.Buffer{}
|
||||
rv.WriteByte('"')
|
||||
for _, b := range in {
|
||||
if unicode.IsPrint(rune(b)) {
|
||||
rv.WriteByte(b)
|
||||
}
|
||||
}
|
||||
rv.WriteByte('"')
|
||||
rvb := rv.Bytes()
|
||||
if utf8.Valid(rvb) {
|
||||
return rvb
|
||||
}
|
||||
return []byte(`""`)
|
||||
}
|
||||
|
||||
type wrongFmtErr struct {
|
||||
From, To string
|
||||
}
|
||||
|
||||
func (e *wrongFmtErr) Error() string {
|
||||
return fmt.Sprintf("cannot convert tag type '%v' into '%v'", e.From, e.To)
|
||||
}
|
||||
153 vendor/github.com/rwcarlsen/goexif/tiff/tiff.go generated vendored Normal file
@@ -0,0 +1,153 @@
|
||||
// Package tiff implements TIFF decoding as defined in TIFF 6.0 specification at
|
||||
// http://partners.adobe.com/public/developer/en/tiff/TIFF6.pdf
|
||||
package tiff
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
)
|
||||
|
||||
// ReadAtReader is used when decoding Tiff tags and directories
|
||||
type ReadAtReader interface {
|
||||
io.Reader
|
||||
io.ReaderAt
|
||||
}
|
||||
|
||||
// Tiff provides access to a decoded tiff data structure.
|
||||
type Tiff struct {
|
||||
// Dirs is an ordered slice of the tiff's Image File Directories (IFDs).
|
||||
// The IFD at index 0 is IFD0.
|
||||
Dirs []*Dir
|
||||
// The tiff's byte-encoding (i.e. big/little endian).
|
||||
Order binary.ByteOrder
|
||||
}
|
||||
|
||||
// Decode parses tiff-encoded data from r and returns a Tiff struct that
|
||||
// reflects the structure and content of the tiff data. The first read from r
|
||||
// should be the first byte of the tiff-encoded data and not necessarily the
|
||||
// first byte of an os.File object.
|
||||
func Decode(r io.Reader) (*Tiff, error) {
|
||||
data, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: could not read data")
|
||||
}
|
||||
buf := bytes.NewReader(data)
|
||||
|
||||
t := new(Tiff)
|
||||
|
||||
// read byte order
|
||||
bo := make([]byte, 2)
|
||||
if _, err = io.ReadFull(buf, bo); err != nil {
|
||||
return nil, errors.New("tiff: could not read tiff byte order")
|
||||
}
|
||||
if string(bo) == "II" {
|
||||
t.Order = binary.LittleEndian
|
||||
} else if string(bo) == "MM" {
|
||||
t.Order = binary.BigEndian
|
||||
} else {
|
||||
return nil, errors.New("tiff: could not read tiff byte order")
|
||||
}
|
||||
|
||||
// check for special tiff marker
|
||||
var sp int16
|
||||
err = binary.Read(buf, t.Order, &sp)
|
||||
if err != nil || 42 != sp {
|
||||
return nil, errors.New("tiff: could not find special tiff marker")
|
||||
}
|
||||
|
||||
// load offset to first IFD
|
||||
var offset int32
|
||||
err = binary.Read(buf, t.Order, &offset)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: could not read offset to first IFD")
|
||||
}
|
||||
|
||||
// load IFD's
|
||||
var d *Dir
|
||||
prev := offset
|
||||
for offset != 0 {
|
||||
// seek to offset
|
||||
_, err := buf.Seek(int64(offset), 0)
|
||||
if err != nil {
|
||||
return nil, errors.New("tiff: seek to IFD failed")
|
||||
}
|
||||
|
||||
if buf.Len() == 0 {
|
||||
return nil, errors.New("tiff: seek offset after EOF")
|
||||
}
|
||||
|
||||
// load the dir
|
||||
d, offset, err = DecodeDir(buf, t.Order)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if offset == prev {
|
||||
return nil, errors.New("tiff: recursive IFD")
|
||||
}
|
||||
prev = offset
|
||||
|
||||
t.Dirs = append(t.Dirs, d)
|
||||
}
|
||||
|
||||
return t, nil
|
||||
}
|
||||
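A minimal sketch of using this package directly on a bare TIFF file (the file name is illustrative), dumping each IFD and its tags:

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/rwcarlsen/goexif/tiff"
)

func main() {
	f, err := os.Open("sample1.tif") // hypothetical input; any TIFF works
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	t, err := tiff.Decode(f)
	if err != nil {
		log.Fatal(err)
	}
	for i, dir := range t.Dirs {
		fmt.Printf("IFD%d: %d tags\n", i, len(dir.Tags))
		for _, tag := range dir.Tags {
			// Tag.String() renders the parsed value(s) as JSON-ish text.
			fmt.Printf("  0x%04X: %s\n", tag.Id, tag)
		}
	}
}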
|
||||
func (tf *Tiff) String() string {
|
||||
var buf bytes.Buffer
|
||||
fmt.Fprint(&buf, "Tiff{")
|
||||
for _, d := range tf.Dirs {
|
||||
fmt.Fprintf(&buf, "%s, ", d.String())
|
||||
}
|
||||
fmt.Fprintf(&buf, "}")
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
// Dir provides access to the parsed content of a tiff Image File Directory (IFD).
|
||||
type Dir struct {
|
||||
Tags []*Tag
|
||||
}
|
||||
|
||||
// DecodeDir parses a tiff-encoded IFD from r and returns a Dir object. offset
|
||||
// is the offset to the next IFD. The first read from r should be at the first
|
||||
// byte of the IFD. ReadAt offsets should generally be relative to the
|
||||
// beginning of the tiff structure (not relative to the beginning of the IFD).
|
||||
func DecodeDir(r ReadAtReader, order binary.ByteOrder) (d *Dir, offset int32, err error) {
|
||||
d = new(Dir)
|
||||
|
||||
// get num of tags in ifd
|
||||
var nTags int16
|
||||
err = binary.Read(r, order, &nTags)
|
||||
if err != nil {
|
||||
return nil, 0, errors.New("tiff: failed to read IFD tag count: " + err.Error())
|
||||
}
|
||||
|
||||
// load tags
|
||||
for n := 0; n < int(nTags); n++ {
|
||||
t, err := DecodeTag(r, order)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
d.Tags = append(d.Tags, t)
|
||||
}
|
||||
|
||||
// get offset to next ifd
|
||||
err = binary.Read(r, order, &offset)
|
||||
if err != nil {
|
||||
return nil, 0, errors.New("tiff: falied to read offset to next IFD: " + err.Error())
|
||||
}
|
||||
|
||||
return d, offset, nil
|
||||
}
|
||||
|
||||
func (d *Dir) String() string {
|
||||
s := "Dir{"
|
||||
for _, t := range d.Tags {
|
||||
s += t.String() + ", "
|
||||
}
|
||||
return s + "}"
|
||||
}
|
||||
20 vendor/modules.txt vendored
@@ -876,9 +876,19 @@ github.com/klauspost/compress/zstd/internal/xxhash
# github.com/klauspost/cpuid/v2 v2.2.11
## explicit; go 1.22
github.com/klauspost/cpuid/v2
# github.com/kovidgoyal/imaging v1.6.4
## explicit; go 1.21
# github.com/kovidgoyal/go-parallel v1.0.1
## explicit; go 1.23
github.com/kovidgoyal/go-parallel
# github.com/kovidgoyal/imaging v1.7.2
## explicit; go 1.24.0
github.com/kovidgoyal/imaging
github.com/kovidgoyal/imaging/prism/meta
github.com/kovidgoyal/imaging/prism/meta/autometa
github.com/kovidgoyal/imaging/prism/meta/icc
github.com/kovidgoyal/imaging/prism/meta/jpegmeta
github.com/kovidgoyal/imaging/prism/meta/pngmeta
github.com/kovidgoyal/imaging/prism/meta/webpmeta
github.com/kovidgoyal/imaging/streams
# github.com/leodido/go-urn v1.4.0
## explicit; go 1.18
github.com/leodido/go-urn
@@ -1865,6 +1875,10 @@ github.com/russellhaering/goxmldsig/types
# github.com/russross/blackfriday/v2 v2.1.0
## explicit
github.com/russross/blackfriday/v2
# github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
## explicit
github.com/rwcarlsen/goexif/exif
github.com/rwcarlsen/goexif/tiff
# github.com/segmentio/asm v1.2.0
## explicit; go 1.18
github.com/segmentio/asm/base64
@@ -2383,7 +2397,7 @@ golang.org/x/exp/slices
golang.org/x/exp/slog
golang.org/x/exp/slog/internal
golang.org/x/exp/slog/internal/buffer
# golang.org/x/image v0.31.0
# golang.org/x/image v0.32.0
## explicit; go 1.24.0
golang.org/x/image/bmp
golang.org/x/image/ccitt