build(deps): bump github.com/olekukonko/tablewriter from 1.1.0 to 1.1.1
Bumps [github.com/olekukonko/tablewriter](https://github.com/olekukonko/tablewriter) from 1.1.0 to 1.1.1.
- [Commits](https://github.com/olekukonko/tablewriter/compare/v1.1.0...v1.1.1)

---
updated-dependencies:
- dependency-name: github.com/olekukonko/tablewriter
  dependency-version: 1.1.1
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
committed by Ralf Haferkamp
parent 9d173f0ea6
commit 28148d02bd
go.mod (13 changed lines)

@@ -57,7 +57,7 @@ require (
github.com/nats-io/nats-server/v2 v2.12.1
github.com/nats-io/nats.go v1.47.0
github.com/oklog/run v1.2.0
github.com/olekukonko/tablewriter v1.1.0
github.com/olekukonko/tablewriter v1.1.1
github.com/onsi/ginkgo v1.16.5
github.com/onsi/ginkgo/v2 v2.27.2
github.com/onsi/gomega v1.38.2
@@ -113,6 +113,7 @@ require (
google.golang.org/grpc v1.76.0
google.golang.org/protobuf v1.36.10
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
gotest.tools/v3 v3.5.2
stash.kopano.io/kgol/rndm v1.1.2
)
@@ -163,6 +164,9 @@ require (
github.com/ceph/go-ceph v0.36.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cevaris/ordered_map v0.0.0-20190319150403-3adeae072e73 // indirect
github.com/clipperhouse/displaywidth v0.3.1 // indirect
github.com/clipperhouse/stringish v0.1.1 // indirect
github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
github.com/cloudflare/circl v1.6.1 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
@@ -276,7 +280,7 @@ require (
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mattn/go-runewidth v0.0.19 // indirect
github.com/mattn/go-sqlite3 v1.14.32 // indirect
github.com/maxymania/go-system v0.0.0-20170110133659-647cc364bf0b // indirect
github.com/mendsley/gojwk v0.0.0-20141217222730-4d5ec6e58103 // indirect
@@ -304,8 +308,9 @@ require (
github.com/nats-io/nkeys v0.4.11 // indirect
github.com/nats-io/nuid v1.0.1 // indirect
github.com/nxadm/tail v1.4.8 // indirect
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 // indirect
github.com/olekukonko/errors v1.1.0 // indirect
github.com/olekukonko/ll v0.0.9 // indirect
github.com/olekukonko/ll v0.1.2 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
@@ -325,7 +330,6 @@ require (
github.com/prometheus/procfs v0.17.0 // indirect
github.com/prometheus/statsd_exporter v0.22.8 // indirect
github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rs/xid v1.6.0 // indirect
github.com/russellhaering/goxmldsig v1.5.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
@@ -385,7 +389,6 @@ require (
gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
sigs.k8s.io/yaml v1.6.0 // indirect
)
go.sum (23 changed lines)

@@ -223,6 +223,12 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/clipperhouse/displaywidth v0.3.1 h1:k07iN9gD32177o1y4O1jQMzbLdCrsGJh+blirVYybsk=
github.com/clipperhouse/displaywidth v0.3.1/go.mod h1:tgLJKKyaDOCadywag3agw4snxS5kYEuYR6Y9+qWDDYM=
github.com/clipperhouse/stringish v0.1.1 h1:+NSqMOr3GR6k1FdRhhnXrLfztGzuG+VuFDfatpWHKCs=
github.com/clipperhouse/stringish v0.1.1/go.mod h1:v/WhFtE1q0ovMta2+m+UbpZ+2/HEXNWYXQgCt4hdOzA=
github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
github.com/cloudflare/circl v1.6.1 h1:zqIqSPIndyBh1bjLVVDHMPpVKqp8Su/V+6MeDzzQBQ0=
github.com/cloudflare/circl v1.6.1/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs=
github.com/cloudflare/cloudflare-go v0.14.0/go.mod h1:EnwdgGMaFOruiPZRFSgn+TsQ3hQ7C/YWzIGLeu5c304=
@@ -821,8 +827,8 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-tty v0.0.0-20180219170247-931426f7535a/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
@@ -924,13 +930,15 @@ github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6 h1:zrbMGy9YXpIeTnGj4EljqMiZsIcE09mmF8XsD5AYOJc=
github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6/go.mod h1:rEKTHC9roVVicUIfZK7DYrdIoM0EOr8mK1Hj5s3JjH0=
github.com/olekukonko/errors v1.1.0 h1:RNuGIh15QdDenh+hNvKrJkmxxjV4hcS50Db478Ou5sM=
github.com/olekukonko/errors v1.1.0/go.mod h1:ppzxA5jBKcO1vIpCXQ9ZqgDh8iwODz6OXIGKU8r5m4Y=
github.com/olekukonko/ll v0.0.9 h1:Y+1YqDfVkqMWuEQMclsF9HUR5+a82+dxJuL1HHSRpxI=
github.com/olekukonko/ll v0.0.9/go.mod h1:En+sEW0JNETl26+K8eZ6/W4UQ7CYSrrgg/EdIYT2H8g=
github.com/olekukonko/ll v0.1.2 h1:lkg/k/9mlsy0SxO5aC+WEpbdT5K83ddnNhAepz7TQc0=
github.com/olekukonko/ll v0.1.2/go.mod h1:b52bVQRRPObe+yyBl0TxNfhesL0nedD4Cht0/zx55Ew=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/olekukonko/tablewriter v1.1.0 h1:N0LHrshF4T39KvI96fn6GT8HEjXRXYNDrDjKFDB7RIY=
github.com/olekukonko/tablewriter v1.1.0/go.mod h1:5c+EBPeSqvXnLLgkm9isDdzR3wjfBkHR9Nhfp3NWrzo=
github.com/olekukonko/tablewriter v1.1.1 h1:b3reP6GCfrHwmKkYwNRFh2rxidGHcT6cgxj/sHiDDx0=
github.com/olekukonko/tablewriter v1.1.1/go.mod h1:De/bIcTF+gpBDB3Alv3fEsZA+9unTsSzAg/ZGADCtn4=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
@@ -1064,9 +1072,6 @@ github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9 h1:bsUq1dX0N8A
github.com/rcrowley/go-metrics v0.0.0-20250401214520-65e299d6c5c9/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/riandyrn/otelchi v0.12.2 h1:6QhGv0LVw/dwjtPd12mnNrl0oEQF4ZAlmHcnlTYbeAg=
github.com/riandyrn/otelchi v0.12.2/go.mod h1:weZZeUJURvtCcbWsdb7Y6F8KFZGedJlSrgUjq9VirV8=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
vendor/github.com/clipperhouse/displaywidth/.gitignore (new file, generated, vendored, 1 line)

@@ -0,0 +1 @@
.DS_Store
vendor/github.com/clipperhouse/displaywidth/AGENTS.md (new file, generated, vendored, 37 lines)

@@ -0,0 +1,37 @@
The goals and overview of this package can be found in the README.md file,
start by reading that.

The goal of this package is to determine the display (column) width of a
string, UTF-8 bytes, or runes, as would happen in a monospace font, especially
in a terminal.

When troubleshooting, write Go unit tests instead of executing debug scripts.
The tests can return whatever logs or output you need. If those tests are
only for temporary troubleshooting, clean up the tests after the debugging is
done.

(Separate executable debugging scripts are messy, tend to have conflicting
dependencies and are hard to cleanup.)

If you make changes to the trie generation in internal/gen, it can be invoked
by running `go generate` from the top package directory.

## Pull Requests and branches

For PRs (pull requests), you can use the gh CLI tool to retrieve details,
or post comments. Then, compare the current branch with main. Reviewing a PR
and reviewing a branch are about the same, but the PR may add context.

Look for bugs. Think like GitHub Copilot or Cursor BugBot.

Offer to post a brief summary of the review to the PR, via the gh CLI tool.

## Comparisons to go-runewidth

We originally attempted to make this package compatible with go-runewidth.
However, we found that there were too many differences in the handling of
certain characters and properties.

We believe, preliminarily, that our choices are more correct and complete,
by using more complete categories such as Unicode Cf (format) for zero-width
and Mn (Nonspacing_Mark) for combining marks.
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 Oliver Kuederle
Copyright (c) 2025 Matt Sherman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
vendor/github.com/clipperhouse/displaywidth/README.md (new file, generated, vendored, 115 lines)

@@ -0,0 +1,115 @@
# displaywidth

A high-performance Go package for measuring the monospace display width of strings, UTF-8 bytes, and runes.

[](https://pkg.go.dev/github.com/clipperhouse/displaywidth)
[](https://github.com/clipperhouse/displaywidth/actions/workflows/gotest.yml)
[](https://github.com/clipperhouse/displaywidth/actions/workflows/gofuzz.yml)
## Install
```bash
go get github.com/clipperhouse/displaywidth
```

## Usage

```go
package main

import (
    "fmt"
    "github.com/clipperhouse/displaywidth"
)

func main() {
    width := displaywidth.String("Hello, 世界!")
    fmt.Println(width)

    width = displaywidth.Bytes([]byte("🌍"))
    fmt.Println(width)

    width = displaywidth.Rune('🌍')
    fmt.Println(width)
}
```

### Options

You can specify East Asian Width and Strict Emoji Neutral settings. If
unspecified, the default is `EastAsianWidth: false, StrictEmojiNeutral: true`.

```go
options := displaywidth.Options{
    EastAsianWidth:     true,
    StrictEmojiNeutral: false,
}

width := options.String("Hello, 世界!")
fmt.Println(width)
```

## Details

This package implements the Unicode East Asian Width standard (UAX #11) and is
intended to be compatible with `go-runewidth`. It operates on bytes without
decoding runes for better performance.

## Prior Art

[mattn/go-runewidth](https://github.com/mattn/go-runewidth)

[x/text/width](https://pkg.go.dev/golang.org/x/text/width)

[x/text/internal/triegen](https://pkg.go.dev/golang.org/x/text/internal/triegen)

## Benchmarks

Part of my motivation is the insight that we can avoid decoding runes for better performance.

```bash
go test -bench=. -benchmem
```

```
goos: darwin
goarch: arm64
pkg: github.com/clipperhouse/displaywidth
cpu: Apple M2
BenchmarkStringDefault/displaywidth-8          10537 ns/op   160.10 MB/s   0 B/op   0 allocs/op
BenchmarkStringDefault/go-runewidth-8          14162 ns/op   119.12 MB/s   0 B/op   0 allocs/op
BenchmarkString_EAW/displaywidth-8             10776 ns/op   156.55 MB/s   0 B/op   0 allocs/op
BenchmarkString_EAW/go-runewidth-8             23987 ns/op    70.33 MB/s   0 B/op   0 allocs/op
BenchmarkString_StrictEmoji/displaywidth-8     10892 ns/op   154.88 MB/s   0 B/op   0 allocs/op
BenchmarkString_StrictEmoji/go-runewidth-8     14552 ns/op   115.93 MB/s   0 B/op   0 allocs/op
BenchmarkString_ASCII/displaywidth-8            1116 ns/op   114.72 MB/s   0 B/op   0 allocs/op
BenchmarkString_ASCII/go-runewidth-8            1178 ns/op   108.67 MB/s   0 B/op   0 allocs/op
BenchmarkString_Unicode/displaywidth-8         896.9 ns/op   148.29 MB/s   0 B/op   0 allocs/op
BenchmarkString_Unicode/go-runewidth-8          1434 ns/op    92.72 MB/s   0 B/op   0 allocs/op
BenchmarkStringWidth_Emoji/displaywidth-8       3033 ns/op   238.74 MB/s   0 B/op   0 allocs/op
BenchmarkStringWidth_Emoji/go-runewidth-8       4841 ns/op   149.56 MB/s   0 B/op   0 allocs/op
BenchmarkString_Mixed/displaywidth-8            4064 ns/op   124.74 MB/s   0 B/op   0 allocs/op
BenchmarkString_Mixed/go-runewidth-8            4696 ns/op   107.97 MB/s   0 B/op   0 allocs/op
BenchmarkString_ControlChars/displaywidth-8    320.6 ns/op   102.93 MB/s   0 B/op   0 allocs/op
BenchmarkString_ControlChars/go-runewidth-8    373.8 ns/op    88.28 MB/s   0 B/op   0 allocs/op
BenchmarkRuneDefault/displaywidth-8            335.5 ns/op   411.35 MB/s   0 B/op   0 allocs/op
BenchmarkRuneDefault/go-runewidth-8            681.2 ns/op   202.58 MB/s   0 B/op   0 allocs/op
BenchmarkRuneWidth_EAW/displaywidth-8          146.7 ns/op   374.80 MB/s   0 B/op   0 allocs/op
BenchmarkRuneWidth_EAW/go-runewidth-8          495.6 ns/op   110.98 MB/s   0 B/op   0 allocs/op
BenchmarkRuneWidth_ASCII/displaywidth-8        63.00 ns/op   460.33 MB/s   0 B/op   0 allocs/op
BenchmarkRuneWidth_ASCII/go-runewidth-8        68.90 ns/op   420.91 MB/s   0 B/op   0 allocs/op
```

I use a similar technique in [this grapheme cluster library](https://github.com/clipperhouse/uax29).

## Compatibility

`displaywidth` will mostly give the same outputs as `go-runewidth`, but there are some differences:

- Unicode category Mn (Nonspacing Mark): `displaywidth` will return width 0, `go-runewidth` may return width 1 for some runes.
- Unicode category Cf (Format): `displaywidth` will return width 0, `go-runewidth` may return width 1 for some runes.
- Unicode category Mc (Spacing Mark): `displaywidth` will return width 1, `go-runewidth` may return width 0 for some runes.
- Unicode category Cs (Surrogate): `displaywidth` will return width 0, `go-runewidth` may return width 1 for some runes. Surrogates are not valid UTF-8; some packages may turn them into the replacement character (U+FFFD).
- Unicode category Zl (Line separator): `displaywidth` will return width 0, `go-runewidth` may return width 1.
- Unicode category Zp (Paragraph separator): `displaywidth` will return width 0, `go-runewidth` may return width 1.
- Unicode Noncharacters (U+FFFE and U+FFFF): `displaywidth` will return width 0, `go-runewidth` may return width 1.

See `TestCompatibility` for more details.
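As a quick illustration of the Mn difference listed above, here is a hedged sketch (not part of the vendored file; it assumes go-runewidth's top-level `RuneWidth` helper and the behavior described in the compatibility notes, which can vary by version and options):

```go
package main

import (
    "fmt"

    "github.com/clipperhouse/displaywidth"
    runewidth "github.com/mattn/go-runewidth"
)

func main() {
    // U+0301 COMBINING ACUTE ACCENT is Unicode category Mn (Nonspacing Mark).
    // Per the compatibility notes above, displaywidth reports 0 for it,
    // while go-runewidth may report 1 for some such runes.
    mark := '\u0301'
    fmt.Println(displaywidth.Rune(mark))   // 0
    fmt.Println(runewidth.RuneWidth(mark)) // may differ by version/configuration
}
```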
vendor/github.com/clipperhouse/displaywidth/gen.go (new file, generated, vendored, 3 lines)

@@ -0,0 +1,3 @@
package displaywidth

//go:generate go run -C internal/gen .
vendor/github.com/clipperhouse/displaywidth/trie.go (new file, generated, vendored, 1897 lines)

File diff suppressed because it is too large
vendor/github.com/clipperhouse/displaywidth/width.go (new file, generated, vendored, 159 lines)

@@ -0,0 +1,159 @@
package displaywidth

import (
    "unicode/utf8"

    "github.com/clipperhouse/stringish"
    "github.com/clipperhouse/uax29/v2/graphemes"
)

// String calculates the display width of a string
// using the [DefaultOptions]
func String(s string) int {
    return DefaultOptions.String(s)
}

// Bytes calculates the display width of a []byte
// using the [DefaultOptions]
func Bytes(s []byte) int {
    return DefaultOptions.Bytes(s)
}

func Rune(r rune) int {
    return DefaultOptions.Rune(r)
}

type Options struct {
    EastAsianWidth     bool
    StrictEmojiNeutral bool
}

var DefaultOptions = Options{
    EastAsianWidth:     false,
    StrictEmojiNeutral: true,
}

// String calculates the display width of a string
// for the given options
func (options Options) String(s string) int {
    if len(s) == 0 {
        return 0
    }

    total := 0
    g := graphemes.FromString(s)
    for g.Next() {
        // The first character in the grapheme cluster determines the width;
        // modifiers and joiners do not contribute to the width.
        props, _ := lookupProperties(g.Value())
        total += props.width(options)
    }
    return total
}

// BytesOptions calculates the display width of a []byte
// for the given options
func (options Options) Bytes(s []byte) int {
    if len(s) == 0 {
        return 0
    }

    total := 0
    g := graphemes.FromBytes(s)
    for g.Next() {
        // The first character in the grapheme cluster determines the width;
        // modifiers and joiners do not contribute to the width.
        props, _ := lookupProperties(g.Value())
        total += props.width(options)
    }
    return total
}

func (options Options) Rune(r rune) int {
    // Fast path for ASCII
    if r < utf8.RuneSelf {
        if isASCIIControl(byte(r)) {
            // Control (0x00-0x1F) and DEL (0x7F)
            return 0
        }
        // ASCII printable (0x20-0x7E)
        return 1
    }

    // Surrogates (U+D800-U+DFFF) are invalid UTF-8 and have zero width
    // Other packages might turn them into the replacement character (U+FFFD)
    // in which case, we won't see it.
    if r >= 0xD800 && r <= 0xDFFF {
        return 0
    }

    // Stack-allocated to avoid heap allocation
    var buf [4]byte // UTF-8 is at most 4 bytes
    n := utf8.EncodeRune(buf[:], r)
    // Skip the grapheme iterator and directly lookup properties
    props, _ := lookupProperties(buf[:n])
    return props.width(options)
}

func isASCIIControl(b byte) bool {
    return b < 0x20 || b == 0x7F
}

const defaultWidth = 1

// is returns true if the property flag is set
func (p property) is(flag property) bool {
    return p&flag != 0
}

// lookupProperties returns the properties for the first character in a string
func lookupProperties[T stringish.Interface](s T) (property, int) {
    if len(s) == 0 {
        return 0, 0
    }

    // Fast path for ASCII characters (single byte)
    b := s[0]
    if b < utf8.RuneSelf { // Single-byte ASCII
        if isASCIIControl(b) {
            // Control characters (0x00-0x1F) and DEL (0x7F) - width 0
            return _ZeroWidth, 1
        }
        // ASCII printable characters (0x20-0x7E) - width 1
        // Return 0 properties, width calculation will default to 1
        return 0, 1
    }

    // Use the generated trie for lookup
    props, size := lookup(s)
    return property(props), size
}

// width determines the display width of a character based on its properties
// and configuration options
func (p property) width(options Options) int {
    if p == 0 {
        // Character not in trie, use default behavior
        return defaultWidth
    }

    if p.is(_ZeroWidth) {
        return 0
    }

    if options.EastAsianWidth {
        if p.is(_East_Asian_Ambiguous) {
            return 2
        }
        if p.is(_East_Asian_Ambiguous|_Emoji) && !options.StrictEmojiNeutral {
            return 2
        }
    }

    if p.is(_East_Asian_Full_Wide) {
        return 2
    }

    // Default width for all other characters
    return defaultWidth
}
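The String and Bytes loops above charge each grapheme cluster once, using the properties of its first character. A small sketch of the consequence (illustrative only; expected values follow from the algorithm above):

```go
package main

import (
    "fmt"

    "github.com/clipperhouse/displaywidth"
)

func main() {
    // A ZWJ emoji sequence is one grapheme cluster; only its first character
    // (a wide emoji) determines the width, so the whole family counts as 2.
    fmt.Println(displaywidth.String("👩‍👩‍👧")) // expected: 2

    // A combining mark forms one cluster with the preceding 'e'; the first
    // character is ASCII, so the cluster counts as 1.
    fmt.Println(displaywidth.String("e\u0301")) // expected: 1
}
```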
vendor/github.com/clipperhouse/stringish/.gitignore (new file, generated, vendored, 2 lines)

@@ -0,0 +1,2 @@
.DS_Store
*.test
vendor/github.com/clipperhouse/stringish/LICENSE (new file, generated, vendored, 21 lines)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Matt Sherman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/clipperhouse/stringish/README.md (new file, generated, vendored, 64 lines)

@@ -0,0 +1,64 @@
# stringish

A small Go module that provides a generic type constraint for “string-like”
data, and a utf8 package that works with both strings and byte slices
without conversions.

```go
type Interface interface {
    ~[]byte | ~string
}
```

[](https://pkg.go.dev/github.com/clipperhouse/stringish/utf8)
[](https://github.com/clipperhouse/stringish/actions/workflows/gotest.yml)

## Install

```
go get github.com/clipperhouse/stringish
```

## Examples

```go
import (
    "github.com/clipperhouse/stringish"
    "github.com/clipperhouse/stringish/utf8"
)

s := "Hello, 世界"
r, size := utf8.DecodeRune(s) // not DecodeRuneInString 🎉

b := []byte("Hello, 世界")
r, size = utf8.DecodeRune(b) // same API!

func MyFoo[T stringish.Interface](s T) T {
    // pass a string or a []byte
    // iterate, slice, transform, whatever
}
```

## Motivation

Sometimes we want APIs to accept `string` or `[]byte` without having to convert
between those types. That conversion usually allocates!

By implementing with `stringish.Interface`, we can have a single API, and
single implementation for both types: one `Foo` instead of `Foo` and
`FooString`.

We have converted the
[`unicode/utf8` package](https://github.com/clipperhouse/stringish/blob/main/utf8/utf8.go)
as an example -- note the absence of`*InString` funcs. We might look at `x/text`
next.

## Used by

- clipperhouse/uax29: [stringish trie](https://github.com/clipperhouse/uax29/blob/master/graphemes/trie.go#L27), [stringish iterator](https://github.com/clipperhouse/uax29/blob/master/internal/iterators/iterator.go#L9), [stringish SplitFunc](https://github.com/clipperhouse/uax29/blob/master/graphemes/splitfunc.go#L21)

- [clipperhouse/displaywidth](https://github.com/clipperhouse/displaywidth)

## Prior discussion

- [Consideration of similar by the Go team](https://github.com/golang/go/issues/48643)
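To make the motivation concrete, here is a hedged sketch of one generic function that serves both string and []byte callers without conversion; it assumes the generic `DecodeRune` shown in the README examples above:

```go
package main

import (
    "fmt"

    "github.com/clipperhouse/stringish"
    "github.com/clipperhouse/stringish/utf8"
)

// countRunes accepts either a string or a []byte, with no conversion and
// therefore no extra allocation.
func countRunes[T stringish.Interface](s T) int {
    n := 0
    for len(s) > 0 {
        _, size := utf8.DecodeRune(s) // assumed generic, as in the README example
        s = s[size:]                  // slicing a type parameter works for ~[]byte | ~string
        n++
    }
    return n
}

func main() {
    fmt.Println(countRunes("Hello, 世界"))         // 9
    fmt.Println(countRunes([]byte("Hello, 世界"))) // 9
}
```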
vendor/github.com/clipperhouse/stringish/interface.go (new file, generated, vendored, 5 lines)

@@ -0,0 +1,5 @@
package stringish

type Interface interface {
    ~[]byte | ~string
}
vendor/github.com/clipperhouse/uax29/v2/LICENSE (new file, generated, vendored, 21 lines)

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2020 Matt Sherman

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/clipperhouse/uax29/v2/graphemes/README.md (new file, generated, vendored, 82 lines)

@@ -0,0 +1,82 @@
An implementation of grapheme cluster boundaries from [Unicode text segmentation](https://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries) (UAX 29), for Unicode version 15.0.0.

## Quick start

```
go get "github.com/clipperhouse/uax29/v2/graphemes"
```

```go
import "github.com/clipperhouse/uax29/v2/graphemes"

text := "Hello, 世界. Nice dog! 👍🐶"

tokens := graphemes.FromString(text)

for tokens.Next() { // Next() returns true until end of data
    fmt.Println(tokens.Value()) // Do something with the current grapheme
}
```

[](https://pkg.go.dev/github.com/clipperhouse/uax29/v2/graphemes)

_A grapheme is a “single visible character”, which might be a simple as a single letter, or a complex emoji that consists of several Unicode code points._

## Conformance

We use the Unicode [test suite](https://unicode.org/reports/tr41/tr41-26.html#Tests29). Status:

## APIs

### If you have a `string`

```go
text := "Hello, 世界. Nice dog! 👍🐶"

tokens := graphemes.FromString(text)

for tokens.Next() { // Next() returns true until end of data
    fmt.Println(tokens.Value()) // Do something with the current grapheme
}
```

### If you have an `io.Reader`

`FromReader` embeds a [`bufio.Scanner`](https://pkg.go.dev/bufio#Scanner), so just use those methods.

```go
r := getYourReader() // from a file or network maybe
tokens := graphemes.FromReader(r)

for tokens.Scan() { // Scan() returns true until error or EOF
    fmt.Println(tokens.Text()) // Do something with the current grapheme
}

if tokens.Err() != nil { // Check the error
    log.Fatal(tokens.Err())
}
```

### If you have a `[]byte`

```go
b := []byte("Hello, 世界. Nice dog! 👍🐶")

tokens := graphemes.FromBytes(b)

for tokens.Next() { // Next() returns true until end of data
    fmt.Println(tokens.Value()) // Do something with the current grapheme
}
```

### Performance

On a Mac M2 laptop, we see around 200MB/s, or around 100 million graphemes per second. You should see ~constant memory, and no allocations.

### Invalid inputs

Invalid UTF-8 input is considered undefined behavior. We test to ensure that bad inputs will not cause pathological outcomes, such as a panic or infinite loop. Callers should expect “garbage-in, garbage-out”.

Your pipeline should probably include a call to [`utf8.Valid()`](https://pkg.go.dev/unicode/utf8#Valid).
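Following the advice above, a minimal sketch that validates input before iterating, using only the standard library and the APIs shown in this README:

```go
package main

import (
    "fmt"
    "unicode/utf8"

    "github.com/clipperhouse/uax29/v2/graphemes"
)

func main() {
    b := []byte("Nice dog! 👍🐶")

    // Invalid UTF-8 is undefined behavior for the iterator, so check first.
    if !utf8.Valid(b) {
        fmt.Println("input is not valid UTF-8")
        return
    }

    tokens := graphemes.FromBytes(b)
    for tokens.Next() {
        fmt.Printf("%q\n", tokens.Value()) // one grapheme cluster per line
    }
}
```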
vendor/github.com/clipperhouse/uax29/v2/graphemes/iterator.go (new file, generated, vendored, 28 lines)

@@ -0,0 +1,28 @@
package graphemes

import "github.com/clipperhouse/uax29/v2/internal/iterators"

type Iterator[T iterators.Stringish] struct {
    *iterators.Iterator[T]
}

var (
    splitFuncString = splitFunc[string]
    splitFuncBytes  = splitFunc[[]byte]
)

// FromString returns an iterator for the grapheme clusters in the input string.
// Iterate while Next() is true, and access the grapheme via Value().
func FromString(s string) Iterator[string] {
    return Iterator[string]{
        iterators.New(splitFuncString, s),
    }
}

// FromBytes returns an iterator for the grapheme clusters in the input bytes.
// Iterate while Next() is true, and access the grapheme via Value().
func FromBytes(b []byte) Iterator[[]byte] {
    return Iterator[[]byte]{
        iterators.New(splitFuncBytes, b),
    }
}
vendor/github.com/clipperhouse/uax29/v2/graphemes/reader.go (new file, generated, vendored, 25 lines)

@@ -0,0 +1,25 @@
// Package graphemes implements Unicode grapheme cluster boundaries: https://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
package graphemes

import (
    "bufio"
    "io"
)

type Scanner struct {
    *bufio.Scanner
}

// FromReader returns a Scanner, to split graphemes per
// https://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries.
//
// It embeds a [bufio.Scanner], so you can use its methods.
//
// Iterate through graphemes by calling Scan() until false, then check Err().
func FromReader(r io.Reader) *Scanner {
    sc := bufio.NewScanner(r)
    sc.Split(SplitFunc)
    return &Scanner{
        Scanner: sc,
    }
}
vendor/github.com/clipperhouse/uax29/v2/graphemes/splitfunc.go (new file, generated, vendored, 174 lines)

@@ -0,0 +1,174 @@
package graphemes

import (
    "bufio"

    "github.com/clipperhouse/uax29/v2/internal/iterators"
)

// is determines if lookup intersects propert(ies)
func (lookup property) is(properties property) bool {
    return (lookup & properties) != 0
}

const _Ignore = _Extend

// SplitFunc is a bufio.SplitFunc implementation of Unicode grapheme cluster segmentation, for use with bufio.Scanner.
//
// See https://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries.
var SplitFunc bufio.SplitFunc = splitFunc[[]byte]

func splitFunc[T iterators.Stringish](data T, atEOF bool) (advance int, token T, err error) {
    var empty T
    if len(data) == 0 {
        return 0, empty, nil
    }

    // These vars are stateful across loop iterations
    var pos int
    var lastExIgnore property = 0     // "last excluding ignored categories"
    var lastLastExIgnore property = 0 // "last one before that"
    var regionalIndicatorCount int

    // Rules are usually of the form Cat1 × Cat2; "current" refers to the first property
    // to the right of the ×, from which we look back or forward

    current, w := lookup(data[pos:])
    if w == 0 {
        if !atEOF {
            // Rune extends past current data, request more
            return 0, empty, nil
        }
        pos = len(data)
        return pos, data[:pos], nil
    }

    // https://unicode.org/reports/tr29/#GB1
    // Start of text always advances
    pos += w

    for {
        eot := pos == len(data) // "end of text"

        if eot {
            if !atEOF {
                // Token extends past current data, request more
                return 0, empty, nil
            }

            // https://unicode.org/reports/tr29/#GB2
            break
        }

        /*
            We've switched the evaluation order of GB1↓ and GB2↑. It's ok:
            because we've checked for len(data) at the top of this function,
            sot and eot are mutually exclusive, order doesn't matter.
        */

        // Rules are usually of the form Cat1 × Cat2; "current" refers to the first property
        // to the right of the ×, from which we look back or forward

        // Remember previous properties to avoid lookups/lookbacks
        last := current
        if !last.is(_Ignore) {
            lastLastExIgnore = lastExIgnore
            lastExIgnore = last
        }

        current, w = lookup(data[pos:])
        if w == 0 {
            if atEOF {
                // Just return the bytes, we can't do anything with them
                pos = len(data)
                break
            }
            // Rune extends past current data, request more
            return 0, empty, nil
        }

        // Optimization: no rule can possibly apply
        if current|last == 0 { // i.e. both are zero
            break
        }

        // https://unicode.org/reports/tr29/#GB3
        if current.is(_LF) && last.is(_CR) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB4
        // https://unicode.org/reports/tr29/#GB5
        if (current | last).is(_Control | _CR | _LF) {
            break
        }

        // https://unicode.org/reports/tr29/#GB6
        if current.is(_L|_V|_LV|_LVT) && last.is(_L) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB7
        if current.is(_V|_T) && last.is(_LV|_V) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB8
        if current.is(_T) && last.is(_LVT|_T) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB9
        if current.is(_Extend | _ZWJ) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB9a
        if current.is(_SpacingMark) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB9b
        if last.is(_Prepend) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB9c
        // TODO(clipperhouse):
        // It appears to be added in Unicode 15.1.0:
        // https://unicode.org/versions/Unicode15.1.0/#Migration
        // This package currently supports Unicode 15.0.0, so
        // out of scope for now

        // https://unicode.org/reports/tr29/#GB11
        if current.is(_ExtendedPictographic) && last.is(_ZWJ) && lastLastExIgnore.is(_ExtendedPictographic) {
            pos += w
            continue
        }

        // https://unicode.org/reports/tr29/#GB12
        // https://unicode.org/reports/tr29/#GB13
        if (current & last).is(_RegionalIndicator) {
            regionalIndicatorCount++

            odd := regionalIndicatorCount%2 == 1
            if odd {
                pos += w
                continue
            }
        }

        // If we fall through all the above rules, it's a grapheme cluster break
        break
    }

    // Return token
    return pos, data[:pos], nil
}
vendor/github.com/clipperhouse/uax29/v2/graphemes/trie.go (new file, generated, vendored, 1409 lines)

File diff suppressed because it is too large
vendor/github.com/clipperhouse/uax29/v2/internal/iterators/iterator.go (new file, generated, vendored, 85 lines)

@@ -0,0 +1,85 @@
package iterators

type Stringish interface {
    []byte | string
}

type SplitFunc[T Stringish] func(T, bool) (int, T, error)

// Iterator is a generic iterator for words that are either []byte or string.
// Iterate while Next() is true, and access the word via Value().
type Iterator[T Stringish] struct {
    split SplitFunc[T]
    data  T
    start int
    pos   int
}

// New creates a new Iterator for the given data and SplitFunc.
func New[T Stringish](split SplitFunc[T], data T) *Iterator[T] {
    return &Iterator[T]{
        split: split,
        data:  data,
    }
}

// SetText sets the text for the iterator to operate on, and resets all state.
func (iter *Iterator[T]) SetText(data T) {
    iter.data = data
    iter.start = 0
    iter.pos = 0
}

// Split sets the SplitFunc for the Iterator.
func (iter *Iterator[T]) Split(split SplitFunc[T]) {
    iter.split = split
}

// Next advances the iterator to the next token. It returns false when there
// are no remaining tokens or an error occurred.
func (iter *Iterator[T]) Next() bool {
    if iter.pos == len(iter.data) {
        return false
    }
    if iter.pos > len(iter.data) {
        panic("SplitFunc advanced beyond the end of the data")
    }

    iter.start = iter.pos

    advance, _, err := iter.split(iter.data[iter.pos:], true)
    if err != nil {
        panic(err)
    }
    if advance <= 0 {
        panic("SplitFunc returned a zero or negative advance")
    }

    iter.pos += advance
    if iter.pos > len(iter.data) {
        panic("SplitFunc advanced beyond the end of the data")
    }

    return true
}

// Value returns the current token.
func (iter *Iterator[T]) Value() T {
    return iter.data[iter.start:iter.pos]
}

// Start returns the byte position of the current token in the original data.
func (iter *Iterator[T]) Start() int {
    return iter.start
}

// End returns the byte position after the current token in the original data.
func (iter *Iterator[T]) End() int {
    return iter.pos
}

// Reset resets the iterator to the beginning of the data.
func (iter *Iterator[T]) Reset() {
    iter.start = 0
    iter.pos = 0
}
vendor/github.com/mattn/go-runewidth/benchstat.txt (new file, generated, vendored, 43 lines)

@@ -0,0 +1,43 @@
goos: darwin
goarch: arm64
pkg: github.com/mattn/go-runewidth
cpu: Apple M2
│ old.txt │ new.txt │
│ sec/op │ sec/op vs base │
String1WidthAll/regular-8 108.92m ± 0% 35.09m ± 3% -67.78% (p=0.002 n=6)
String1WidthAll/lut-8 93.97m ± 0% 18.70m ± 0% -80.10% (p=0.002 n=6)
String1Width768/regular-8 60.62µ ± 1% 11.54µ ± 0% -80.97% (p=0.002 n=6)
String1Width768/lut-8 60.66µ ± 1% 11.43µ ± 0% -81.16% (p=0.002 n=6)
String1WidthAllEastAsian/regular-8 115.13m ± 1% 40.79m ± 8% -64.57% (p=0.002 n=6)
String1WidthAllEastAsian/lut-8 93.65m ± 0% 18.70m ± 2% -80.03% (p=0.002 n=6)
String1Width768EastAsian/regular-8 75.32µ ± 0% 23.49µ ± 0% -68.82% (p=0.002 n=6)
String1Width768EastAsian/lut-8 60.76µ ± 0% 11.50µ ± 0% -81.07% (p=0.002 n=6)
geomean 2.562m 604.5µ -76.41%

│ old.txt │ new.txt │
│ B/op │ B/op vs base │
String1WidthAll/regular-8 106.3Mi ± 0% 0.0Mi ± 0% -100.00% (p=0.002 n=6)
String1WidthAll/lut-8 106.3Mi ± 0% 0.0Mi ± 0% -100.00% (p=0.002 n=6)
String1Width768/regular-8 75.00Ki ± 0% 0.00Ki ± 0% -100.00% (p=0.002 n=6)
String1Width768/lut-8 75.00Ki ± 0% 0.00Ki ± 0% -100.00% (p=0.002 n=6)
String1WidthAllEastAsian/regular-8 106.3Mi ± 0% 0.0Mi ± 0% -100.00% (p=0.002 n=6)
String1WidthAllEastAsian/lut-8 106.3Mi ± 0% 0.0Mi ± 0% -100.00% (p=0.002 n=6)
String1Width768EastAsian/regular-8 75.00Ki ± 0% 0.00Ki ± 0% -100.00% (p=0.002 n=6)
String1Width768EastAsian/lut-8 75.00Ki ± 0% 0.00Ki ± 0% -100.00% (p=0.002 n=6)
geomean 2.790Mi ? ¹ ²
¹ summaries must be >0 to compute geomean
² ratios must be >0 to compute geomean

│ old.txt │ new.txt │
│ allocs/op │ allocs/op vs base │
String1WidthAll/regular-8 3.342M ± 0% 0.000M ± 0% -100.00% (p=0.002 n=6)
String1WidthAll/lut-8 3.342M ± 0% 0.000M ± 0% -100.00% (p=0.002 n=6)
String1Width768/regular-8 2.304k ± 0% 0.000k ± 0% -100.00% (p=0.002 n=6)
String1Width768/lut-8 2.304k ± 0% 0.000k ± 0% -100.00% (p=0.002 n=6)
String1WidthAllEastAsian/regular-8 3.342M ± 0% 0.000M ± 0% -100.00% (p=0.002 n=6)
String1WidthAllEastAsian/lut-8 3.342M ± 0% 0.000M ± 0% -100.00% (p=0.002 n=6)
String1Width768EastAsian/regular-8 2.304k ± 0% 0.000k ± 0% -100.00% (p=0.002 n=6)
String1Width768EastAsian/lut-8 2.304k ± 0% 0.000k ± 0% -100.00% (p=0.002 n=6)
geomean 87.75k ? ¹ ²
¹ summaries must be >0 to compute geomean
² ratios must be >0 to compute geomean
vendor/github.com/mattn/go-runewidth/new.txt (new file, generated, vendored, 54 lines)

@@ -0,0 +1,54 @@
goos: darwin
goarch: arm64
pkg: github.com/mattn/go-runewidth
cpu: Apple M2
BenchmarkString1WidthAll/regular-8 33 35033923 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/regular-8 33 34965112 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/regular-8 33 36307234 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/regular-8 33 35007705 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/regular-8 33 35154182 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/regular-8 34 35155400 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 63 18688500 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 63 18712474 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 63 18700211 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 62 18694179 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 62 18708392 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAll/lut-8 63 18770608 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 104137 11526 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 103986 11540 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 104079 11552 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 103963 11530 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 103714 11538 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/regular-8 104181 11537 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 105150 11420 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 104778 11423 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 105069 11422 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 105127 11475 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 104742 11433 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768/lut-8 105163 11432 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 40723347 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 40790299 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 40801338 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 40798216 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 44135253 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 28 40779546 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 62 18694165 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 62 18685047 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 62 18689273 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 62 19150346 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 63 19126154 ns/op 0 B/op 0 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 62 18712619 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 50775 23595 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 51061 23563 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 51057 23492 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 51138 23445 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 51195 23469 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/regular-8 51087 23482 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104559 11549 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104508 11483 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104296 11503 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104606 11485 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104588 11495 ns/op 0 B/op 0 allocs/op
BenchmarkString1Width768EastAsian/lut-8 104602 11518 ns/op 0 B/op 0 allocs/op
PASS
ok github.com/mattn/go-runewidth 64.455s
vendor/github.com/mattn/go-runewidth/old.txt (new file, generated, vendored, 54 lines)

@@ -0,0 +1,54 @@
goos: darwin
goarch: arm64
pkg: github.com/mattn/go-runewidth
cpu: Apple M2
BenchmarkString1WidthAll/regular-8 10 108559258 ns/op 111412145 B/op 3342342 allocs/op
BenchmarkString1WidthAll/regular-8 10 108968079 ns/op 111412364 B/op 3342343 allocs/op
BenchmarkString1WidthAll/regular-8 10 108890338 ns/op 111412388 B/op 3342344 allocs/op
BenchmarkString1WidthAll/regular-8 10 108940704 ns/op 111412584 B/op 3342346 allocs/op
BenchmarkString1WidthAll/regular-8 10 108632796 ns/op 111412348 B/op 3342343 allocs/op
BenchmarkString1WidthAll/regular-8 10 109354546 ns/op 111412777 B/op 3342343 allocs/op
BenchmarkString1WidthAll/lut-8 12 93844406 ns/op 111412569 B/op 3342345 allocs/op
BenchmarkString1WidthAll/lut-8 12 93991080 ns/op 111412512 B/op 3342344 allocs/op
BenchmarkString1WidthAll/lut-8 12 93980632 ns/op 111412413 B/op 3342343 allocs/op
BenchmarkString1WidthAll/lut-8 12 94004083 ns/op 111412396 B/op 3342343 allocs/op
BenchmarkString1WidthAll/lut-8 12 93959795 ns/op 111412445 B/op 3342343 allocs/op
BenchmarkString1WidthAll/lut-8 12 93846198 ns/op 111412556 B/op 3342345 allocs/op
BenchmarkString1Width768/regular-8 19785 60696 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/regular-8 19824 60520 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/regular-8 19832 60547 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/regular-8 19778 60543 ns/op 76800 B/op 2304 allocs/op
BenchmarkString1Width768/regular-8 19842 61142 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/regular-8 19780 60696 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19598 61161 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19731 60707 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19738 60626 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19764 60670 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19797 60642 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768/lut-8 19738 60608 ns/op 76800 B/op 2304 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 115080431 ns/op 111412458 B/op 3342345 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 114908880 ns/op 111412476 B/op 3342345 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 115077134 ns/op 111412540 B/op 3342345 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 115175292 ns/op 111412467 B/op 3342345 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 115792653 ns/op 111412362 B/op 3342344 allocs/op
BenchmarkString1WidthAllEastAsian/regular-8 9 115255417 ns/op 111412572 B/op 3342346 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 93761542 ns/op 111412538 B/op 3342345 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 94089990 ns/op 111412440 B/op 3342343 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 93721410 ns/op 111412514 B/op 3342344 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 93572951 ns/op 111412329 B/op 3342342 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 93536052 ns/op 111412206 B/op 3342341 allocs/op
BenchmarkString1WidthAllEastAsian/lut-8 12 93532365 ns/op 111412412 B/op 3342343 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15904 75401 ns/op 76800 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15932 75449 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15944 75181 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15963 75311 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15879 75292 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/regular-8 15955 75334 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19692 60692 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19712 60699 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19741 60819 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19771 60653 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19737 61027 ns/op 76801 B/op 2304 allocs/op
BenchmarkString1Width768EastAsian/lut-8 19657 60820 ns/op 76801 B/op 2304 allocs/op
PASS
ok github.com/mattn/go-runewidth 76.165s
vendor/github.com/mattn/go-runewidth/runewidth.go (generated, vendored, 23 changed lines)

@@ -4,7 +4,7 @@ import (
    "os"
    "strings"

    "github.com/rivo/uniseg"
    "github.com/clipperhouse/uax29/v2/graphemes"
)

//go:generate go run script/generate.go
@@ -64,6 +64,9 @@ func inTable(r rune, t table) bool {
    if r < t[0].first {
        return false
    }
    if r > t[len(t)-1].last {
        return false
    }

    bot := 0
    top := len(t) - 1
@@ -175,10 +178,10 @@ func (c *Condition) CreateLUT() {

// StringWidth return width as you can see
func (c *Condition) StringWidth(s string) (width int) {
    g := uniseg.NewGraphemes(s)
    g := graphemes.FromString(s)
    for g.Next() {
        var chWidth int
        for _, r := range g.Runes() {
        for _, r := range g.Value() {
            chWidth = c.RuneWidth(r)
            if chWidth > 0 {
                break // Our best guess at this point is to use the width of the first non-zero-width rune.
@@ -197,17 +200,17 @@ func (c *Condition) Truncate(s string, w int, tail string) string {
    w -= c.StringWidth(tail)
    var width int
    pos := len(s)
    g := uniseg.NewGraphemes(s)
    g := graphemes.FromString(s)
    for g.Next() {
        var chWidth int
        for _, r := range g.Runes() {
        for _, r := range g.Value() {
            chWidth = c.RuneWidth(r)
            if chWidth > 0 {
                break // See StringWidth() for details.
            }
        }
        if width+chWidth > w {
            pos, _ = g.Positions()
            pos = g.Start()
            break
        }
        width += chWidth
@@ -224,10 +227,10 @@ func (c *Condition) TruncateLeft(s string, w int, prefix string) string {
    var width int
    pos := len(s)

    g := uniseg.NewGraphemes(s)
    g := graphemes.FromString(s)
    for g.Next() {
        var chWidth int
        for _, r := range g.Runes() {
        for _, r := range g.Value() {
            chWidth = c.RuneWidth(r)
            if chWidth > 0 {
                break // See StringWidth() for details.
@@ -236,10 +239,10 @@ func (c *Condition) TruncateLeft(s string, w int, prefix string) string {

        if width+chWidth > w {
            if width < w {
                _, pos = g.Positions()
                pos = g.End()
                prefix += strings.Repeat(" ", width+chWidth-w)
            } else {
                pos, _ = g.Positions()
                pos = g.Start()
            }

            break
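The hunks above swap rivo/uniseg's grapheme iterator for the clipperhouse one. A side-by-side sketch of the API mapping (illustrative only, using the uniseg calls as they appear in the removed lines and the clipperhouse calls as they appear in the added ones):

```go
package main

import (
    "fmt"

    "github.com/clipperhouse/uax29/v2/graphemes"
    "github.com/rivo/uniseg"
)

func main() {
    s := "👍🐶"

    // Old style (rivo/uniseg), as removed above.
    old := uniseg.NewGraphemes(s)
    for old.Next() {
        start, end := old.Positions() // byte offsets of the current cluster
        fmt.Println(old.Runes(), start, end)
    }

    // New style (clipperhouse/uax29/v2/graphemes), as added above.
    now := graphemes.FromString(s)
    for now.Next() {
        fmt.Println([]rune(now.Value()), now.Start(), now.End())
    }
}
```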
vendor/github.com/mattn/go-runewidth/runewidth_windows.go (generated, vendored, 6 changed lines)

@@ -4,6 +4,7 @@
package runewidth

import (
    "os"
    "syscall"
)

@@ -14,6 +15,11 @@ var (

// IsEastAsian return true if the current locale is CJK
func IsEastAsian() bool {
    if os.Getenv("WT_SESSION") != "" {
        // Windows Terminal always not use East Asian Ambiguous Width(s).
        return false
    }

    r1, _, _ := procGetConsoleOutputCP.Call()
    if r1 == 0 {
        return false
3
vendor/github.com/olekukonko/cat/.gitignore
generated
vendored
Normal file
3
vendor/github.com/olekukonko/cat/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,3 @@
.idea
.github
lab
21
vendor/github.com/olekukonko/cat/LICENSE
generated
vendored
Normal file
21
vendor/github.com/olekukonko/cat/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Oleku Konko

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
168
vendor/github.com/olekukonko/cat/README.md
generated
vendored
Normal file
168
vendor/github.com/olekukonko/cat/README.md
generated
vendored
Normal file
@@ -0,0 +1,168 @@
# 🐱 `cat` - The Fast & Fluent String Concatenation Library for Go

> **"Because building strings shouldn't feel like herding cats"** 😼

## Why `cat`?

Go's `strings.Builder` is great, but building complex strings often feels clunky. `cat` makes string concatenation:

- **Faster** - Optimized paths for common types, zero-allocation conversions
- **Fluent** - Chainable methods for beautiful, readable code
- **Flexible** - Handles any type, nested structures, and custom formatting
- **Smart** - Automatic pooling, size estimation, and separator handling

```go
// Without cat
var b strings.Builder
b.WriteString("Hello, ")
b.WriteString(user.Name)
b.WriteString("! You have ")
b.WriteString(strconv.Itoa(count))
b.WriteString(" new messages.")
result := b.String()

// With cat
result := cat.Concat("Hello, ", user.Name, "! You have ", count, " new messages.")
```

## 🔥 Hot Features

### 1. Fluent Builder API

Build strings like a boss with method chaining:

```go
s := cat.New(", ").
	Add("apple").
	If(user.IsVIP, "golden kiwi").
	Add("orange").
	Sep(" | "). // Change separator mid-way
	Add("banana").
	String()
// "apple, golden kiwi, orange | banana"
```

### 2. Zero-Allocation Magic

- **Pooled builders** (optional) reduce GC pressure
- **Unsafe byte conversions** (opt-in) avoid `[]byte`→`string` copies
- **Stack buffers** for numbers instead of heap allocations

```go
// Enable performance features
cat.Pool(true)           // Builder pooling
cat.SetUnsafeBytes(true) // Zero-copy []byte conversion
```
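
To make the pooling claim above concrete, an added sketch of a pooled hot loop (not from the upstream README; `rows` and its fields are assumed to exist in the caller):

```go
cat.Pool(true) // one-time, at startup

var out []string
for _, row := range rows { // rows: assumed caller-provided slice
	b := cat.New(", ")
	b.Add(row.ID, row.Name, row.Score)
	out = append(out, b.Output()) // Output() returns the string and releases b to the pool
}
```
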
### 3. Handles Any Type - Even Nested Ones!

No more manual type conversions:

```go
data := map[string]any{
	"id":   12345,
	"tags": []string{"go", "fast", "efficient"},
}

fmt.Println(cat.JSONPretty(data))
// {
//   "id": 12345,
//   "tags": ["go", "fast", "efficient"]
// }
```

### 4. Concatenation for Every Use Case

```go
// Simple joins
cat.With(", ", "apple", "banana", "cherry") // "apple, banana, cherry"

// File paths
cat.Path("dir", "sub", "file.txt") // "dir/sub/file.txt"

// CSV
cat.CSV(1, 2, 3) // "1,2,3"

// Conditional elements
cat.Start("Hello").If(user != nil, " ", user.Name) // "Hello" or "Hello Alice"

// Repeated patterns
cat.RepeatWith("-+", "X", 3) // "X-+X-+X"
```

### 5. Smarter Than Your Average String Lib

```go
// Automatic nesting handling
nested := []any{"a", []any{"b", "c"}, "d"}
cat.FlattenWith(",", nested) // "a,b,c,d"

// Precise size estimation (minimizes allocations)
b := cat.New(", ").Grow(estimatedSize) // Preallocate exactly what you need

// Reflection support for any type
cat.Reflect(anyComplexStruct) // "{Field1:value Field2:[1 2 3]}"
```

## 🚀 Getting Started

```bash
go get github.com/your-repo/cat
```

```go
import "github.com/your-repo/cat"

func main() {
	// Simple concatenation
	msg := cat.Concat("User ", userID, " has ", count, " items")

	// Pooled builder (for high-performance loops)
	builder := cat.New(", ")
	defer builder.Release() // Return to pool
	result := builder.Add(items...).String()
}
```

## 🤔 Why Not Just Use...?

- `fmt.Sprintf` - Slow, many allocations
- `strings.Join` - Only works with strings
- `bytes.Buffer` - No separator support, manual type handling
- `string +` - Even worse performance, especially in loops

## 💡 Pro Tips

1. **Enable pooling** in high-throughput scenarios (see the sketch after this list)
2. **Preallocate** with `.Grow()` when you know the final size
3. Use **`If()`** for conditional elements in fluent chains
4. Try **`SetUnsafeBytes(true)`** if you can guarantee byte slices won't mutate
5. **Release builders** when pooling is enabled
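
An added sketch pulling these tips together (illustrative only; `debug` and `traceID` are assumed to be defined by the caller):

```go
cat.Pool(true) // tip 1: enable pooling on high-throughput paths

b := cat.New(", ").
	Grow(64).                    // tip 2: rough preallocation of the final size
	Add("status=ok").
	If(debug, "trace="+traceID). // tip 3: conditional element
	Add("region=eu")
line := b.Output() // tip 5: Output() also releases the pooled builder
```
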
## 🐱‍👤 Advanced Usage

```go
// Custom value formatting
type User struct {
	Name string
	Age  int
}

func (u User) String() string {
	return cat.With(" ", u.Name, cat.Wrap("(", ")", u.Age))
}

// JSON-like output
func JSONPretty(v any) string {
	return cat.WrapWith(",\n ", "{\n ", "\n}", prettyFields(v))
}
```

```text
 /\_/\
( o.o )  > Concatenate with purr-fection!
 > ^ <

```

**`cat`** - Because life's too short for ugly string building code. 😻
124
vendor/github.com/olekukonko/cat/builder.go
generated
vendored
Normal file
124
vendor/github.com/olekukonko/cat/builder.go
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
package cat
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Builder is a fluent concatenation helper. It is safe for concurrent use by
|
||||
// multiple goroutines only if each goroutine uses a distinct *Builder.
|
||||
// If pooling is enabled via Pool(true), call Release() when done.
|
||||
// The Builder uses an internal strings.Builder for efficient string concatenation
|
||||
// and manages a separator that is inserted between added values.
|
||||
// It supports chaining methods for a fluent API style.
|
||||
type Builder struct {
|
||||
buf strings.Builder
|
||||
sep string
|
||||
needsSep bool
|
||||
}
|
||||
|
||||
// New begins a new Builder with a separator. If pooling is enabled,
|
||||
// the Builder is reused and MUST be released with b.Release() when done.
|
||||
// If sep is empty, uses DefaultSep().
|
||||
// Optional initial arguments x are added immediately after creation.
|
||||
// Pooling is controlled globally via Pool(true/false); when enabled, Builders
|
||||
// are recycled to reduce allocations in high-throughput scenarios.
|
||||
func New(sep string, x ...any) *Builder {
|
||||
var b *Builder
|
||||
if poolEnabled.Load() {
|
||||
b = builderPool.Get().(*Builder)
|
||||
b.buf.Reset()
|
||||
b.sep = sep
|
||||
b.needsSep = false
|
||||
} else {
|
||||
b = &Builder{sep: sep}
|
||||
}
|
||||
|
||||
// Process initial arguments *after* the builder is prepared.
|
||||
if len(x) > 0 {
|
||||
b.Add(x...)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// Start begins a new Builder with no separator (using an empty string as sep).
|
||||
// It is a convenience function that wraps New(empty, x...), where empty is a constant empty string.
|
||||
// This allows starting a concatenation without any separator between initial or subsequent additions.
|
||||
// If pooling is enabled via Pool(true), the returned Builder MUST be released with b.Release() when done.
|
||||
// Optional variadic arguments x are passed directly to New and added immediately after creation.
|
||||
// Useful for fluent chains where no default separator is desired from the start.
|
||||
func Start(x ...any) *Builder {
|
||||
return New(empty, x...)
|
||||
}
|
||||
|
||||
// Grow pre-sizes the internal buffer.
|
||||
// This can be used to preallocate capacity based on an estimated total size,
|
||||
// reducing reallocations during subsequent Add calls.
|
||||
// It chains, returning the Builder for fluent use.
|
||||
func (b *Builder) Grow(n int) *Builder { b.buf.Grow(n); return b }
|
||||
|
||||
// Add appends values to the builder.
|
||||
// It inserts the current separator before each new value if needed (i.e., after the first addition).
|
||||
// Values are converted to strings using the optimized write function, which handles
|
||||
// common types efficiently without allocations where possible.
|
||||
// Supports any number of arguments of any type.
|
||||
// Chains, returning the Builder for fluent use.
|
||||
func (b *Builder) Add(args ...any) *Builder {
|
||||
for _, arg := range args {
|
||||
if b.needsSep && b.sep != empty {
|
||||
b.buf.WriteString(b.sep)
|
||||
}
|
||||
write(&b.buf, arg)
|
||||
b.needsSep = true
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// If appends values to the builder only if the condition is true.
|
||||
// Behaves like Add when condition is true; does nothing otherwise.
|
||||
// Useful for conditional concatenation in chains.
|
||||
// Chains, returning the Builder for fluent use.
|
||||
func (b *Builder) If(condition bool, args ...any) *Builder {
|
||||
if condition {
|
||||
b.Add(args...)
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// Sep changes the separator for subsequent additions.
|
||||
// Future Add calls will use this new separator.
|
||||
// Does not affect already added content.
|
||||
// If sep is empty, no separator will be added between future values.
|
||||
// Chains, returning the Builder for fluent use.
|
||||
func (b *Builder) Sep(sep string) *Builder { b.sep = sep; return b }
|
||||
|
||||
// String returns the concatenated result.
|
||||
// This does not release the Builder; if pooling is enabled, call Release separately
|
||||
// if you are done with the Builder.
|
||||
// Can be called multiple times; the internal buffer remains unchanged.
|
||||
func (b *Builder) String() string { return b.buf.String() }
|
||||
|
||||
// Output returns the concatenated result and releases the Builder if pooling is enabled.
|
||||
// This is a convenience method to get the string and clean up in one call.
|
||||
// After Output, the Builder should not be used further if pooled, as it may be recycled.
|
||||
// If pooling is disabled, it behaves like String without release.
|
||||
func (b *Builder) Output() string {
|
||||
out := b.buf.String()
|
||||
b.Release() // Release takes care of the poolEnabled check
|
||||
return out
|
||||
}
|
||||
|
||||
// Release returns the Builder to the pool if pooling is enabled.
|
||||
// You should call this exactly once per New() when Pool(true) is active.
|
||||
// Resets the internal state (buffer, separator, needsSep) before pooling to avoid
|
||||
// retaining data or large allocations.
|
||||
// If pooling is disabled, this is a no-op.
|
||||
// Safe to call multiple times, but typically called once at the end of use.
|
||||
func (b *Builder) Release() {
|
||||
if poolEnabled.Load() {
|
||||
// Avoid retaining large buffers.
|
||||
b.buf.Reset()
|
||||
b.sep = empty
|
||||
b.needsSep = false
|
||||
builderPool.Put(b)
|
||||
}
|
||||
}
|
||||
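The doc comments above distinguish String, Output, and Release. A short usage sketch of that lifecycle, assuming the package is imported as `cat` (added here for illustration, not part of the vendored file):

```go
b := cat.New(" | ")
b.Add("alpha", 42).Sep(", ").Add(true)

s1 := b.String() // safe to call repeatedly; the buffer is untouched
s2 := b.Output() // same text, but also returns b to the pool when Pool(true) is on
// s1 == s2 == "alpha | 42, true"; after Output, b must not be reused if pooling is enabled
```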
117
vendor/github.com/olekukonko/cat/cat.go
generated
vendored
Normal file
117
vendor/github.com/olekukonko/cat/cat.go
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
// Package cat provides efficient and flexible string concatenation utilities.
|
||||
// It includes optimized functions for concatenating various types, builders for fluent chaining,
|
||||
// and configuration options for defaults, pooling, and unsafe optimizations.
|
||||
// The package aims to minimize allocations and improve performance in string building scenarios.
|
||||
package cat
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
)
|
||||
|
||||
// Constants used throughout the package for separators, defaults, and configuration.
|
||||
// These include common string literals for separators, empty strings, and special representations,
|
||||
// as well as limits like recursion depth. Defining them as constants allows for compile-time
|
||||
// optimizations, readability, and consistent usage in functions like Space, Path, CSV, and reflection handlers.
|
||||
// cat.go (updated constants section)
|
||||
const (
|
||||
empty = "" // Empty string constant, used for checks and defaults.
|
||||
space = " " // Single space, default separator.
|
||||
slash = "/" // Forward slash, for paths.
|
||||
dot = "." // Period, for extensions or decimals.
|
||||
comma = "," // Comma, for CSV or lists.
|
||||
equal = "=" // Equals, for comparisons.
|
||||
newline = "\n" // Newline, for multi-line strings.
|
||||
|
||||
// SQL-specific constants
|
||||
and = "AND" // AND operator, for SQL conditions.
|
||||
inOpen = " IN (" // Opening for SQL IN clause
|
||||
inClose = ")" // Closing for SQL IN clause
|
||||
asSQL = " AS " // SQL AS for aliasing
|
||||
count = "COUNT(" // SQL COUNT function prefix
|
||||
sum = "SUM(" // SQL SUM function prefix
|
||||
avg = "AVG(" // SQL AVG function prefix
|
||||
maxOpen = "MAX(" // SQL MAX function prefix
|
||||
minOpen = "MIN(" // SQL MIN function prefix
|
||||
caseSQL = "CASE " // SQL CASE keyword
|
||||
when = "WHEN " // SQL WHEN clause
|
||||
then = " THEN " // SQL THEN clause
|
||||
elseSQL = " ELSE " // SQL ELSE clause
|
||||
end = " END" // SQL END for CASE
|
||||
countAll = "COUNT(*)" // SQL COUNT(*) for all rows
|
||||
parenOpen = "(" // Opening parenthesis
|
||||
parenClose = ")" // Closing parenthesis
|
||||
|
||||
maxRecursionDepth = 32 // Maximum recursion depth for nested structure handling.
|
||||
nilString = "<nil>" // String representation for nil values.
|
||||
unexportedString = "<?>" // Placeholder for unexported fields.
|
||||
)
|
||||
|
||||
// Numeric is a generic constraint interface for numeric types.
|
||||
// It includes all signed/unsigned integers and floats.
|
||||
// Used in generic functions like Number and NumberWith to constrain to numbers.
|
||||
type Numeric interface {
|
||||
~int | ~int8 | ~int16 | ~int32 | ~int64 | ~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~float32 | ~float64
|
||||
}
|
||||
|
||||
// poolEnabled controls whether New() reuses Builder instances from a pool.
|
||||
// Atomic.Bool for thread-safe toggle.
|
||||
// When true, Builders from New must be Released to avoid leaks.
|
||||
var poolEnabled atomic.Bool
|
||||
|
||||
// builderPool stores reusable *Builder to reduce GC pressure on hot paths.
|
||||
// Uses sync.Pool for efficient allocation/reuse.
|
||||
// New func creates a fresh &Builder when pool is empty.
|
||||
var builderPool = sync.Pool{
|
||||
New: func() any { return &Builder{} },
|
||||
}
|
||||
|
||||
// Pool enables or disables Builder pooling for New()/Release().
|
||||
// When enabled, you MUST call b.Release() after b.String() to return it.
|
||||
// Thread-safe via atomic.Store.
|
||||
// Enable for high-throughput scenarios to reduce allocations.
|
||||
func Pool(enable bool) { poolEnabled.Store(enable) }
|
||||
|
||||
// unsafeBytesFlag controls zero-copy []byte -> string behavior via atomics.
|
||||
// Int32 used for atomic operations: 1 = enabled, 0 = disabled.
|
||||
// Affects bytesToString function for zero-copy conversions using unsafe.
|
||||
var unsafeBytesFlag atomic.Int32 // 1 = true, 0 = false
|
||||
|
||||
// SetUnsafeBytes toggles zero-copy []byte -> string conversions globally.
|
||||
// When enabled, bytesToString uses unsafe.String for zero-allocation conversion.
|
||||
// Thread-safe via atomic.Store.
|
||||
// Use with caution: assumes the byte slice is not modified after conversion.
|
||||
// Compatible with Go 1.20+; fallback to string(bts) if disabled.
|
||||
func SetUnsafeBytes(enable bool) {
|
||||
if enable {
|
||||
unsafeBytesFlag.Store(1)
|
||||
} else {
|
||||
unsafeBytesFlag.Store(0)
|
||||
}
|
||||
}
|
||||
|
||||
// IsUnsafeBytes reports whether zero-copy []byte -> string is enabled.
|
||||
// Thread-safe via atomic.Load.
|
||||
// Returns true if flag is 1, false otherwise.
|
||||
// Useful for checking current configuration.
|
||||
func IsUnsafeBytes() bool { return unsafeBytesFlag.Load() == 1 }
|
||||
|
||||
// deterministicMaps controls whether map keys are sorted for deterministic output in string conversions.
|
||||
// It uses atomic.Bool for thread-safe access.
|
||||
var deterministicMaps atomic.Bool
|
||||
|
||||
// SetDeterministicMaps controls whether map keys are sorted for deterministic output
|
||||
// in reflection-based handling (e.g., in writeReflect for maps).
|
||||
// When enabled, keys are sorted using a string-based comparison for consistent string representations.
|
||||
// Thread-safe via atomic.Store.
|
||||
// Useful for reproducible outputs in testing or logging.
|
||||
func SetDeterministicMaps(enable bool) {
|
||||
deterministicMaps.Store(enable)
|
||||
}
|
||||
|
||||
// IsDeterministicMaps returns current map sorting setting.
|
||||
// Thread-safe via atomic.Load.
|
||||
// Returns true if deterministic sorting is enabled, false otherwise.
|
||||
func IsDeterministicMaps() bool {
|
||||
return deterministicMaps.Load()
|
||||
}
|
||||
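The toggles defined in this file are process-wide switches. A minimal sketch of flipping them at startup, assuming the import path shown in this vendor tree (`github.com/olekukonko/cat`):

```go
package main

import (
	"fmt"

	"github.com/olekukonko/cat"
)

func main() {
	cat.Pool(true)                 // reuse Builders via sync.Pool
	cat.SetUnsafeBytes(true)       // zero-copy []byte -> string; inputs must not be mutated afterwards
	cat.SetDeterministicMaps(true) // sort map keys in reflection-based output

	fmt.Println(cat.IsUnsafeBytes(), cat.IsDeterministicMaps()) // true true
}
```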
590
vendor/github.com/olekukonko/cat/concat.go
generated
vendored
Normal file
590
vendor/github.com/olekukonko/cat/concat.go
generated
vendored
Normal file
@@ -0,0 +1,590 @@
|
||||
package cat
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Append appends args to dst and returns the grown slice.
|
||||
// Callers can reuse dst across calls to amortize allocs.
|
||||
// It uses an internal Builder for efficient concatenation of the args (no separators),
|
||||
// then appends the result to the dst byte slice.
|
||||
// Preallocates based on a size estimate to minimize reallocations.
|
||||
// Benefits from Builder pooling if enabled.
|
||||
// Useful for building byte slices incrementally without separators.
|
||||
func Append(dst []byte, args ...any) []byte {
|
||||
return AppendWith(empty, dst, args...)
|
||||
}
|
||||
|
||||
// AppendWith appends args to dst and returns the grown slice.
|
||||
// Callers can reuse dst across calls to amortize allocs.
|
||||
// Similar to Append, but inserts the specified sep between each arg.
|
||||
// Preallocates based on a size estimate including separators.
|
||||
// Benefits from Builder pooling if enabled.
|
||||
// Useful for building byte slices incrementally with custom separators.
|
||||
func AppendWith(sep string, dst []byte, args ...any) []byte {
|
||||
if len(args) == 0 {
|
||||
return dst
|
||||
}
|
||||
b := New(sep)
|
||||
b.Grow(estimateWith(sep, args))
|
||||
b.Add(args...)
|
||||
out := b.Output()
|
||||
return append(dst, out...)
|
||||
}
|
||||
|
||||
// AppendBytes joins byte slices without separators.
|
||||
// Only for compatibility with low-level byte processing.
|
||||
// Directly appends each []byte arg to dst without any conversion or separators.
|
||||
// Efficient for pure byte concatenation; no allocations if dst has capacity.
|
||||
// Returns the extended dst slice.
|
||||
// Does not use Builder, as it's simple append operations.
|
||||
func AppendBytes(dst []byte, args ...[]byte) []byte {
|
||||
if len(args) == 0 {
|
||||
return dst
|
||||
}
|
||||
for _, b := range args {
|
||||
dst = append(dst, b...)
|
||||
}
|
||||
return dst
|
||||
}
|
||||
|
||||
// AppendTo writes arguments to an existing strings.Builder.
|
||||
// More efficient than creating new builders.
|
||||
// Appends each arg to the provided strings.Builder using the optimized write function.
|
||||
// No separators are added; for direct concatenation.
|
||||
// Useful when you already have a strings.Builder and want to add more values efficiently.
|
||||
// Does not use cat.Builder, as it appends to an existing strings.Builder.
|
||||
func AppendTo(b *strings.Builder, args ...any) {
|
||||
for _, arg := range args {
|
||||
write(b, arg)
|
||||
}
|
||||
}
|
||||
|
||||
// AppendStrings writes strings to an existing strings.Builder.
|
||||
// Directly writes each string arg to the provided strings.Builder.
|
||||
// No type checks or conversions; assumes all args are strings.
|
||||
// Efficient for appending known strings without separators.
|
||||
// Does not use cat.Builder, as it appends to an existing strings.Builder.
|
||||
func AppendStrings(b *strings.Builder, ss ...string) {
|
||||
for _, s := range ss {
|
||||
b.WriteString(s)
|
||||
}
|
||||
}
|
||||
|
||||
// Between concatenates values wrapped between x and y (no separator between args).
|
||||
// Equivalent to BetweenWith with an empty separator.
|
||||
func Between(x, y any, args ...any) string {
|
||||
return BetweenWith(empty, x, y, args...)
|
||||
}
|
||||
|
||||
// BetweenWith concatenates values wrapped between x and y, using sep between x, args, and y.
|
||||
// Uses a pooled Builder if enabled; releases it after use.
|
||||
// Equivalent to With(sep, x, args..., y).
|
||||
func BetweenWith(sep string, x, y any, args ...any) string {
|
||||
b := New(sep)
|
||||
// Estimate size for all parts to avoid re-allocation.
|
||||
b.Grow(estimate([]any{x, y}) + estimateWith(sep, args))
|
||||
|
||||
b.Add(x)
|
||||
b.Add(args...)
|
||||
b.Add(y)
|
||||
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// CSV joins arguments with "," separators (no space).
|
||||
// Convenience wrapper for With using a comma as separator.
|
||||
// Useful for simple CSV string generation without spaces.
|
||||
func CSV(args ...any) string { return With(comma, args...) }
|
||||
|
||||
// Comma joins arguments with ", " separators.
|
||||
// Convenience wrapper for With using ", " as separator.
|
||||
// Useful for human-readable lists with comma and space.
|
||||
func Comma(args ...any) string { return With(comma+space, args...) }
|
||||
|
||||
// Concat concatenates any values (no separators).
|
||||
// Usage: cat.Concat("a", 1, true) → "a1true"
|
||||
// Equivalent to With with an empty separator.
|
||||
func Concat(args ...any) string {
|
||||
return With(empty, args...)
|
||||
}
|
||||
|
||||
// ConcatWith concatenates any values with separator.
|
||||
// Alias for With; joins args with the provided sep.
|
||||
func ConcatWith(sep string, args ...any) string {
|
||||
return With(sep, args...)
|
||||
}
|
||||
|
||||
// Flatten joins nested values into a single concatenation using empty.
|
||||
// Convenience for FlattenWith using empty.
|
||||
func Flatten(args ...any) string {
|
||||
return FlattenWith(empty, args...)
|
||||
}
|
||||
|
||||
// FlattenWith joins nested values into a single concatenation with sep, avoiding
|
||||
// intermediate slice allocations where possible.
|
||||
// It recursively flattens any nested []any arguments, concatenating all leaf items
|
||||
// with sep between them. Skips empty nested slices to avoid extra separators.
|
||||
// Leaf items (non-slices) are converted using the optimized write function.
|
||||
// Uses a pooled Builder if enabled; releases it after use.
|
||||
// Preallocates based on a recursive estimate for efficiency.
|
||||
// Example: FlattenWith(",", 1, []any{2, []any{3,4}}, 5) → "1,2,3,4,5"
|
||||
func FlattenWith(sep string, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
|
||||
// Recursive estimate for preallocation.
|
||||
totalSize := recursiveEstimate(sep, args)
|
||||
|
||||
b := New(sep)
|
||||
b.Grow(totalSize)
|
||||
recursiveAdd(b, args)
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Group joins multiple groups with empty between groups (no intra-group separators).
|
||||
// Convenience for GroupWith using empty.
|
||||
func Group(groups ...[]any) string {
|
||||
return GroupWith(empty, groups...)
|
||||
}
|
||||
|
||||
// GroupWith joins multiple groups with a separator between groups (no intra-group separators).
|
||||
// Concatenates each group internally without separators, then joins non-empty groups with sep.
|
||||
// Preestimates total size for allocation; uses pooled Builder if enabled.
|
||||
// Optimized for single group: direct Concat.
|
||||
// Useful for grouping related items with inter-group separation.
|
||||
func GroupWith(sep string, groups ...[]any) string {
|
||||
if len(groups) == 0 {
|
||||
return empty
|
||||
}
|
||||
if len(groups) == 1 {
|
||||
return Concat(groups[0]...)
|
||||
}
|
||||
|
||||
total := 0
|
||||
nonEmpty := 0
|
||||
for _, g := range groups {
|
||||
if len(g) == 0 {
|
||||
continue
|
||||
}
|
||||
if nonEmpty > 0 {
|
||||
total += len(sep)
|
||||
}
|
||||
total += estimate(g)
|
||||
nonEmpty++
|
||||
}
|
||||
|
||||
b := New(empty)
|
||||
b.Grow(total)
|
||||
first := true
|
||||
for _, g := range groups {
|
||||
if len(g) == 0 {
|
||||
continue
|
||||
}
|
||||
if !first && sep != empty {
|
||||
b.buf.WriteString(sep)
|
||||
}
|
||||
first = false
|
||||
for _, a := range g {
|
||||
write(&b.buf, a)
|
||||
}
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Indent prefixes the concatenation of args with depth levels of two spaces per level.
|
||||
// Example: Indent(2, "hello") => " hello"
|
||||
// If depth <= 0, equivalent to Concat(args...).
|
||||
// Uses " " repeated depth times as prefix, followed by concatenated args (no separators).
|
||||
// Benefits from pooling via Concat.
|
||||
func Indent(depth int, args ...any) string {
|
||||
if depth <= 0 {
|
||||
return Concat(args...)
|
||||
}
|
||||
prefix := strings.Repeat(" ", depth)
|
||||
return Prefix(prefix, args...)
|
||||
}
|
||||
|
||||
// Join joins strings (matches stdlib strings.Join behavior).
|
||||
// Usage: cat.Join("a", "b") → "a b" (using empty)
|
||||
// Joins the variadic string args with the current empty.
|
||||
// Useful for compatibility with stdlib but using package default sep.
|
||||
func Join(elems ...string) string {
|
||||
return strings.Join(elems, empty)
|
||||
}
|
||||
|
||||
// JoinWith joins strings with separator (variadic version).
|
||||
// Directly uses strings.Join on the variadic string args with sep.
|
||||
// Efficient for known strings; no conversions needed.
|
||||
func JoinWith(sep string, elems ...string) string {
|
||||
return strings.Join(elems, sep)
|
||||
}
|
||||
|
||||
// Lines joins arguments with newline separators.
|
||||
// Convenience for With using "\n" as separator.
|
||||
// Useful for building multi-line strings.
|
||||
func Lines(args ...any) string { return With(newline, args...) }
|
||||
|
||||
// Number concatenates numeric values without separators.
|
||||
// Generic over Numeric types.
|
||||
// Equivalent to NumberWith with empty sep.
|
||||
func Number[T Numeric](a ...T) string {
|
||||
return NumberWith(empty, a...)
|
||||
}
|
||||
|
||||
// NumberWith concatenates numeric values with the provided separator.
|
||||
// Generic over Numeric types.
|
||||
// If no args, returns empty string.
|
||||
// Uses pooled Builder if enabled, with rough growth estimate (8 bytes per item).
|
||||
// Relies on valueToString for numeric conversion.
|
||||
func NumberWith[T Numeric](sep string, a ...T) string {
|
||||
if len(a) == 0 {
|
||||
return empty
|
||||
}
|
||||
|
||||
b := New(sep)
|
||||
b.Grow(len(a) * 8)
|
||||
for _, v := range a {
|
||||
b.Add(v)
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Path joins arguments with "/" separators.
|
||||
// Convenience for With using "/" as separator.
|
||||
// Useful for building file paths or URLs.
|
||||
func Path(args ...any) string { return With(slash, args...) }
|
||||
|
||||
// Prefix concatenates with a prefix (no separator).
|
||||
// Equivalent to PrefixWith with empty sep.
|
||||
func Prefix(p any, args ...any) string {
|
||||
return PrefixWith(empty, p, args...)
|
||||
}
|
||||
|
||||
// PrefixWith concatenates with a prefix and separator.
|
||||
// Adds p, then sep (if args present and sep not empty), then joins args with sep.
|
||||
// Uses pooled Builder if enabled.
|
||||
func PrefixWith(sep string, p any, args ...any) string {
|
||||
b := New(sep)
|
||||
b.Grow(estimateWith(sep, args) + estimate([]any{p}))
|
||||
b.Add(p)
|
||||
b.Add(args...)
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// PrefixEach applies the same prefix to each argument and joins the pairs with sep.
|
||||
// Example: PrefixEach("pre-", ",", "a","b") => "pre-a,pre-b"
|
||||
// Preestimates size including prefixes and seps.
|
||||
// Uses pooled Builder if enabled; manually adds sep between pairs, no sep between p and a.
|
||||
// Returns empty if no args.
|
||||
func PrefixEach(p any, sep string, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
pSize := estimate([]any{p})
|
||||
total := len(sep)*(len(args)-1) + estimate(args) + pSize*len(args)
|
||||
|
||||
b := New(empty)
|
||||
b.Grow(total)
|
||||
for i, a := range args {
|
||||
if i > 0 && sep != empty {
|
||||
b.buf.WriteString(sep)
|
||||
}
|
||||
write(&b.buf, p)
|
||||
write(&b.buf, a)
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Pair joins exactly two values (no separator).
|
||||
// Equivalent to PairWith with empty sep.
|
||||
func Pair(a, b any) string {
|
||||
return PairWith(empty, a, b)
|
||||
}
|
||||
|
||||
// PairWith joins exactly two values with a separator.
|
||||
// Optimized for two args: uses With(sep, a, b).
|
||||
func PairWith(sep string, a, b any) string {
|
||||
return With(sep, a, b)
|
||||
}
|
||||
|
||||
// Quote wraps each argument in double quotes, separated by spaces.
|
||||
// Equivalent to QuoteWith with '"' as quote.
|
||||
func Quote(args ...any) string {
|
||||
return QuoteWith('"', args...)
|
||||
}
|
||||
|
||||
// QuoteWith wraps each argument with the specified quote byte, separated by spaces.
|
||||
// Wraps each arg with quote, writes arg, closes with quote; joins with space.
|
||||
// Preestimates with quotes and spaces.
|
||||
// Uses pooled Builder if enabled.
|
||||
func QuoteWith(quote byte, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
total := estimate(args) + 2*len(args) + len(space)*(len(args)-1)
|
||||
|
||||
b := New(empty)
|
||||
b.Grow(total)
|
||||
need := false
|
||||
for _, a := range args {
|
||||
if need {
|
||||
b.buf.WriteString(space)
|
||||
}
|
||||
b.buf.WriteByte(quote)
|
||||
write(&b.buf, a)
|
||||
b.buf.WriteByte(quote)
|
||||
need = true
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Repeat concatenates val n times (no sep between instances).
|
||||
// Equivalent to RepeatWith with empty sep.
|
||||
func Repeat(val any, n int) string {
|
||||
return RepeatWith(empty, val, n)
|
||||
}
|
||||
|
||||
// RepeatWith concatenates val n times with sep between each instance.
|
||||
// If n <= 0, returns an empty string.
|
||||
// Optimized to make exactly one allocation; converts val once.
|
||||
// Uses pooled Builder if enabled.
|
||||
func RepeatWith(sep string, val any, n int) string {
|
||||
if n <= 0 {
|
||||
return empty
|
||||
}
|
||||
if n == 1 {
|
||||
return valueToString(val)
|
||||
}
|
||||
b := New(sep)
|
||||
b.Grow(n*estimate([]any{val}) + (n-1)*len(sep))
|
||||
for i := 0; i < n; i++ {
|
||||
b.Add(val)
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Reflect converts a reflect.Value to its string representation.
|
||||
// It handles all kinds of reflected values including primitives, structs, slices, maps, etc.
|
||||
// For nil values, it returns the nilString constant ("<nil>").
|
||||
// For unexported or inaccessible fields, it returns unexportedString ("<?>").
|
||||
// The output follows Go's syntax conventions where applicable (e.g., slices as [a, b], maps as {k:v}).
|
||||
func Reflect(r reflect.Value) string {
|
||||
if !r.IsValid() {
|
||||
return nilString
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
writeReflect(&b, r.Interface(), 0)
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Space concatenates arguments with space separators.
|
||||
// Convenience for With using " " as separator.
|
||||
func Space(args ...any) string { return With(space, args...) }
|
||||
|
||||
// Dot concatenates arguments with dot separators.
// Convenience for With using "." as separator.
func Dot(args ...any) string { return With(dot, args...) }
|
||||
|
||||
// Suffix concatenates with a suffix (no separator).
|
||||
// Equivalent to SuffixWith with empty sep.
|
||||
func Suffix(s any, args ...any) string {
|
||||
return SuffixWith(empty, s, args...)
|
||||
}
|
||||
|
||||
// SuffixWith concatenates with a suffix and separator.
|
||||
// Joins args with sep, then adds sep (if args present and sep not empty), then s.
|
||||
// Uses pooled Builder if enabled.
|
||||
func SuffixWith(sep string, s any, args ...any) string {
|
||||
b := New(sep)
|
||||
b.Grow(estimateWith(sep, args) + estimate([]any{s}))
|
||||
b.Add(args...)
|
||||
b.Add(s)
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// SuffixEach applies the same suffix to each argument and joins the pairs with sep.
|
||||
// Example: SuffixEach("-suf", " | ", "a","b") => "a-suf | b-suf"
|
||||
// Preestimates size including suffixes and seps.
|
||||
// Uses pooled Builder if enabled; manually adds sep between pairs, no sep between a and s.
|
||||
// Returns empty if no args.
|
||||
func SuffixEach(s any, sep string, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
sSize := estimate([]any{s})
|
||||
total := len(sep)*(len(args)-1) + estimate(args) + sSize*len(args)
|
||||
|
||||
b := New(empty)
|
||||
b.Grow(total)
|
||||
for i, a := range args {
|
||||
if i > 0 && sep != empty {
|
||||
b.buf.WriteString(sep)
|
||||
}
|
||||
write(&b.buf, a)
|
||||
write(&b.buf, s)
|
||||
}
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Sprint concatenates any values (no separators).
|
||||
// Usage: Sprint("a", 1, true) → "a1true"
|
||||
// Equivalent to Concat or With with an empty separator.
|
||||
func Sprint(args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
if len(args) == 1 {
|
||||
return valueToString(args[0])
|
||||
}
|
||||
|
||||
// For multiple args, use the existing Concat functionality
|
||||
return Concat(args...)
|
||||
}
|
||||
|
||||
// Trio joins exactly three values (no separator).
|
||||
// Equivalent to TrioWith with empty sep
|
||||
func Trio(a, b, c any) string {
|
||||
return TrioWith(empty, a, b, c)
|
||||
}
|
||||
|
||||
// TrioWith joins exactly three values with a separator.
|
||||
// Optimized for three args: uses With(sep, a, b, c).
|
||||
func TrioWith(sep string, a, b, c any) string {
|
||||
return With(sep, a, b, c)
|
||||
}
|
||||
|
||||
// With concatenates arguments with the specified separator.
|
||||
// Core concatenation function with sep.
|
||||
// Optimized for zero or one arg: empty or direct valueToString.
|
||||
// Fast path for all strings: exact preallocation, direct writes via raw strings.Builder (minimal branches/allocs).
|
||||
// Fallback: pooled Builder with estimateWith, adds args with sep.
|
||||
// Benefits from pooling if enabled for mixed types.
|
||||
func With(sep string, args ...any) string {
|
||||
switch len(args) {
|
||||
case 0:
|
||||
return empty
|
||||
case 1:
|
||||
return valueToString(args[0])
|
||||
}
|
||||
|
||||
// Fast path for all strings: use raw strings.Builder for speed, no pooling needed.
|
||||
allStrings := true
|
||||
totalLen := len(sep) * (len(args) - 1)
|
||||
for _, a := range args {
|
||||
if s, ok := a.(string); ok {
|
||||
totalLen += len(s)
|
||||
} else {
|
||||
allStrings = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if allStrings {
|
||||
var b strings.Builder
|
||||
b.Grow(totalLen)
|
||||
b.WriteString(args[0].(string))
|
||||
for i := 1; i < len(args); i++ {
|
||||
if sep != empty {
|
||||
b.WriteString(sep)
|
||||
}
|
||||
b.WriteString(args[i].(string))
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Fallback for mixed types: use pooled Builder.
|
||||
b := New(sep)
|
||||
b.Grow(estimateWith(sep, args))
|
||||
b.Add(args...)
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Wrap encloses concatenated args between before and after strings (no inner separator).
|
||||
// Equivalent to Concat(before, args..., after).
|
||||
func Wrap(before, after string, args ...any) string {
|
||||
b := Start()
|
||||
b.Grow(len(before) + len(after) + estimate(args))
|
||||
|
||||
b.Add(before)
|
||||
b.Add(args...)
|
||||
b.Add(after)
|
||||
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// WrapEach wraps each argument individually with before/after, concatenated without separators.
|
||||
// Applies before + arg + after to each arg.
|
||||
// Preestimates size; uses pooled Builder if enabled.
|
||||
// Returns empty if no args.
|
||||
// Useful for wrapping multiple items identically without joins.
|
||||
func WrapEach(before, after string, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return empty
|
||||
}
|
||||
total := (len(before)+len(after))*len(args) + estimate(args)
|
||||
|
||||
b := Start() // Use pooled builder, but we will write manually.
|
||||
b.Grow(total)
|
||||
for _, a := range args {
|
||||
write(&b.buf, before)
|
||||
write(&b.buf, a)
|
||||
write(&b.buf, after)
|
||||
}
|
||||
// No separators were ever added, so this is safe.
|
||||
b.needsSep = true // Correctly set state in case of reuse.
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// WrapWith encloses concatenated args between before and after strings,
|
||||
// joining the arguments with the provided separator.
|
||||
// If no args, returns before + after.
|
||||
// Builds inner with With(sep, args...), then Concat(before, inner, after).
|
||||
// Benefits from pooling via With and Concat.
|
||||
func WrapWith(sep, before, after string, args ...any) string {
|
||||
if len(args) == 0 {
|
||||
return before + after
|
||||
}
|
||||
// First, efficiently build the inner part.
|
||||
inner := With(sep, args...)
|
||||
|
||||
// Then, wrap it without allocating another slice.
|
||||
b := Start()
|
||||
b.Grow(len(before) + len(inner) + len(after))
|
||||
|
||||
b.Add(before)
|
||||
b.Add(inner)
|
||||
b.Add(after)
|
||||
|
||||
return b.Output()
|
||||
}
|
||||
|
||||
// Pad surrounds a string with spaces on both sides.
|
||||
// Ensures proper spacing for SQL operators like "=", "AND", etc.
|
||||
// Example: Pad("=") returns " = " for cleaner formatting.
|
||||
func Pad(s string) string {
|
||||
return Concat(space, s, space)
|
||||
}
|
||||
|
||||
// PadWith adds a separator before the string and a space after it.
|
||||
// Useful for formatting SQL parts with custom leading separators.
|
||||
// Example: PadWith(",", "column") returns ",column ".
|
||||
func PadWith(sep, s string) string {
|
||||
return Concat(sep, s, space)
|
||||
}
|
||||
|
||||
// Parens wraps content in parentheses
|
||||
// Useful for grouping SQL conditions or expressions
|
||||
// Example: Parens("a = b AND c = d") → "(a = b AND c = d)"
|
||||
func Parens(content string) string {
|
||||
return Concat(parenOpen, content, parenClose)
|
||||
}
|
||||
|
||||
// ParensWith wraps multiple arguments in parentheses with a separator
|
||||
// Example: ParensWith(" AND ", "a = b", "c = d") → "(a = b AND c = d)"
|
||||
func ParensWith(sep string, args ...any) string {
|
||||
return Concat(parenOpen, With(sep, args...), parenClose)
|
||||
}
|
||||
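The helpers above are thin wrappers around With and the Builder. An added sketch of a few of them with their expected outputs, under the assumption that they behave as their doc comments describe:

```go
package main

import (
	"fmt"

	"github.com/olekukonko/cat"
)

func main() {
	fmt.Println(cat.With("-", "a", "b", "c"))                      // a-b-c
	fmt.Println(cat.Path("dir", "sub", "file.txt"))                // dir/sub/file.txt
	fmt.Println(cat.CSV(1, 2, 3))                                  // 1,2,3
	fmt.Println(cat.WrapWith(", ", "[", "]", 1, 2, 3))             // [1, 2, 3]
	fmt.Println(cat.FlattenWith(",", 1, []any{2, []any{3, 4}}, 5)) // 1,2,3,4,5
	fmt.Println(cat.PrefixEach("#", " ", "a", "b"))                // #a #b
	fmt.Println(cat.Quote("x", "y"))                               // "x" "y"
}
```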
376
vendor/github.com/olekukonko/cat/fn.go
generated
vendored
Normal file
376
vendor/github.com/olekukonko/cat/fn.go
generated
vendored
Normal file
@@ -0,0 +1,376 @@
|
||||
package cat
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// write writes a value to the given strings.Builder using fast paths to avoid temporary allocations.
|
||||
// It handles common types like strings, byte slices, integers, floats, and booleans directly for efficiency.
|
||||
// For other types, it falls back to fmt.Fprint, which may involve allocations.
|
||||
// This function is optimized for performance in string concatenation scenarios, prioritizing
|
||||
// common cases like strings and numbers at the top of the type switch for compiler optimization.
|
||||
// Note: For integers and floats, it uses stack-allocated buffers and strconv.Append* functions to
|
||||
// convert numbers to strings without heap allocations.
|
||||
func write(b *strings.Builder, arg any) {
|
||||
writeValue(b, arg, 0)
|
||||
}
|
||||
|
||||
// writeValue appends the string representation of arg to b, handling recursion with a depth limit.
|
||||
// It serves as a recursive helper for write, directly handling primitives and delegating complex
|
||||
// types to writeReflect. The depth parameter prevents excessive recursion in deeply nested structures.
|
||||
func writeValue(b *strings.Builder, arg any, depth int) {
|
||||
// Handle recursion depth limit
|
||||
if depth > maxRecursionDepth {
|
||||
b.WriteString("...")
|
||||
return
|
||||
}
|
||||
|
||||
// Handle nil values
|
||||
if arg == nil {
|
||||
b.WriteString(nilString)
|
||||
return
|
||||
}
|
||||
|
||||
// Fast path type switch for all primitive types
|
||||
switch v := arg.(type) {
|
||||
case string:
|
||||
b.WriteString(v)
|
||||
case []byte:
|
||||
b.WriteString(bytesToString(v))
|
||||
case int:
|
||||
var buf [20]byte
|
||||
b.Write(strconv.AppendInt(buf[:0], int64(v), 10))
|
||||
case int64:
|
||||
var buf [20]byte
|
||||
b.Write(strconv.AppendInt(buf[:0], v, 10))
|
||||
case int32:
|
||||
var buf [11]byte
|
||||
b.Write(strconv.AppendInt(buf[:0], int64(v), 10))
|
||||
case int16:
|
||||
var buf [6]byte
|
||||
b.Write(strconv.AppendInt(buf[:0], int64(v), 10))
|
||||
case int8:
|
||||
var buf [4]byte
|
||||
b.Write(strconv.AppendInt(buf[:0], int64(v), 10))
|
||||
case uint:
|
||||
var buf [20]byte
|
||||
b.Write(strconv.AppendUint(buf[:0], uint64(v), 10))
|
||||
case uint64:
|
||||
var buf [20]byte
|
||||
b.Write(strconv.AppendUint(buf[:0], v, 10))
|
||||
case uint32:
|
||||
var buf [10]byte
|
||||
b.Write(strconv.AppendUint(buf[:0], uint64(v), 10))
|
||||
case uint16:
|
||||
var buf [5]byte
|
||||
b.Write(strconv.AppendUint(buf[:0], uint64(v), 10))
|
||||
case uint8:
|
||||
var buf [3]byte
|
||||
b.Write(strconv.AppendUint(buf[:0], uint64(v), 10))
|
||||
case float64:
|
||||
var buf [24]byte
|
||||
b.Write(strconv.AppendFloat(buf[:0], v, 'f', -1, 64))
|
||||
case float32:
|
||||
var buf [24]byte
|
||||
b.Write(strconv.AppendFloat(buf[:0], float64(v), 'f', -1, 32))
|
||||
case bool:
|
||||
if v {
|
||||
b.WriteString("true")
|
||||
} else {
|
||||
b.WriteString("false")
|
||||
}
|
||||
case fmt.Stringer:
|
||||
b.WriteString(v.String())
|
||||
case error:
|
||||
b.WriteString(v.Error())
|
||||
default:
|
||||
// Fallback to reflection-based handling
|
||||
writeReflect(b, arg, depth)
|
||||
}
|
||||
}
|
||||
|
||||
// writeReflect handles all complex types safely.
|
||||
func writeReflect(b *strings.Builder, arg any, depth int) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
b.WriteString("[!reflect panic!]")
|
||||
}
|
||||
}()
|
||||
|
||||
val := reflect.ValueOf(arg)
|
||||
if val.Kind() == reflect.Ptr {
|
||||
if val.IsNil() {
|
||||
b.WriteString(nilString)
|
||||
return
|
||||
}
|
||||
val = val.Elem()
|
||||
}
|
||||
|
||||
switch val.Kind() {
|
||||
case reflect.Slice, reflect.Array:
|
||||
b.WriteByte('[')
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
if i > 0 {
|
||||
b.WriteString(", ") // Use comma-space for readability
|
||||
}
|
||||
writeValue(b, val.Index(i).Interface(), depth+1)
|
||||
}
|
||||
b.WriteByte(']')
|
||||
|
||||
case reflect.Struct:
|
||||
typ := val.Type()
|
||||
b.WriteByte('{') // Use {} for structs to follow Go convention
|
||||
first := true
|
||||
for i := 0; i < val.NumField(); i++ {
|
||||
fieldValue := val.Field(i)
|
||||
if !fieldValue.CanInterface() {
|
||||
continue // Skip unexported fields
|
||||
}
|
||||
if !first {
|
||||
b.WriteByte(' ') // Use space as separator
|
||||
}
|
||||
first = false
|
||||
b.WriteString(typ.Field(i).Name)
|
||||
b.WriteByte(':')
|
||||
|
||||
writeValue(b, fieldValue.Interface(), depth+1)
|
||||
}
|
||||
b.WriteByte('}')
|
||||
|
||||
case reflect.Map:
|
||||
b.WriteByte('{')
|
||||
keys := val.MapKeys()
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
// A simple string-based sort for keys
|
||||
return fmt.Sprint(keys[i].Interface()) < fmt.Sprint(keys[j].Interface())
|
||||
})
|
||||
for i, key := range keys {
|
||||
if i > 0 {
|
||||
b.WriteByte(' ') // Use space as separator
|
||||
}
|
||||
writeValue(b, key.Interface(), depth+1)
|
||||
b.WriteByte(':')
|
||||
writeValue(b, val.MapIndex(key).Interface(), depth+1)
|
||||
}
|
||||
b.WriteByte('}')
|
||||
|
||||
case reflect.Interface:
|
||||
if val.IsNil() {
|
||||
b.WriteString(nilString)
|
||||
return
|
||||
}
|
||||
writeValue(b, val.Elem().Interface(), depth+1)
|
||||
|
||||
default:
|
||||
fmt.Fprint(b, arg)
|
||||
}
|
||||
}
|
||||
|
||||
// valueToString converts any value to a string representation.
|
||||
// It uses optimized paths for common types to avoid unnecessary allocations.
|
||||
// For types like integers and floats, it directly uses strconv functions.
|
||||
// This function is useful for single-argument conversions or as a helper in other parts of the package.
|
||||
// Unlike write, it returns a string instead of appending to a builder.
|
||||
func valueToString(arg any) string {
|
||||
switch v := arg.(type) {
|
||||
case string:
|
||||
return v
|
||||
case []byte:
|
||||
return bytesToString(v)
|
||||
case int:
|
||||
return strconv.Itoa(v)
|
||||
case int64:
|
||||
return strconv.FormatInt(v, 10)
|
||||
case int32:
|
||||
return strconv.FormatInt(int64(v), 10)
|
||||
case uint:
|
||||
return strconv.FormatUint(uint64(v), 10)
|
||||
case uint64:
|
||||
return strconv.FormatUint(v, 10)
|
||||
case float64:
|
||||
return strconv.FormatFloat(v, 'f', -1, 64)
|
||||
case bool:
|
||||
if v {
|
||||
return "true"
|
||||
}
|
||||
return "false"
|
||||
case fmt.Stringer:
|
||||
return v.String()
|
||||
case error:
|
||||
return v.Error()
|
||||
default:
|
||||
return fmt.Sprint(v)
|
||||
}
|
||||
}
|
||||
|
||||
// estimateWith calculates a conservative estimate of the total string length when concatenating
|
||||
// the given arguments with a separator. This is used for preallocating capacity in strings.Builder
|
||||
// to minimize reallocations during building.
|
||||
// It accounts for the length of separators and estimates the length of each argument based on its type.
|
||||
// If no arguments are provided, it returns 0.
|
||||
func estimateWith(sep string, args []any) int {
|
||||
if len(args) == 0 {
|
||||
return 0
|
||||
}
|
||||
size := len(sep) * (len(args) - 1)
|
||||
size += estimate(args)
|
||||
return size
|
||||
}
|
||||
|
||||
// estimate calculates a conservative estimate of the combined string length of the given arguments.
|
||||
// It iterates over each argument and adds an estimated length based on its type:
|
||||
// - Strings and byte slices: exact length.
|
||||
// - Numbers: calculated digit count using numLen or uNumLen.
|
||||
// - Floats and others: fixed conservative estimates (e.g., 16 or 24 bytes).
|
||||
// This helper is used internally by estimateWith and focuses solely on the arguments without separators.
|
||||
func estimate(args []any) int {
|
||||
var size int
|
||||
for _, a := range args {
|
||||
switch v := a.(type) {
|
||||
case string:
|
||||
size += len(v)
|
||||
case []byte:
|
||||
size += len(v)
|
||||
case int:
|
||||
size += numLen(int64(v))
|
||||
case int8:
|
||||
size += numLen(int64(v))
|
||||
case int16:
|
||||
size += numLen(int64(v))
|
||||
case int32:
|
||||
size += numLen(int64(v))
|
||||
case int64:
|
||||
size += numLen(v)
|
||||
case uint:
|
||||
size += uNumLen(uint64(v))
|
||||
case uint8:
|
||||
size += uNumLen(uint64(v))
|
||||
case uint16:
|
||||
size += uNumLen(uint64(v))
|
||||
case uint32:
|
||||
size += uNumLen(uint64(v))
|
||||
case uint64:
|
||||
size += uNumLen(v)
|
||||
case float32:
|
||||
size += 16
|
||||
case float64:
|
||||
size += 24
|
||||
case bool:
|
||||
size += 5 // "false"
|
||||
case fmt.Stringer, error:
|
||||
size += 16 // conservative
|
||||
default:
|
||||
size += 16 // conservative
|
||||
}
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
// numLen returns the number of characters required to represent the signed integer n as a string.
|
||||
// It handles negative numbers by adding 1 for the '-' sign and uses a loop to count digits.
|
||||
// Special handling for math.MinInt64 to avoid overflow when negating.
|
||||
// Returns 1 for 0, and up to 20 for the largest values.
|
||||
func numLen(n int64) int {
|
||||
if n == 0 {
|
||||
return 1
|
||||
}
|
||||
c := 0
|
||||
if n < 0 {
|
||||
c = 1 // for '-'
|
||||
// NOTE: math.MinInt64 negated overflows; handle by adding one digit and returning 20.
|
||||
if n == -1<<63 {
|
||||
return 20
|
||||
}
|
||||
n = -n
|
||||
}
|
||||
for n > 0 {
|
||||
n /= 10
|
||||
c++
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// uNumLen returns the number of characters required to represent the unsigned integer n as a string.
|
||||
// It uses a loop to count digits.
|
||||
// Returns 1 for 0, and up to 20 for the largest uint64 values.
|
||||
func uNumLen(n uint64) int {
|
||||
if n == 0 {
|
||||
return 1
|
||||
}
|
||||
c := 0
|
||||
for n > 0 {
|
||||
n /= 10
|
||||
c++
|
||||
}
|
||||
return c
|
||||
}
|
||||
|
||||
// bytesToString converts a byte slice to a string efficiently.
|
||||
// If the package's UnsafeBytes flag is set (via IsUnsafeBytes()), it uses unsafe operations
|
||||
// to create a string backed by the same memory as the byte slice, avoiding a copy.
|
||||
// This is zero-allocation when unsafe is enabled.
|
||||
// Falls back to standard string(bts) conversion otherwise.
|
||||
// For empty slices, it returns a constant empty string.
|
||||
// Compatible with Go 1.20+ unsafe functions like unsafe.String and unsafe.SliceData.
|
||||
func bytesToString(bts []byte) string {
|
||||
if len(bts) == 0 {
|
||||
return empty
|
||||
}
|
||||
if IsUnsafeBytes() {
|
||||
// Go 1.20+: unsafe.String and unsafe.SliceData (both added in Go 1.20) give a zero-copy view.
|
||||
return unsafe.String(unsafe.SliceData(bts), len(bts))
|
||||
}
|
||||
return string(bts)
|
||||
}
|
||||
|
||||
// recursiveEstimate calculates the estimated string length for potentially nested arguments,
|
||||
// including the lengths of separators between elements. It recurses on nested []any slices,
|
||||
// flattening the structure while accounting for separators only between non-empty subparts.
|
||||
// This function is useful for preallocating capacity in builders for nested concatenation operations.
|
||||
func recursiveEstimate(sep string, args []any) int {
|
||||
if len(args) == 0 {
|
||||
return 0
|
||||
}
|
||||
size := 0
|
||||
needsSep := false
|
||||
for _, a := range args {
|
||||
switch v := a.(type) {
|
||||
case []any:
|
||||
subSize := recursiveEstimate(sep, v)
|
||||
if subSize > 0 {
|
||||
if needsSep {
|
||||
size += len(sep)
|
||||
}
|
||||
size += subSize
|
||||
needsSep = true
|
||||
}
|
||||
default:
|
||||
if needsSep {
|
||||
size += len(sep)
|
||||
}
|
||||
size += estimate([]any{a})
|
||||
needsSep = true
|
||||
}
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
// recursiveAdd appends the string representations of potentially nested arguments to the builder.
|
||||
// It recurses on nested []any slices, effectively flattening the structure by adding leaf values
|
||||
// directly via b.Add without inserting separators (separators are handled externally if needed).
|
||||
// This function is designed for efficient concatenation of nested argument lists.
|
||||
func recursiveAdd(b *Builder, args []any) {
|
||||
for _, a := range args {
|
||||
switch v := a.(type) {
|
||||
case []any:
|
||||
recursiveAdd(b, v)
|
||||
default:
|
||||
b.Add(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
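The unexported numLen/uNumLen helpers above count digits so buffers can be presized exactly. A standalone sketch that checks the reasoning against strconv (it reimplements the counting loop rather than calling the unexported helpers):

```go
package main

import (
	"fmt"
	"math"
	"strconv"
)

// digits mirrors the counting loop used by the unexported numLen/uNumLen helpers.
func digits(n uint64) int {
	if n == 0 {
		return 1
	}
	c := 0
	for n > 0 {
		n /= 10
		c++
	}
	return c
}

func main() {
	for _, n := range []uint64{0, 7, 10, 99, 12345, math.MaxUint64} {
		// The last two columns always agree: the loop counts exactly the
		// characters strconv would emit for the same number.
		fmt.Println(n, digits(n), len(strconv.FormatUint(n, 10)))
	}
}
```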
161
vendor/github.com/olekukonko/cat/sql.go
generated
vendored
Normal file
161
vendor/github.com/olekukonko/cat/sql.go
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
package cat
|
||||
|
||||
// On builds a SQL ON clause comparing two columns across tables.
|
||||
// Formats as: "table1.column1 = table2.column2" with proper spacing.
|
||||
// Useful in JOIN conditions to match keys between tables.
|
||||
func On(table1, column1, table2, column2 string) string {
|
||||
return With(space,
|
||||
With(dot, table1, column1),
|
||||
Pad(equal),
|
||||
With(dot, table2, column2),
|
||||
)
|
||||
}
|
||||
|
||||
// Using builds a SQL condition comparing two aliased columns.
|
||||
// Formats as: "alias1.column1 = alias2.column2" for JOINs or filters.
|
||||
// Helps when working with table aliases in complex queries.
|
||||
func Using(alias1, column1, alias2, column2 string) string {
|
||||
return With(space,
|
||||
With(dot, alias1, column1),
|
||||
Pad(equal),
|
||||
With(dot, alias2, column2),
|
||||
)
|
||||
}
|
||||
|
||||
// And joins multiple SQL conditions with the AND operator.
|
||||
// Adds spacing to ensure clean SQL output (e.g., "cond1 AND cond2").
|
||||
// Accepts variadic arguments for flexible condition chaining.
|
||||
func And(conditions ...any) string {
|
||||
return With(Pad(and), conditions...)
|
||||
}
|
||||
|
||||
// In creates a SQL IN clause with properly quoted values
|
||||
// Example: In("status", "active", "pending") → "status IN ('active', 'pending')"
|
||||
// Handles value quoting and comma separation automatically
|
||||
func In(column string, values ...string) string {
|
||||
if len(values) == 0 {
|
||||
return Concat(column, inOpen, inClose)
|
||||
}
|
||||
|
||||
quotedValues := make([]string, len(values))
|
||||
for i, v := range values {
|
||||
quotedValues[i] = "'" + v + "'"
|
||||
}
|
||||
return Concat(column, inOpen, JoinWith(comma+space, quotedValues...), inClose)
|
||||
}
|
||||
|
||||
// As creates an aliased SQL expression
|
||||
// Example: As("COUNT(*)", "total_count") → "COUNT(*) AS total_count"
|
||||
func As(expression, alias string) string {
|
||||
return Concat(expression, asSQL, alias)
|
||||
}
|
||||
|
||||
// Count creates a COUNT expression with optional alias
|
||||
// Example: Count("id") → "COUNT(id)"
|
||||
// Example: Count("id", "total") → "COUNT(id) AS total"
|
||||
// Example: Count("DISTINCT user_id", "unique_users") → "COUNT(DISTINCT user_id) AS unique_users"
|
||||
func Count(column string, alias ...string) string {
|
||||
expression := Concat(count, column, parenClose)
|
||||
if len(alias) == 0 {
|
||||
return expression
|
||||
}
|
||||
return As(expression, alias[0])
|
||||
}
|
||||
|
||||
// CountAll creates COUNT(*) with optional alias
|
||||
// Example: CountAll() → "COUNT(*)"
|
||||
// Example: CountAll("total") → "COUNT(*) AS total"
|
||||
func CountAll(alias ...string) string {
|
||||
if len(alias) == 0 {
|
||||
return countAll
|
||||
}
|
||||
return As(countAll, alias[0])
|
||||
}
|
||||
|
||||
// Sum creates a SUM expression with optional alias
|
||||
// Example: Sum("amount") → "SUM(amount)"
|
||||
// Example: Sum("amount", "total") → "SUM(amount) AS total"
|
||||
func Sum(column string, alias ...string) string {
|
||||
expression := Concat(sum, column, parenClose)
|
||||
if len(alias) == 0 {
|
||||
return expression
|
||||
}
|
||||
return As(expression, alias[0])
|
||||
}
|
||||
|
||||
// Avg creates an AVG expression with optional alias
|
||||
// Example: Avg("score") → "AVG(score)"
|
||||
// Example: Avg("score", "average") → "AVG(score) AS average"
|
||||
func Avg(column string, alias ...string) string {
|
||||
expression := Concat(avg, column, parenClose)
|
||||
if len(alias) == 0 {
|
||||
return expression
|
||||
}
|
||||
return As(expression, alias[0])
|
||||
}
|
||||
|
||||
// Max creates a MAX expression with optional alias
|
||||
// Example: Max("price") → "MAX(price)"
|
||||
// Example: Max("price", "max_price") → "MAX(price) AS max_price"
|
||||
func Max(column string, alias ...string) string {
|
||||
expression := Concat(maxOpen, column, parenClose)
|
||||
if len(alias) == 0 {
|
||||
return expression
|
||||
}
|
||||
return As(expression, alias[0])
|
||||
}
|
||||
|
||||
// Min creates a MIN expression with optional alias
|
||||
// Example: Min("price") → "MIN(price)"
|
||||
// Example: Min("price", "min_price") → "MIN(price) AS min_price"
|
||||
func Min(column string, alias ...string) string {
|
||||
expression := Concat(minOpen, column, parenClose)
|
||||
if len(alias) == 0 {
|
||||
return expression
|
||||
}
|
||||
return As(expression, alias[0])
|
||||
}
|
||||
|
||||
// Case creates a SQL CASE expression with optional alias
|
||||
// Example: Case("WHEN status = 'active' THEN 1 ELSE 0 END", "is_active") → "CASE WHEN status = 'active' THEN 1 ELSE 0 END AS is_active"
|
||||
func Case(expression string, alias ...string) string {
|
||||
caseExpr := Concat(caseSQL, expression)
|
||||
if len(alias) == 0 {
|
||||
return caseExpr
|
||||
}
|
||||
return As(caseExpr, alias[0])
|
||||
}
|
||||
|
||||
// CaseWhen creates a complete SQL CASE expression from individual parts with proper value handling
|
||||
// Example: CaseWhen("status =", "'active'", "1", "0", "is_active") → "CASE WHEN status = 'active' THEN 1 ELSE 0 END AS is_active"
|
||||
// Example: CaseWhen("age >", "18", "'adult'", "'minor'", "age_group") → "CASE WHEN age > 18 THEN 'adult' ELSE 'minor' END AS age_group"
|
||||
func CaseWhen(conditionPart string, conditionValue, thenValue, elseValue any, alias ...string) string {
|
||||
condition := Concat(conditionPart, valueToString(conditionValue))
|
||||
expression := Concat(
|
||||
when, condition, then, valueToString(thenValue), elseSQL, valueToString(elseValue), end,
|
||||
)
|
||||
return Case(expression, alias...)
|
||||
}
|
||||
|
||||
// CaseWhenMulti creates a SQL CASE expression with multiple WHEN clauses
|
||||
// Example: CaseWhenMulti([]string{"status =", "age >"}, []any{"'active'", 18}, []any{1, "'adult'"}, 0, "result") → "CASE WHEN status = 'active' THEN 1 WHEN age > 18 THEN 'adult' ELSE 0 END AS result"
|
||||
func CaseWhenMulti(conditionParts []string, conditionValues, thenValues []any, elseValue any, alias ...string) string {
|
||||
if len(conditionParts) != len(conditionValues) || len(conditionParts) != len(thenValues) {
|
||||
return "" // or handle error
|
||||
}
|
||||
|
||||
var whenClauses []string
|
||||
for i := 0; i < len(conditionParts); i++ {
|
||||
condition := Concat(conditionParts[i], valueToString(conditionValues[i]))
|
||||
whenClause := Concat(when, condition, then, valueToString(thenValues[i]))
|
||||
whenClauses = append(whenClauses, whenClause)
|
||||
}
|
||||
|
||||
expression := Concat(
|
||||
JoinWith(space, whenClauses...),
|
||||
elseSQL,
|
||||
valueToString(elseValue),
|
||||
end,
|
||||
)
|
||||
return Case(expression, alias...)
|
||||
}
|
||||
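Reviewer note: the helpers in the new cat/sql.go above only build strings. A minimal usage sketch, based solely on the doc comments in this file (the import path github.com/olekukonko/cat is taken from the other files in this diff, and the expected output strings are quoted from the comments rather than executed here):

```go
package main

import (
	"fmt"

	"github.com/olekukonko/cat"
)

func main() {
	// Build a JOIN/WHERE fragment from the string helpers added above.
	cond := cat.And(
		cat.On("orders", "user_id", "users", "id"), // "orders.user_id = users.id"
		cat.In("status", "active", "pending"),      // "status IN ('active', 'pending')"
	)
	fmt.Println(cond) // the two conditions joined with " AND " per the doc comments

	// Aggregates with optional aliases.
	fmt.Println(cat.CountAll("total"))      // "COUNT(*) AS total"
	fmt.Println(cat.Sum("amount", "total")) // "SUM(amount) AS total"
}
```

Note that In only wraps each value in single quotes (see the loop above); it does not escape them.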
7 vendor/github.com/olekukonko/ll/README.md (generated, vendored)
@@ -222,6 +222,13 @@ logger.Info("Slog log") // Output: level=INFO msg="Slog log" namespace=app class
|
||||
ll.Stack("Critical error") // Output: [app] ERROR: Critical error [stack=...] (see example/stack.png)
|
||||
```
|
||||
|
||||
4. **General Output**
Logs output in a structured way for inspection of public and private values.
|
||||
```go
|
||||
ll.Handler(lh.NewColorizedHandler(os.Stdout))
|
||||
ll.Output(&SomeStructWithPrivateValues{})
|
||||
```
|
||||
|
||||
#### Performance Tracking
|
||||
Measure execution time for performance analysis.
|
||||
```go
|
||||
|
||||
421 vendor/github.com/olekukonko/ll/concat.go (generated, vendored)
@@ -1,421 +0,0 @@
|
||||
package ll
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
const (
|
||||
maxRecursionDepth = 20 // Maximum depth for recursive type handling to prevent stack overflow
|
||||
nilString = "<nil>" // String representation for nil values
|
||||
unexportedString = "<?>" // String representation for unexported fields
|
||||
)
|
||||
|
||||
// Concat efficiently concatenates values without a separator using the default logger.
|
||||
// It converts each argument to a string and joins them directly, optimizing for performance
|
||||
// in logging scenarios. Thread-safe as it does not modify shared state.
|
||||
// Example:
|
||||
//
|
||||
// msg := ll.Concat("Hello", 42, true) // Returns "Hello42true"
|
||||
func Concat(args ...any) string {
|
||||
return concat(args...)
|
||||
}
|
||||
|
||||
// ConcatSpaced concatenates values with a space separator using the default logger.
|
||||
// It converts each argument to a string and joins them with spaces, suitable for log message
|
||||
// formatting. Thread-safe as it does not modify shared state.
|
||||
// Example:
|
||||
//
|
||||
// msg := ll.ConcatSpaced("Hello", 42, true) // Returns "Hello 42 true"
|
||||
func ConcatSpaced(args ...any) string {
|
||||
return concatSpaced(args...)
|
||||
}
|
||||
|
||||
// ConcatAll concatenates elements with a separator, prefix, and suffix using the default logger.
|
||||
// It combines before, main, and after arguments with the specified separator, optimizing memory
|
||||
// allocation for logging. Thread-safe as it does not modify shared state.
|
||||
// Example:
|
||||
//
|
||||
// msg := ll.ConcatAll(",", []any{"prefix"}, []any{"suffix"}, "main")
|
||||
// // Returns "prefix,main,suffix"
|
||||
func ConcatAll(sep string, before, after []any, args ...any) string {
|
||||
return concatenate(sep, before, after, args...)
|
||||
}
|
||||
|
||||
// concat efficiently concatenates values without a separator.
|
||||
// It converts each argument to a string and joins them directly, optimizing for performance
|
||||
// in logging scenarios. Used internally by Concat and other logging functions.
|
||||
// Example:
|
||||
//
|
||||
// msg := concat("Hello", 42, true) // Returns "Hello42true"
|
||||
func concat(args ...any) string {
|
||||
return concatWith("", args...)
|
||||
}
|
||||
|
||||
// concatSpaced concatenates values with a space separator.
|
||||
// It converts each argument to a string and joins them with spaces, suitable for formatting
|
||||
// log messages. Used internally by ConcatSpaced.
|
||||
// Example:
|
||||
//
|
||||
// msg := concatSpaced("Hello", 42, true) // Returns "Hello 42 true"
|
||||
func concatSpaced(args ...any) string {
|
||||
return concatWith(lx.Space, args...)
|
||||
}
|
||||
|
||||
// concatWith concatenates values with a specified separator using optimized type handling.
|
||||
// It builds a string from arguments, handling various types efficiently (strings, numbers,
|
||||
// structs, etc.), and is used by concat and concatSpaced for log message construction.
|
||||
// Thread-safe as it does not modify shared state.
|
||||
// Example:
|
||||
//
|
||||
// msg := concatWith(",", "Hello", 42, true) // Returns "Hello,42,true"
|
||||
func concatWith(sep string, args ...any) string {
|
||||
switch len(args) {
|
||||
case 0:
|
||||
return ""
|
||||
case 1:
|
||||
return concatToString(args[0])
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.Grow(concatEstimateArgs(sep, args))
|
||||
|
||||
for i, arg := range args {
|
||||
if i > 0 {
|
||||
b.WriteString(sep)
|
||||
}
|
||||
concatWriteValue(&b, arg, 0)
|
||||
}
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// concatenate concatenates elements with separators, prefixes, and suffixes efficiently.
|
||||
// It combines before, main, and after arguments with the specified separator, optimizing
|
||||
// memory allocation for complex log message formatting. Used internally by ConcatAll.
|
||||
// Example:
|
||||
//
|
||||
// msg := concatenate(",", []any{"prefix"}, []any{"suffix"}, "main")
|
||||
// // Returns "prefix,main,suffix"
|
||||
func concatenate(sep string, before []any, after []any, args ...any) string {
|
||||
totalLen := len(before) + len(after) + len(args)
|
||||
switch totalLen {
|
||||
case 0:
|
||||
return ""
|
||||
case 1:
|
||||
switch {
|
||||
case len(before) > 0:
|
||||
return concatToString(before[0])
|
||||
case len(args) > 0:
|
||||
return concatToString(args[0])
|
||||
default:
|
||||
return concatToString(after[0])
|
||||
}
|
||||
}
|
||||
|
||||
var b strings.Builder
|
||||
b.Grow(concatEstimateTotal(sep, before, after, args))
|
||||
|
||||
// Write before elements
|
||||
concatWriteGroup(&b, sep, before)
|
||||
|
||||
// Write main arguments
|
||||
if len(before) > 0 && len(args) > 0 {
|
||||
b.WriteString(sep)
|
||||
}
|
||||
concatWriteGroup(&b, sep, args)
|
||||
|
||||
// Write after elements
|
||||
if len(after) > 0 && (len(before) > 0 || len(args) > 0) {
|
||||
b.WriteString(sep)
|
||||
}
|
||||
concatWriteGroup(&b, sep, after)
|
||||
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// concatWriteGroup writes a group of arguments to a strings.Builder with a separator.
|
||||
// It handles each argument by converting it to a string, used internally by concatenate
|
||||
// to process before, main, or after groups in log message construction.
|
||||
// Example:
|
||||
//
|
||||
// var b strings.Builder
|
||||
// concatWriteGroup(&b, ",", []any{"a", 42}) // Writes "a,42" to b
|
||||
func concatWriteGroup(b *strings.Builder, sep string, group []any) {
|
||||
for i, arg := range group {
|
||||
if i > 0 {
|
||||
b.WriteString(sep)
|
||||
}
|
||||
concatWriteValue(b, arg, 0)
|
||||
}
|
||||
}
|
||||
|
||||
// concatToString converts a single argument to a string efficiently.
|
||||
// It handles common types (string, []byte, fmt.Stringer) with minimal overhead and falls
|
||||
// back to fmt.Sprint for other types. Used internally by concat and concatenate.
|
||||
// Example:
|
||||
//
|
||||
// s := concatToString("Hello") // Returns "Hello"
|
||||
// s := concatToString([]byte{65, 66}) // Returns "AB"
|
||||
func concatToString(arg any) string {
|
||||
switch v := arg.(type) {
|
||||
case string:
|
||||
return v
|
||||
case []byte:
|
||||
return *(*string)(unsafe.Pointer(&v))
|
||||
case fmt.Stringer:
|
||||
return v.String()
|
||||
case error:
|
||||
return v.Error()
|
||||
default:
|
||||
return fmt.Sprint(v)
|
||||
}
|
||||
}
|
||||
|
||||
// concatEstimateTotal estimates the total string length for concatenate.
|
||||
// It calculates the expected size of the concatenated string, including before, main, and
|
||||
// after arguments with separators, to preallocate the strings.Builder capacity.
|
||||
// Example:
|
||||
//
|
||||
// size := concatEstimateTotal(",", []any{"prefix"}, []any{"suffix"}, "main")
|
||||
// // Returns estimated length for "prefix,main,suffix"
|
||||
func concatEstimateTotal(sep string, before, after, args []any) int {
|
||||
size := 0
|
||||
if len(before) > 0 {
|
||||
size += concatEstimateArgs(sep, before)
|
||||
}
|
||||
if len(args) > 0 {
|
||||
if size > 0 {
|
||||
size += len(sep)
|
||||
}
|
||||
size += concatEstimateArgs(sep, args)
|
||||
}
|
||||
if len(after) > 0 {
|
||||
if size > 0 {
|
||||
size += len(sep)
|
||||
}
|
||||
size += concatEstimateArgs(sep, after)
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
// concatEstimateArgs estimates the string length for a group of arguments.
|
||||
// It sums the estimated sizes of each argument plus separators, used by concatEstimateTotal
|
||||
// and concatWith to optimize memory allocation for log message construction.
|
||||
// Example:
|
||||
//
|
||||
// size := concatEstimateArgs(",", []any{"hello", 42}) // Returns estimated length for "hello,42"
|
||||
func concatEstimateArgs(sep string, args []any) int {
|
||||
if len(args) == 0 {
|
||||
return 0
|
||||
}
|
||||
size := len(sep) * (len(args) - 1)
|
||||
for _, arg := range args {
|
||||
size += concatEstimateSize(arg)
|
||||
}
|
||||
return size
|
||||
}
|
||||
|
||||
// concatEstimateSize estimates the string length for a single argument.
|
||||
// It provides size estimates for various types (strings, numbers, booleans, etc.) to
|
||||
// optimize strings.Builder capacity allocation in logging functions.
|
||||
// Example:
|
||||
//
|
||||
// size := concatEstimateSize("hello") // Returns 5
|
||||
// size := concatEstimateSize(42) // Returns ~2
|
||||
func concatEstimateSize(arg any) int {
|
||||
switch v := arg.(type) {
|
||||
case string:
|
||||
return len(v)
|
||||
case []byte:
|
||||
return len(v)
|
||||
case int:
|
||||
return concatNumLen(int64(v))
|
||||
case int64:
|
||||
return concatNumLen(v)
|
||||
case int32:
|
||||
return concatNumLen(int64(v))
|
||||
case int16:
|
||||
return concatNumLen(int64(v))
|
||||
case int8:
|
||||
return concatNumLen(int64(v))
|
||||
case uint:
|
||||
return concatNumLen(uint64(v))
|
||||
case uint64:
|
||||
return concatNumLen(v)
|
||||
case uint32:
|
||||
return concatNumLen(uint64(v))
|
||||
case uint16:
|
||||
return concatNumLen(uint64(v))
|
||||
case uint8:
|
||||
return concatNumLen(uint64(v))
|
||||
case float64:
|
||||
return 24 // Max digits for float64
|
||||
case float32:
|
||||
return 16 // Max digits for float32
|
||||
case bool:
|
||||
if v {
|
||||
return 4 // "true"
|
||||
}
|
||||
return 5 // "false"
|
||||
case fmt.Stringer:
|
||||
return 16 // Conservative estimate
|
||||
default:
|
||||
return 16 // Default estimate
|
||||
}
|
||||
}
|
||||
|
||||
// concatNumLen estimates the string length for a signed or unsigned integer.
|
||||
// It returns a conservative estimate (20 digits) for int64 or uint64 values, including
|
||||
// a sign for negative numbers, used by concatEstimateSize for memory allocation.
|
||||
// Example:
|
||||
//
|
||||
// size := concatNumLen(int64(-123)) // Returns 20
|
||||
// size := concatNumLen(uint64(123)) // Returns 20
|
||||
func concatNumLen[T int64 | uint64](v T) int {
|
||||
if v < 0 {
|
||||
return 20 // Max digits for int64 + sign
|
||||
}
|
||||
return 20 // Max digits for uint64
|
||||
}
|
||||
|
||||
// concatWriteValue writes a formatted value to a strings.Builder with recursion depth tracking.
|
||||
// It handles various types (strings, numbers, structs, slices, etc.) and prevents infinite
|
||||
// recursion by limiting depth. Used internally by concatWith and concatWriteGroup for log
|
||||
// message formatting.
|
||||
// Example:
|
||||
//
|
||||
// var b strings.Builder
|
||||
// concatWriteValue(&b, "hello", 0) // Writes "hello" to b
|
||||
// concatWriteValue(&b, []int{1, 2}, 0) // Writes "[1,2]" to b
|
||||
func concatWriteValue(b *strings.Builder, arg any, depth int) {
|
||||
if depth > maxRecursionDepth {
|
||||
b.WriteString("...")
|
||||
return
|
||||
}
|
||||
|
||||
if arg == nil {
|
||||
b.WriteString(nilString)
|
||||
return
|
||||
}
|
||||
|
||||
if s, ok := arg.(fmt.Stringer); ok {
|
||||
b.WriteString(s.String())
|
||||
return
|
||||
}
|
||||
|
||||
switch v := arg.(type) {
|
||||
case string:
|
||||
b.WriteString(v)
|
||||
case []byte:
|
||||
b.Write(v)
|
||||
case int:
|
||||
b.WriteString(strconv.FormatInt(int64(v), 10))
|
||||
case int64:
|
||||
b.WriteString(strconv.FormatInt(v, 10))
|
||||
case int32:
|
||||
b.WriteString(strconv.FormatInt(int64(v), 10))
|
||||
case int16:
|
||||
b.WriteString(strconv.FormatInt(int64(v), 10))
|
||||
case int8:
|
||||
b.WriteString(strconv.FormatInt(int64(v), 10))
|
||||
case uint:
|
||||
b.WriteString(strconv.FormatUint(uint64(v), 10))
|
||||
case uint64:
|
||||
b.WriteString(strconv.FormatUint(v, 10))
|
||||
case uint32:
|
||||
b.WriteString(strconv.FormatUint(uint64(v), 10))
|
||||
case uint16:
|
||||
b.WriteString(strconv.FormatUint(uint64(v), 10))
|
||||
case uint8:
|
||||
b.WriteString(strconv.FormatUint(uint64(v), 10))
|
||||
case float64:
|
||||
b.WriteString(strconv.FormatFloat(v, 'f', -1, 64))
|
||||
case float32:
|
||||
b.WriteString(strconv.FormatFloat(float64(v), 'f', -1, 32))
|
||||
case bool:
|
||||
if v {
|
||||
b.WriteString("true")
|
||||
} else {
|
||||
b.WriteString("false")
|
||||
}
|
||||
default:
|
||||
val := reflect.ValueOf(arg)
|
||||
if val.Kind() == reflect.Ptr {
|
||||
if val.IsNil() {
|
||||
b.WriteString(nilString)
|
||||
return
|
||||
}
|
||||
val = val.Elem()
|
||||
}
|
||||
|
||||
switch val.Kind() {
|
||||
case reflect.Slice, reflect.Array:
|
||||
concatFormatSlice(b, val, depth)
|
||||
case reflect.Struct:
|
||||
concatFormatStruct(b, val, depth)
|
||||
default:
|
||||
fmt.Fprint(b, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// concatFormatSlice formats a slice or array for logging.
|
||||
// It writes the elements in a bracketed, comma-separated format, handling nested types
|
||||
// recursively with depth tracking. Used internally by concatWriteValue for log message formatting.
|
||||
// Example:
|
||||
//
|
||||
// var b strings.Builder
|
||||
// val := reflect.ValueOf([]int{1, 2})
|
||||
// concatFormatSlice(&b, val, 0) // Writes "[1,2]" to b
|
||||
func concatFormatSlice(b *strings.Builder, val reflect.Value, depth int) {
|
||||
b.WriteByte('[')
|
||||
for i := 0; i < val.Len(); i++ {
|
||||
if i > 0 {
|
||||
b.WriteByte(',')
|
||||
}
|
||||
concatWriteValue(b, val.Index(i).Interface(), depth+1)
|
||||
}
|
||||
b.WriteByte(']')
|
||||
}
|
||||
|
||||
// concatFormatStruct formats a struct for logging.
|
||||
// It writes the struct’s exported fields in a bracketed, name:value format, handling nested
|
||||
// types recursively with depth tracking. Unexported fields are represented as "<?>".
|
||||
// Used internally by concatWriteValue for log message formatting.
|
||||
// Example:
|
||||
//
|
||||
// var b strings.Builder
|
||||
// val := reflect.ValueOf(struct{ Name string }{Name: "test"})
|
||||
// concatFormatStruct(&b, val, 0) // Writes "[Name:test]" to b
|
||||
func concatFormatStruct(b *strings.Builder, val reflect.Value, depth int) {
|
||||
typ := val.Type()
|
||||
b.WriteByte('[')
|
||||
|
||||
first := true
|
||||
for i := 0; i < val.NumField(); i++ {
|
||||
field := typ.Field(i)
|
||||
fieldValue := val.Field(i)
|
||||
|
||||
if !first {
|
||||
b.WriteString("; ")
|
||||
}
|
||||
first = false
|
||||
|
||||
b.WriteString(field.Name)
|
||||
b.WriteByte(':')
|
||||
|
||||
if !fieldValue.CanInterface() {
|
||||
b.WriteString(unexportedString)
|
||||
continue
|
||||
}
|
||||
|
||||
concatWriteValue(b, fieldValue.Interface(), depth+1)
|
||||
}
|
||||
|
||||
b.WriteByte(']')
|
||||
}
|
||||
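Reviewer note: concat.go is removed here and its call sites (field.go and ll.go below) switch to the external cat package. A hedged sketch of the assumed equivalence, with the expected strings taken from the removed helpers' doc comments rather than from cat itself:

```go
package main

import (
	"fmt"

	"github.com/olekukonko/cat"
)

func main() {
	// Assumed to replace the removed concatSpaced: space-separated stringification.
	fmt.Println(cat.Space("Hello", 42, true)) // expected "Hello 42 true"

	// Assumed to replace the removed concat: concatenation without a separator.
	fmt.Println(cat.Concat("Hello", 42, true)) // expected "Hello42true"
}
```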
11 vendor/github.com/olekukonko/ll/field.go (generated, vendored)
@@ -2,6 +2,7 @@ package ll
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/olekukonko/cat"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"os"
|
||||
"strings"
|
||||
@@ -50,7 +51,7 @@ func (fb *FieldBuilder) Info(args ...any) {
|
||||
return
|
||||
}
|
||||
// Log at Info level with the builder’s fields, no stack trace
|
||||
fb.logger.log(lx.LevelInfo, lx.ClassText, concatSpaced(args...), fb.fields, false)
|
||||
fb.logger.log(lx.LevelInfo, lx.ClassText, cat.Space(args...), fb.fields, false)
|
||||
}
|
||||
|
||||
// Infof logs a message at Info level with the builder’s fields.
|
||||
@@ -85,7 +86,7 @@ func (fb *FieldBuilder) Debug(args ...any) {
|
||||
return
|
||||
}
|
||||
// Log at Debug level with the builder’s fields, no stack trace
|
||||
fb.logger.log(lx.LevelDebug, lx.ClassText, concatSpaced(args...), fb.fields, false)
|
||||
fb.logger.log(lx.LevelDebug, lx.ClassText, cat.Space(args...), fb.fields, false)
|
||||
}
|
||||
|
||||
// Debugf logs a message at Debug level with the builder’s fields.
|
||||
@@ -120,7 +121,7 @@ func (fb *FieldBuilder) Warn(args ...any) {
|
||||
return
|
||||
}
|
||||
// Log at Warn level with the builder’s fields, no stack trace
|
||||
fb.logger.log(lx.LevelWarn, lx.ClassText, concatSpaced(args...), fb.fields, false)
|
||||
fb.logger.log(lx.LevelWarn, lx.ClassText, cat.Space(args...), fb.fields, false)
|
||||
}
|
||||
|
||||
// Warnf logs a message at Warn level with the builder’s fields.
|
||||
@@ -154,7 +155,7 @@ func (fb *FieldBuilder) Error(args ...any) {
|
||||
return
|
||||
}
|
||||
// Log at Error level with the builder’s fields, no stack trace
|
||||
fb.logger.log(lx.LevelError, lx.ClassText, concatSpaced(args...), fb.fields, false)
|
||||
fb.logger.log(lx.LevelError, lx.ClassText, cat.Space(args...), fb.fields, false)
|
||||
}
|
||||
|
||||
// Errorf logs a message at Error level with the builder’s fields.
|
||||
@@ -188,7 +189,7 @@ func (fb *FieldBuilder) Stack(args ...any) {
|
||||
return
|
||||
}
|
||||
// Log at Error level with the builder’s fields and a stack trace
|
||||
fb.logger.log(lx.LevelError, lx.ClassText, concatSpaced(args...), fb.fields, true)
|
||||
fb.logger.log(lx.LevelError, lx.ClassText, cat.Space(args...), fb.fields, true)
|
||||
}
|
||||
|
||||
// Stackf logs a message at Error level with a stack trace and the builder’s fields.
|
||||
|
||||
23 vendor/github.com/olekukonko/ll/global.go (generated, vendored)
@@ -1,11 +1,12 @@
|
||||
package ll
|
||||
|
||||
import (
|
||||
"github.com/olekukonko/ll/lh"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"os"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/olekukonko/ll/lh"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
// defaultLogger is the global logger instance for package-level logging functions.
|
||||
@@ -468,13 +469,7 @@ func Len() int64 {
|
||||
// duration := ll.Measure(func() { time.Sleep(time.Millisecond) })
|
||||
// // Output: [] INFO: function executed [duration=~1ms]
|
||||
func Measure(fns ...func()) time.Duration {
|
||||
start := time.Now()
|
||||
for _, fn := range fns {
|
||||
fn()
|
||||
}
|
||||
duration := time.Since(start)
|
||||
defaultLogger.Fields("duration", duration).Infof("function executed")
|
||||
return duration
|
||||
return defaultLogger.Measure(fns...)
|
||||
}
|
||||
|
||||
// Benchmark logs the duration since a start time at Info level using the default logger.
|
||||
@@ -486,7 +481,7 @@ func Measure(fns ...func()) time.Duration {
|
||||
// time.Sleep(time.Millisecond)
|
||||
// ll.Benchmark(start) // Output: [] INFO: benchmark [start=... end=... duration=...]
|
||||
func Benchmark(start time.Time) {
|
||||
defaultLogger.Fields("start", start, "end", time.Now(), "duration", time.Now().Sub(start)).Infof("benchmark")
|
||||
defaultLogger.Benchmark(start)
|
||||
}
|
||||
|
||||
// Clone returns a new logger with the same configuration as the default logger.
|
||||
@@ -657,3 +652,11 @@ func Mark(names ...string) {
|
||||
defaultLogger.mark(2, names...)
|
||||
|
||||
}
|
||||
|
||||
// Output logs data in a human-readable JSON format at Info level, including caller file and line information.
|
||||
// It is similar to Dbg but formats the output as JSON for better readability. It is thread-safe and respects
|
||||
// the logger’s configuration (e.g., enabled, level, suspend, handler, middleware).
|
||||
func Output(values ...interface{}) {
|
||||
o := NewInspector(defaultLogger)
|
||||
o.Log(2, values...)
|
||||
}
|
||||
|
||||
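Reviewer note: global.go now delegates Measure and Benchmark to the default logger and gains a package-level Output. A hedged usage sketch, with the field names taken from the Logger methods shown later in ll.go:

```go
package main

import (
	"time"

	"github.com/olekukonko/ll"
)

func main() {
	// Measure delegates to defaultLogger.Measure, which now skips nil funcs.
	_ = ll.Measure(func() { time.Sleep(time.Millisecond) }, nil)

	// Benchmark delegates too; the entry carries duration_ms and duration fields.
	start := time.Now()
	time.Sleep(2 * time.Millisecond)
	ll.Benchmark(start)

	// New package-level Output: JSON dump at Info level with caller file:line.
	ll.Output(map[string]int{"key": 42})
}
```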
239 vendor/github.com/olekukonko/ll/inspector.go (generated, vendored, Normal file)
@@ -0,0 +1,239 @@
|
||||
package ll
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"strings"
|
||||
"unsafe"
|
||||
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
// Inspector is a utility for Logger that provides advanced inspection and logging of data
|
||||
// in human-readable JSON format. It uses reflection to access and represent unexported fields,
|
||||
// nested structs, embedded structs, and pointers, making it useful for debugging complex data structures.
|
||||
type Inspector struct {
|
||||
logger *Logger
|
||||
}
|
||||
|
||||
// NewInspector returns a new Inspector instance associated with the provided logger.
|
||||
func NewInspector(logger *Logger) *Inspector {
|
||||
return &Inspector{logger: logger}
|
||||
}
|
||||
|
||||
// Log outputs the given values as indented JSON at the Info level, prefixed with the caller's
|
||||
// file name and line number. It handles structs (including unexported fields, nested, and embedded),
|
||||
// pointers, errors, and other types. The skip parameter determines how many stack frames to skip
|
||||
// when identifying the caller; typically set to 2 to account for the call to Log and its wrapper.
|
||||
//
|
||||
// Example usage within a Logger method:
|
||||
//
|
||||
// o := NewInspector(l)
|
||||
// o.Log(2, someStruct) // Logs JSON representation with caller info
|
||||
func (o *Inspector) Log(skip int, values ...interface{}) {
|
||||
// Skip if logger is suspended or Info level is disabled
|
||||
if o.logger.suspend.Load() || !o.logger.shouldLog(lx.LevelInfo) {
|
||||
return
|
||||
}
|
||||
|
||||
// Retrieve caller information for logging context
|
||||
_, file, line, ok := runtime.Caller(skip)
|
||||
if !ok {
|
||||
o.logger.log(lx.LevelError, lx.ClassText, "Inspector: Unable to parse runtime caller", nil, false)
|
||||
return
|
||||
}
|
||||
|
||||
// Extract short filename for concise output
|
||||
shortFile := file
|
||||
if idx := strings.LastIndex(file, "/"); idx >= 0 {
|
||||
shortFile = file[idx+1:]
|
||||
}
|
||||
|
||||
// Process each value individually
|
||||
for _, value := range values {
|
||||
var jsonData []byte
|
||||
var err error
|
||||
|
||||
// Use reflection for struct types to handle unexported and nested fields
|
||||
val := reflect.ValueOf(value)
|
||||
if val.Kind() == reflect.Ptr {
|
||||
val = val.Elem()
|
||||
}
|
||||
if val.Kind() == reflect.Struct {
|
||||
valueMap := o.structToMap(val)
|
||||
jsonData, err = json.MarshalIndent(valueMap, "", " ")
|
||||
} else if errVal, ok := value.(error); ok {
|
||||
// Special handling for errors to represent them as a simple map
|
||||
value = map[string]string{"error": errVal.Error()}
|
||||
jsonData, err = json.MarshalIndent(value, "", " ")
|
||||
} else {
|
||||
// Fall back to standard JSON marshaling for non-struct types
|
||||
jsonData, err = json.MarshalIndent(value, "", " ")
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
o.logger.log(lx.LevelError, lx.ClassText, fmt.Sprintf("Inspector: JSON encoding error: %v", err), nil, false)
|
||||
continue
|
||||
}
|
||||
|
||||
// Construct log message with file, line, and JSON data
|
||||
msg := fmt.Sprintf("[%s:%d] DUMP: %s", shortFile, line, string(jsonData))
|
||||
o.logger.log(lx.LevelInfo, lx.ClassText, msg, nil, false)
|
||||
}
|
||||
}
|
||||
|
||||
// structToMap recursively converts a struct's reflect.Value to a map[string]interface{}.
|
||||
// It includes unexported fields (named with parentheses), prefixes pointers with '*',
|
||||
// flattens anonymous embedded structs without json tags, and uses unsafe pointers to access
|
||||
// unexported primitive fields when reflect.CanInterface() returns false.
|
||||
func (o *Inspector) structToMap(val reflect.Value) map[string]interface{} {
|
||||
result := make(map[string]interface{})
|
||||
if !val.IsValid() {
|
||||
return result
|
||||
}
|
||||
|
||||
typ := val.Type()
|
||||
for i := 0; i < val.NumField(); i++ {
|
||||
field := val.Field(i)
|
||||
fieldType := typ.Field(i)
|
||||
|
||||
// Determine field name: prefer json tag if present and not "-", else use struct field name
|
||||
baseName := fieldType.Name
|
||||
jsonTag := fieldType.Tag.Get("json")
|
||||
hasJsonTag := false
|
||||
if jsonTag != "" {
|
||||
if idx := strings.Index(jsonTag, ","); idx != -1 {
|
||||
jsonTag = jsonTag[:idx]
|
||||
}
|
||||
if jsonTag != "-" {
|
||||
baseName = jsonTag
|
||||
hasJsonTag = true
|
||||
}
|
||||
}
|
||||
|
||||
// Enclose unexported field names in parentheses
|
||||
fieldName := baseName
|
||||
if !fieldType.IsExported() {
|
||||
fieldName = "(" + baseName + ")"
|
||||
}
|
||||
|
||||
// Handle pointer fields
|
||||
isPtr := fieldType.Type.Kind() == reflect.Ptr
|
||||
if isPtr {
|
||||
fieldName = "*" + fieldName
|
||||
if field.IsNil() {
|
||||
result[fieldName] = nil
|
||||
continue
|
||||
}
|
||||
field = field.Elem()
|
||||
}
|
||||
|
||||
// Recurse for struct fields
|
||||
if field.Kind() == reflect.Struct {
|
||||
subMap := o.structToMap(field)
|
||||
isNested := !fieldType.Anonymous || hasJsonTag
|
||||
if isNested {
|
||||
result[fieldName] = subMap
|
||||
} else {
|
||||
// Flatten embedded struct fields into the parent map, avoiding overwrites
|
||||
for k, v := range subMap {
|
||||
if _, exists := result[k]; !exists {
|
||||
result[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Handle primitive fields
|
||||
if field.CanInterface() {
|
||||
result[fieldName] = field.Interface()
|
||||
} else {
|
||||
// Use unsafe access for unexported primitives
|
||||
ptr := getDataPtr(field)
|
||||
switch field.Kind() {
|
||||
case reflect.String:
|
||||
result[fieldName] = *(*string)(ptr)
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
result[fieldName] = o.getIntFromUnexportedField(field)
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
result[fieldName] = o.getUintFromUnexportedField(field)
|
||||
case reflect.Float32, reflect.Float64:
|
||||
result[fieldName] = o.getFloatFromUnexportedField(field)
|
||||
case reflect.Bool:
|
||||
result[fieldName] = *(*bool)(ptr)
|
||||
default:
|
||||
result[fieldName] = fmt.Sprintf("*unexported %s*", field.Type().String())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// emptyInterface represents the internal structure of an empty interface{}.
|
||||
// This is used for unsafe pointer manipulation to access unexported field data.
|
||||
type emptyInterface struct {
|
||||
typ unsafe.Pointer
|
||||
word unsafe.Pointer
|
||||
}
|
||||
|
||||
// getDataPtr returns an unsafe.Pointer to the underlying data of a reflect.Value.
|
||||
// This enables direct access to unexported fields via unsafe operations.
|
||||
func getDataPtr(v reflect.Value) unsafe.Pointer {
|
||||
return (*emptyInterface)(unsafe.Pointer(&v)).word
|
||||
}
|
||||
|
||||
// getIntFromUnexportedField extracts a signed integer value from an unexported field
|
||||
// using unsafe pointer access. It supports int, int8, int16, int32, and int64 kinds,
|
||||
// returning the value as int64. Returns 0 for unsupported kinds.
|
||||
func (o *Inspector) getIntFromUnexportedField(field reflect.Value) int64 {
|
||||
ptr := getDataPtr(field)
|
||||
switch field.Kind() {
|
||||
case reflect.Int:
|
||||
return int64(*(*int)(ptr))
|
||||
case reflect.Int8:
|
||||
return int64(*(*int8)(ptr))
|
||||
case reflect.Int16:
|
||||
return int64(*(*int16)(ptr))
|
||||
case reflect.Int32:
|
||||
return int64(*(*int32)(ptr))
|
||||
case reflect.Int64:
|
||||
return *(*int64)(ptr)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// getUintFromUnexportedField extracts an unsigned integer value from an unexported field
|
||||
// using unsafe pointer access. It supports uint, uint8, uint16, uint32, and uint64 kinds,
|
||||
// returning the value as uint64. Returns 0 for unsupported kinds.
|
||||
func (o *Inspector) getUintFromUnexportedField(field reflect.Value) uint64 {
|
||||
ptr := getDataPtr(field)
|
||||
switch field.Kind() {
|
||||
case reflect.Uint:
|
||||
return uint64(*(*uint)(ptr))
|
||||
case reflect.Uint8:
|
||||
return uint64(*(*uint8)(ptr))
|
||||
case reflect.Uint16:
|
||||
return uint64(*(*uint16)(ptr))
|
||||
case reflect.Uint32:
|
||||
return uint64(*(*uint32)(ptr))
|
||||
case reflect.Uint64:
|
||||
return *(*uint64)(ptr)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// getFloatFromUnexportedField extracts a floating-point value from an unexported field
|
||||
// using unsafe pointer access. It supports float32 and float64 kinds, returning the value
|
||||
// as float64. Returns 0 for unsupported kinds.
|
||||
func (o *Inspector) getFloatFromUnexportedField(field reflect.Value) float64 {
|
||||
ptr := getDataPtr(field)
|
||||
switch field.Kind() {
|
||||
case reflect.Float32:
|
||||
return float64(*(*float32)(ptr))
|
||||
case reflect.Float64:
|
||||
return *(*float64)(ptr)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
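Reviewer note: a hedged sketch of the new Inspector-backed Output on a struct with an unexported field, adapted from the doc comments in this file and in ll.go (the ll.New(...).Enable() pattern follows those comments; the exact rendering is not reproduced here):

```go
package main

import (
	"errors"

	"github.com/olekukonko/ll"
)

type account struct {
	Name    string  `json:"name"`
	balance float64 // unexported: rendered as "(balance)" via the unsafe accessors above
}

func main() {
	logger := ll.New("app").Enable()

	// Logs indented JSON at Info level, prefixed with the caller's file:line.
	logger.Output(account{Name: "alice", balance: 12.5})

	// Errors are flattened to {"error": "..."} per the special case in Log.
	logger.Output(errors.New("boom"))
}
```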
9 vendor/github.com/olekukonko/ll/lh/colorized.go (generated, vendored)
@@ -2,12 +2,14 @@ package lh
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"io"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
// Palette defines ANSI color codes for various log components.
|
||||
@@ -81,6 +83,7 @@ type ColorizedHandler struct {
|
||||
palette Palette // Color scheme for formatting
|
||||
showTime bool // Whether to display timestamps
|
||||
timeFormat string // Format for timestamps (defaults to time.RFC3339)
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
// ColorOption defines a configuration function for ColorizedHandler.
|
||||
@@ -130,6 +133,10 @@ func NewColorizedHandler(w io.Writer, opts ...ColorOption) *ColorizedHandler {
|
||||
//
|
||||
// handler.Handle(&lx.Entry{Message: "test", Level: lx.LevelInfo}) // Writes colored output
|
||||
func (h *ColorizedHandler) Handle(e *lx.Entry) error {
|
||||
|
||||
h.mu.Lock()
|
||||
defer h.mu.Unlock()
|
||||
|
||||
switch e.Class {
|
||||
case lx.ClassDump:
|
||||
// Handle hex dump entries
|
||||
|
||||
8 vendor/github.com/olekukonko/ll/lh/text.go (generated, vendored)
@@ -2,11 +2,13 @@ package lh
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
// TextHandler is a handler that outputs log entries as plain text.
|
||||
@@ -17,6 +19,7 @@ type TextHandler struct {
|
||||
w io.Writer // Destination for formatted log output
|
||||
showTime bool // Whether to display timestamps
|
||||
timeFormat string // Format for timestamps (defaults to time.RFC3339)
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
// NewTextHandler creates a new TextHandler writing to the specified writer.
|
||||
@@ -55,6 +58,9 @@ func (h *TextHandler) Timestamped(enable bool, format ...string) {
|
||||
//
|
||||
// handler.Handle(&lx.Entry{Message: "test", Level: lx.LevelInfo}) // Writes "INFO: test"
|
||||
func (h *TextHandler) Handle(e *lx.Entry) error {
|
||||
h.mu.Lock()
|
||||
defer h.mu.Unlock()
|
||||
|
||||
// Special handling for dump output
|
||||
if e.Class == lx.ClassDump {
|
||||
return h.handleDumpOutput(e)
|
||||
|
||||
113 vendor/github.com/olekukonko/ll/ll.go (generated, vendored)
@@ -5,8 +5,6 @@ import (
|
||||
"encoding/binary"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/olekukonko/ll/lh"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
"io"
|
||||
"math"
|
||||
"os"
|
||||
@@ -16,6 +14,10 @@ import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/olekukonko/cat"
|
||||
"github.com/olekukonko/ll/lh"
|
||||
"github.com/olekukonko/ll/lx"
|
||||
)
|
||||
|
||||
// Logger manages logging configuration and behavior, encapsulating state such as enablement,
|
||||
@@ -24,7 +26,7 @@ import (
|
||||
type Logger struct {
|
||||
mu sync.RWMutex // Guards concurrent access to fields
|
||||
enabled bool // Determines if logging is enabled
|
||||
suspend bool // uses suspend path for most actions eg. skipping namespace checks
|
||||
suspend atomic.Bool // uses suspend path for most actions eg. skipping namespace checks
|
||||
level lx.LevelType // Minimum log level (e.g., Debug, Info, Warn, Error)
|
||||
namespaces *lx.Namespace // Manages namespace enable/disable states
|
||||
currentPath string // Current namespace path (e.g., "parent/child")
|
||||
@@ -97,7 +99,11 @@ func (l *Logger) AddContext(key string, value interface{}) *Logger {
|
||||
// logger.Benchmark(start) // Output: [app] INFO: benchmark [start=... end=... duration=...]
|
||||
func (l *Logger) Benchmark(start time.Time) time.Duration {
|
||||
duration := time.Since(start)
|
||||
l.Fields("start", start, "end", time.Now(), "duration", duration).Infof("benchmark")
|
||||
l.Fields(
|
||||
"duration_ms", duration.Milliseconds(),
|
||||
"duration", duration.String(),
|
||||
).Infof("benchmark completed")
|
||||
|
||||
return duration
|
||||
}
|
||||
|
||||
@@ -220,7 +226,7 @@ func (l *Logger) Dbg(values ...interface{}) {
|
||||
// logger.Debug("Debugging") // Output: [app] DEBUG: Debugging
|
||||
func (l *Logger) Debug(args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -229,7 +235,7 @@ func (l *Logger) Debug(args ...any) {
|
||||
return
|
||||
}
|
||||
|
||||
l.log(lx.LevelDebug, lx.ClassText, concatSpaced(args...), nil, false)
|
||||
l.log(lx.LevelDebug, lx.ClassText, cat.Space(args...), nil, false)
|
||||
}
|
||||
|
||||
// Debugf logs a formatted message at Debug level, delegating to Debug. It is thread-safe.
|
||||
@@ -239,7 +245,7 @@ func (l *Logger) Debug(args ...any) {
|
||||
// logger.Debugf("Debug %s", "message") // Output: [app] DEBUG: Debug message
|
||||
func (l *Logger) Debugf(format string, args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -344,6 +350,21 @@ func (l *Logger) Dump(values ...interface{}) {
|
||||
}
|
||||
}
|
||||
|
||||
// Output logs data in a human-readable JSON format at Info level, including caller file and line information.
|
||||
// It is similar to Dbg but formats the output as JSON for better readability. It is thread-safe and respects
|
||||
// the logger's configuration (e.g., enabled, level, suspend, handler, middleware).
|
||||
// Example:
|
||||
//
|
||||
// logger := New("app").Enable()
|
||||
// x := map[string]int{"key": 42}
|
||||
// logger.Output(x) // Output: [app] INFO: [file.go:123] JSON: {"key": 42}
|
||||
//
|
||||
// Logger method to provide access to Output functionality
|
||||
func (l *Logger) Output(values ...interface{}) {
|
||||
o := NewInspector(l)
|
||||
o.Log(2, values...)
|
||||
}
|
||||
|
||||
// Enable activates logging, allowing logs to be emitted if other conditions (e.g., level,
|
||||
// namespace) are met. It is thread-safe using a write lock and returns the logger for chaining.
|
||||
// Example:
|
||||
@@ -432,7 +453,7 @@ func (l *Logger) Err(errs ...error) {
|
||||
// logger.Error("Error occurred") // Output: [app] ERROR: Error occurred
|
||||
func (l *Logger) Error(args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -440,7 +461,7 @@ func (l *Logger) Error(args ...any) {
|
||||
if !l.shouldLog(lx.LevelError) {
|
||||
return
|
||||
}
|
||||
l.log(lx.LevelError, lx.ClassText, concatSpaced(args...), nil, false)
|
||||
l.log(lx.LevelError, lx.ClassText, cat.Space(args...), nil, false)
|
||||
}
|
||||
|
||||
// Errorf logs a formatted message at Error level, delegating to Error. It is thread-safe.
|
||||
@@ -450,7 +471,7 @@ func (l *Logger) Error(args ...any) {
|
||||
// logger.Errorf("Error %s", "occurred") // Output: [app] ERROR: Error occurred
|
||||
func (l *Logger) Errorf(format string, args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -465,7 +486,7 @@ func (l *Logger) Errorf(format string, args ...any) {
|
||||
// logger.Fatal("Fatal error") // Output: [app] ERROR: Fatal error [stack=...], then exits
|
||||
func (l *Logger) Fatal(args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -474,7 +495,7 @@ func (l *Logger) Fatal(args ...any) {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
l.log(lx.LevelError, lx.ClassText, concatSpaced(args...), nil, true)
|
||||
l.log(lx.LevelError, lx.ClassText, cat.Space(args...), nil, false)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
@@ -486,7 +507,7 @@ func (l *Logger) Fatal(args ...any) {
|
||||
// logger.Fatalf("Fatal %s", "error") // Output: [app] ERROR: Fatal error [stack=...], then exits
|
||||
func (l *Logger) Fatalf(format string, args ...any) {
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -503,7 +524,7 @@ func (l *Logger) Field(fields map[string]interface{}) *FieldBuilder {
|
||||
fb := &FieldBuilder{logger: l, fields: make(map[string]interface{})}
|
||||
|
||||
// check if suspended
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return fb
|
||||
}
|
||||
|
||||
@@ -524,7 +545,7 @@ func (l *Logger) Field(fields map[string]interface{}) *FieldBuilder {
|
||||
func (l *Logger) Fields(pairs ...any) *FieldBuilder {
|
||||
fb := &FieldBuilder{logger: l, fields: make(map[string]interface{})}
|
||||
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return fb
|
||||
}
|
||||
|
||||
@@ -650,7 +671,7 @@ func (l *Logger) Indent(depth int) *Logger {
|
||||
// logger := New("app").Enable().Style(lx.NestedPath)
|
||||
// logger.Info("Started") // Output: [app]: INFO: Started
|
||||
func (l *Logger) Info(args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -658,7 +679,7 @@ func (l *Logger) Info(args ...any) {
|
||||
return
|
||||
}
|
||||
|
||||
l.log(lx.LevelInfo, lx.ClassText, concatSpaced(args...), nil, false)
|
||||
l.log(lx.LevelInfo, lx.ClassText, cat.Space(args...), nil, false)
|
||||
}
|
||||
|
||||
// Infof logs a formatted message at Info level, delegating to Info. It is thread-safe.
|
||||
@@ -667,7 +688,7 @@ func (l *Logger) Info(args ...any) {
|
||||
// logger := New("app").Enable().Style(lx.NestedPath)
|
||||
// logger.Infof("Started %s", "now") // Output: [app]: INFO: Started now
|
||||
func (l *Logger) Infof(format string, args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -771,12 +792,20 @@ func (l *Logger) mark(skip int, names ...string) {
|
||||
// // Output: [app] INFO: function executed [duration=~1ms]
|
||||
func (l *Logger) Measure(fns ...func()) time.Duration {
|
||||
start := time.Now()
|
||||
// Execute all provided functions
|
||||
|
||||
for _, fn := range fns {
|
||||
fn()
|
||||
if fn != nil {
|
||||
fn()
|
||||
}
|
||||
}
|
||||
|
||||
duration := time.Since(start)
|
||||
l.Fields("duration", duration).Infof("function executed")
|
||||
l.Fields(
|
||||
"duration_ns", duration.Nanoseconds(),
|
||||
"duration", duration.String(),
|
||||
"duration_ms", fmt.Sprintf("%.3fms", float64(duration.Nanoseconds())/1e6),
|
||||
).Infof("execution completed")
|
||||
|
||||
return duration
|
||||
}
|
||||
|
||||
@@ -789,7 +818,7 @@ func (l *Logger) Measure(fns ...func()) time.Duration {
|
||||
// child := parent.Namespace("child")
|
||||
// child.Info("Child log") // Output: [parent/child] INFO: Child log
|
||||
func (l *Logger) Namespace(name string) *Logger {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return l
|
||||
}
|
||||
|
||||
@@ -897,9 +926,9 @@ func (l *Logger) NamespaceEnabled(relativePath string) bool {
|
||||
// logger.Panic("Panic error") // Output: [app] ERROR: Panic error [stack=...], then panics
|
||||
func (l *Logger) Panic(args ...any) {
|
||||
// Build message by concatenating arguments with spaces
|
||||
msg := concatSpaced(args...)
|
||||
msg := cat.Space(args...)
|
||||
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
panic(msg)
|
||||
}
|
||||
|
||||
@@ -942,7 +971,7 @@ func (l *Logger) Prefix(prefix string) *Logger {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Print("message", "value") // Output: [app] INFO: message value
|
||||
func (l *Logger) Print(args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -950,7 +979,7 @@ func (l *Logger) Print(args ...any) {
|
||||
if !l.shouldLog(lx.LevelInfo) {
|
||||
return
|
||||
}
|
||||
l.log(lx.LevelNone, lx.ClassRaw, concatSpaced(args...), nil, false)
|
||||
l.log(lx.LevelNone, lx.ClassRaw, cat.Space(args...), nil, false)
|
||||
}
|
||||
|
||||
// Println logs a message at Info level without format specifiers, minimizing allocations
|
||||
@@ -960,7 +989,7 @@ func (l *Logger) Print(args ...any) {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Println("message", "value") // Output: [app] INFO: message value
|
||||
func (l *Logger) Println(args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -968,7 +997,7 @@ func (l *Logger) Println(args ...any) {
|
||||
if !l.shouldLog(lx.LevelInfo) {
|
||||
return
|
||||
}
|
||||
l.log(lx.LevelNone, lx.ClassRaw, concatenate(lx.Space, nil, []any{lx.Newline}, args...), nil, false)
|
||||
l.log(lx.LevelNone, lx.ClassRaw, cat.SuffixWith(lx.Space, lx.Newline, args...), nil, false)
|
||||
}
|
||||
|
||||
// Printf logs a formatted message at Info level, delegating to Print. It is thread-safe.
|
||||
@@ -977,7 +1006,7 @@ func (l *Logger) Println(args ...any) {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Printf("Message %s", "value") // Output: [app] INFO: Message value
|
||||
func (l *Logger) Printf(format string, args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1004,9 +1033,7 @@ func (l *Logger) Remove(m *Middleware) {
|
||||
// logger.Resume()
|
||||
// logger.Info("Resumed") // Output: [app] INFO: Resumed
|
||||
func (l *Logger) Resume() *Logger {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
l.suspend = false // Clear suspend flag to resume logging
|
||||
l.suspend.Store(false)
|
||||
return l
|
||||
}
|
||||
|
||||
@@ -1032,9 +1059,7 @@ func (l *Logger) Separator(separator string) *Logger {
|
||||
// logger.Suspend()
|
||||
// logger.Info("Ignored") // No output
|
||||
func (l *Logger) Suspend() *Logger {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
l.suspend = true // Set suspend flag to pause logging
|
||||
l.suspend.Store(true)
|
||||
return l
|
||||
}
|
||||
|
||||
@@ -1047,9 +1072,7 @@ func (l *Logger) Suspend() *Logger {
|
||||
// fmt.Println("Logging is suspended") // Prints message
|
||||
// }
|
||||
func (l *Logger) Suspended() bool {
|
||||
l.mu.Lock()
|
||||
defer l.mu.Unlock()
|
||||
return l.suspend // Return current suspend state
|
||||
return l.suspend.Load()
|
||||
}
|
||||
|
||||
// Stack logs messages at Error level with a stack trace for each provided argument.
|
||||
@@ -1059,7 +1082,7 @@ func (l *Logger) Suspended() bool {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Stack("Critical error") // Output: [app] ERROR: Critical error [stack=...]
|
||||
func (l *Logger) Stack(args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1069,7 +1092,7 @@ func (l *Logger) Stack(args ...any) {
|
||||
}
|
||||
|
||||
for _, arg := range args {
|
||||
l.log(lx.LevelError, lx.ClassText, concat(arg), nil, true)
|
||||
l.log(lx.LevelError, lx.ClassText, cat.Concat(arg), nil, true)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1080,7 +1103,7 @@ func (l *Logger) Stack(args ...any) {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Stackf("Critical %s", "error") // Output: [app] ERROR: Critical error [stack=...]
|
||||
func (l *Logger) Stackf(format string, args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1171,7 +1194,7 @@ func (l *Logger) Use(fn lx.Handler) *Middleware {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Warn("Warning") // Output: [app] WARN: Warning
|
||||
func (l *Logger) Warn(args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1180,7 +1203,7 @@ func (l *Logger) Warn(args ...any) {
|
||||
return
|
||||
}
|
||||
|
||||
l.log(lx.LevelWarn, lx.ClassText, concatSpaced(args...), nil, false)
|
||||
l.log(lx.LevelWarn, lx.ClassText, cat.Space(args...), nil, false)
|
||||
}
|
||||
|
||||
// Warnf logs a formatted message at Warn level, delegating to Warn. It is thread-safe.
|
||||
@@ -1189,7 +1212,7 @@ func (l *Logger) Warn(args ...any) {
|
||||
// logger := New("app").Enable()
|
||||
// logger.Warnf("Warning %s", "issued") // Output: [app] WARN: Warning issued
|
||||
func (l *Logger) Warnf(format string, args ...any) {
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return
|
||||
}
|
||||
|
||||
@@ -1363,7 +1386,7 @@ func (l *Logger) shouldLog(level lx.LevelType) bool {
|
||||
}
|
||||
|
||||
// check for suspend mode
|
||||
if l.suspend {
|
||||
if l.suspend.Load() {
|
||||
return false
|
||||
}
|
||||
|
||||
|
||||
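Reviewer note: the suspend flag above is migrated from a mutex-guarded bool to atomic.Bool, while the public surface stays the same. A short sketch of the behaviour the call-site changes preserve (constructor/Enable pattern as per the doc comments):

```go
package main

import (
	"fmt"

	"github.com/olekukonko/ll"
)

func main() {
	logger := ll.New("app").Enable()

	logger.Suspend()
	logger.Info("ignored")          // dropped: suspend.Load() reports true
	fmt.Println(logger.Suspended()) // true

	logger.Resume()
	logger.Info("Resumed")          // logged again, e.g. "[app] INFO: Resumed"
}
```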
93 vendor/github.com/olekukonko/ll/lx/lx.go (generated, vendored)
@@ -1,6 +1,7 @@
|
||||
package lx
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
@@ -31,11 +32,28 @@ const (
|
||||
// These constants define the severity levels for log messages, used to filter logs based
|
||||
// on the logger’s minimum level. They are ordered to allow comparison (e.g., LevelDebug < LevelWarn).
|
||||
const (
|
||||
LevelNone LevelType = iota // Debug level for detailed diagnostic information
|
||||
LevelInfo // Info level for general operational messages
|
||||
LevelWarn // Warn level for warning conditions
|
||||
LevelError // Error level for error conditions requiring attention
|
||||
LevelDebug // None level for logs without a specific severity (e.g., raw output)
|
||||
LevelNone LevelType = iota // Debug level for detailed diagnostic information
|
||||
LevelInfo // Info level for general operational messages
|
||||
LevelWarn // Warn level for warning conditions
|
||||
LevelError // Error level for error conditions requiring attention
|
||||
LevelDebug // None level for logs without a specific severity (e.g., raw output)
|
||||
LevelUnknown // None level for logs without a specific severity (e.g., raw output)
|
||||
)
|
||||
|
||||
// String constants for each level
|
||||
const (
|
||||
DebugString = "DEBUG"
|
||||
InfoString = "INFO"
|
||||
WarnString = "WARN"
|
||||
ErrorString = "ERROR"
|
||||
NoneString = "NONE"
|
||||
UnknownString = "UNKNOWN"
|
||||
|
||||
TextString = "TEXT"
|
||||
JSONString = "JSON"
|
||||
DumpString = "DUMP"
|
||||
SpecialString = "SPECIAL"
|
||||
RawString = "RAW"
|
||||
)
|
||||
|
||||
// Log class constants, defining the type of log entry.
|
||||
@@ -47,6 +65,7 @@ const (
|
||||
ClassDump // Dump entries for hex/ASCII dumps
|
||||
ClassSpecial // Special entries for custom or non-standard logs
|
||||
ClassRaw // Raw entries for unformatted output
|
||||
ClassUnknown // Raw entries for unformatted output
|
||||
)
|
||||
|
||||
// Namespace style constants.
|
||||
@@ -72,17 +91,37 @@ type LevelType int
|
||||
func (l LevelType) String() string {
|
||||
switch l {
|
||||
case LevelDebug:
|
||||
return "DEBUG"
|
||||
return DebugString
|
||||
case LevelInfo:
|
||||
return "INFO"
|
||||
return InfoString
|
||||
case LevelWarn:
|
||||
return "WARN"
|
||||
return WarnString
|
||||
case LevelError:
|
||||
return "ERROR"
|
||||
return ErrorString
|
||||
case LevelNone:
|
||||
return "NONE"
|
||||
return NoneString
|
||||
default:
|
||||
return "UNKNOWN"
|
||||
return UnknownString
|
||||
}
|
||||
}
|
||||
|
||||
// LevelParse converts a string to its corresponding LevelType.
|
||||
// It parses a string (case-insensitive) and returns the corresponding LevelType, defaulting to
|
||||
// LevelUnknown for unrecognized strings. Supports "WARNING" as an alias for "WARN".
|
||||
func LevelParse(s string) LevelType {
|
||||
switch strings.ToUpper(s) {
|
||||
case DebugString:
|
||||
return LevelDebug
|
||||
case InfoString:
|
||||
return LevelInfo
|
||||
case WarnString, "WARNING": // Allow both "WARN" and "WARNING"
|
||||
return LevelWarn
|
||||
case ErrorString:
|
||||
return LevelError
|
||||
case NoneString:
|
||||
return LevelNone
|
||||
default:
|
||||
return LevelUnknown
|
||||
}
|
||||
}
|
||||
|
||||
@@ -149,16 +188,36 @@ type ClassType int
|
||||
func (t ClassType) String() string {
|
||||
switch t {
|
||||
case ClassText:
|
||||
return "TEST" // Note: Likely a typo, should be "TEXT"
|
||||
return TextString
|
||||
case ClassJSON:
|
||||
return "JSON"
|
||||
return JSONString
|
||||
case ClassDump:
|
||||
return "DUMP"
|
||||
return DumpString
|
||||
case ClassSpecial:
|
||||
return "SPECIAL"
|
||||
return SpecialString
|
||||
case ClassRaw:
|
||||
return "RAW"
|
||||
return RawString
|
||||
default:
|
||||
return "UNKNOWN"
|
||||
return UnknownString
|
||||
}
|
||||
}
|
||||
|
||||
// ParseClass converts a string to its corresponding ClassType.
|
||||
// It parses a string (case-insensitive) and returns the corresponding ClassType, defaulting to
|
||||
// ClassUnknown for unrecognized strings.
|
||||
func ParseClass(s string) ClassType {
|
||||
switch strings.ToUpper(s) {
|
||||
case TextString:
|
||||
return ClassText
|
||||
case JSONString:
|
||||
return ClassJSON
|
||||
case DumpString:
|
||||
return ClassDump
|
||||
case SpecialString:
|
||||
return ClassSpecial
|
||||
case RawString:
|
||||
return ClassRaw
|
||||
default:
|
||||
return ClassUnknown
|
||||
}
|
||||
}
|
||||
|
||||
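Reviewer note: lx gains string constants plus LevelParse and ParseClass round-trips. A small sketch using only identifiers visible in this hunk (the "WARNING" alias comes from the switch above):

```go
package main

import (
	"fmt"

	"github.com/olekukonko/ll/lx"
)

func main() {
	// Case-insensitive level parsing, with "WARNING" accepted as an alias of WARN.
	fmt.Println(lx.LevelParse("warning") == lx.LevelWarn)  // true
	fmt.Println(lx.LevelParse("bogus") == lx.LevelUnknown) // true

	// Class round-trip through the new string constants.
	fmt.Println(lx.ParseClass(lx.ClassJSON.String()) == lx.ClassJSON) // true
	fmt.Println(lx.ClassText.String())                                // "TEXT" (was "TEST")
}
```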
1 vendor/github.com/olekukonko/tablewriter/.gitignore (generated, vendored)
@@ -8,3 +8,4 @@
|
||||
dev.sh
|
||||
*csv2table
|
||||
_test/
|
||||
*.test
|
||||
|
||||
96 vendor/github.com/olekukonko/tablewriter/MIGRATION.md (generated, vendored)
@@ -206,51 +206,51 @@ func main() {
|
||||
|
||||
The `defaultConfig()` function (`config.go:defaultConfig`) establishes baseline settings for new tables, ensuring predictable behavior unless overridden. Below is a detailed table of default parameters, organized by configuration section, to help you understand the starting point for table behavior and appearance.
|
||||
|
||||
| Section | Parameter | Default Value | Description |
|
||||
|---------------|-------------------------------|-----------------------------------|-----------------------------------------------------------------------------|
|
||||
| **Header** | `Alignment.Global` | `tw.AlignCenter` | Centers header text globally unless overridden by `PerColumn`. |
|
||||
| Header | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global` unless specified. |
|
||||
| Header | `Formatting.AutoFormat` | `tw.On` | Applies title case (e.g., "col_one" → "COL ONE") to header content. |
|
||||
| Header | `Formatting.AutoWrap` | `tw.WrapTruncate` | Truncates long header text with "…" based on width constraints. |
|
||||
| Header | `Formatting.MergeMode` | `tw.MergeNone` | Disables cell merging in headers by default. |
|
||||
| Header | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of header cells. |
|
||||
| Header | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global` unless specified. |
|
||||
| Header | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for header cells unless set. |
|
||||
| Header | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits unless specified. |
|
||||
| Header | `Filter.Global` | `nil` | No global content transformation for header cells. |
|
||||
| Header | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations unless specified. |
|
||||
| **Row** | `Alignment.Global` | `tw.AlignLeft` | Left-aligns row text globally unless overridden by `PerColumn`. |
|
||||
| Row | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global`. |
|
||||
| Row | `Formatting.AutoFormat` | `tw.Off` | Disables auto-formatting (e.g., title case) for row content. |
|
||||
| Row | `Formatting.AutoWrap` | `tw.WrapNormal` | Wraps long row text naturally at word boundaries based on width constraints.|
|
||||
| Row | `Formatting.MergeMode` | `tw.MergeNone` | Disables cell merging in rows by default. |
|
||||
| Row | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of row cells. |
|
||||
| Row | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global`. |
|
||||
| Row | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for row cells. |
|
||||
| Row | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits. |
|
||||
| Row | `Filter.Global` | `nil` | No global content transformation for row cells. |
|
||||
| Row | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations. |
|
||||
| **Footer** | `Alignment.Global` | `tw.AlignRight` | Right-aligns footer text globally unless overridden by `PerColumn`. |
|
||||
| Footer | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global`. |
|
||||
| Footer | `Formatting.AutoFormat` | `tw.Off` | Disables auto-formatting for footer content. |
|
||||
| Footer | `Formatting.AutoWrap` | `tw.WrapNormal` | Wraps long footer text naturally. |
|
||||
| Footer | `Formatting.MergeMode` | `tw.MergeNone` | Disables cell merging in footers. |
|
||||
| Footer | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of footer cells. |
|
||||
| Footer | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global`. |
|
||||
| Footer | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for footer cells. |
|
||||
| Footer | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits. |
|
||||
| Footer | `Filter.Global` | `nil` | No global content transformation for footer cells. |
|
||||
| Footer | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations. |
|
||||
| **Global** | `MaxWidth` | `0` (unlimited) | No overall table width limit. |
|
||||
| Global | `Behavior.AutoHide` | `tw.Off` | Displays empty columns (ignored in streaming). |
|
||||
| Global | `Behavior.TrimSpace` | `tw.On` | Trims leading/trailing spaces from cell content. |
|
||||
| Global | `Behavior.Header` | `tw.Control{Hide: tw.Off}` | Shows header if content is provided. |
|
||||
| Global | `Behavior.Footer` | `tw.Control{Hide: tw.Off}` | Shows footer if content is provided. |
|
||||
| Global | `Behavior.Compact` | `tw.Compact{Merge: tw.Off}` | No compact width optimization for merged cells. |
|
||||
| Global | `Debug` | `false` | Disables debug logging. |
|
||||
| Global | `Stream.Enable` | `false` | Disables streaming mode by default. |
|
||||
| Global | `Widths.Global` | `0` (unlimited) | No fixed column width unless specified. |
|
||||
| Global | `Widths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column fixed widths unless specified. |
|
||||
| Section | Parameter | Default Value | Description |
|
||||
|---------------|--------------------------|-----------------------------------|-----------------------------------------------------------------------------|
|
||||
| **Header** | `Alignment.Global` | `tw.AlignCenter` | Centers header text globally unless overridden by `PerColumn`. |
|
||||
| Header | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global` unless specified. |
|
||||
| Header | `Formatting.AutoFormat` | `tw.On` | Applies title case (e.g., "col_one" → "COL ONE") to header content. |
|
||||
| Header | `Formatting.AutoWrap` | `tw.WrapTruncate` | Truncates long header text with "…" based on width constraints. |
|
||||
| Header | `Merging.Mode` | `tw.MergeNone` | Disables cell merging in headers by default. |
|
||||
| Header | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of header cells. |
|
||||
| Header | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global` unless specified. |
|
||||
| Header | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for header cells unless set. |
|
||||
| Header | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits unless specified. |
|
||||
| Header | `Filter.Global` | `nil` | No global content transformation for header cells. |
|
||||
| Header | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations unless specified. |
|
||||
| **Row** | `Alignment.Global` | `tw.AlignLeft` | Left-aligns row text globally unless overridden by `PerColumn`. |
|
||||
| Row | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global`. |
|
||||
| Row | `Formatting.AutoFormat` | `tw.Off` | Disables auto-formatting (e.g., title case) for row content. |
|
||||
| Row | `Formatting.AutoWrap` | `tw.WrapNormal` | Wraps long row text naturally at word boundaries based on width constraints.|
|
||||
| Row | `Merging.Mode` | `tw.MergeNone` | Disables cell merging in rows by default. |
|
||||
| Row | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of row cells. |
|
||||
| Row | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global`. |
|
||||
| Row | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for row cells. |
|
||||
| Row | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits. |
|
||||
| Row | `Filter.Global` | `nil` | No global content transformation for row cells. |
|
||||
| Row | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations. |
|
||||
| **Footer** | `Alignment.Global` | `tw.AlignRight` | Right-aligns footer text globally unless overridden by `PerColumn`. |
|
||||
| Footer | `Alignment.PerColumn` | `[]tw.Align{}` | Empty; falls back to `Global`. |
|
||||
| Footer | `Formatting.AutoFormat` | `tw.Off` | Disables auto-formatting for footer content. |
|
||||
| Footer | `Formatting.AutoWrap` | `tw.WrapNormal` | Wraps long footer text naturally. |
|
||||
| Footer | `Formatting.MergeMode` | `tw.MergeNone` | Disables cell merging in footers. |
|
||||
| Footer | `Padding.Global` | `tw.PaddingDefault` (`" "`) | Adds one space on left and right of footer cells. |
|
||||
| Footer | `Padding.PerColumn` | `[]tw.Padding{}` | Empty; falls back to `Global`. |
|
||||
| Footer | `ColMaxWidths.Global` | `0` (unlimited) | No maximum content width for footer cells. |
|
||||
| Footer | `ColMaxWidths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column content width limits. |
|
||||
| Footer | `Filter.Global` | `nil` | No global content transformation for footer cells. |
|
||||
| Footer | `Filter.PerColumn` | `[]func(string) string{}` | No per-column content transformations. |
|
||||
| **Global** | `MaxWidth` | `0` (unlimited) | No overall table width limit. |
|
||||
| Global | `Behavior.AutoHide` | `tw.Off` | Displays empty columns (ignored in streaming). |
|
||||
| Global | `Behavior.TrimSpace` | `tw.On` | Trims leading/trailing spaces from cell content. |
|
||||
| Global | `Behavior.Header` | `tw.Control{Hide: tw.Off}` | Shows header if content is provided. |
|
||||
| Global | `Behavior.Footer` | `tw.Control{Hide: tw.Off}` | Shows footer if content is provided. |
|
||||
| Global | `Behavior.Compact` | `tw.Compact{Merge: tw.Off}` | No compact width optimization for merged cells. |
|
||||
| Global | `Debug` | `false` | Disables debug logging. |
|
||||
| Global | `Stream.Enable` | `false` | Disables streaming mode by default. |
|
||||
| Global | `Widths.Global` | `0` (unlimited) | No fixed column width unless specified. |
|
||||
| Global | `Widths.PerColumn` | `tw.NewMapper[int, int]()` | Empty map; no per-column fixed widths unless specified. |

**Notes**:
- Defaults can be overridden using any configuration method.
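
To make those defaults concrete, here is a minimal sketch of overriding two of them through `tablewriter.WithConfig`. It uses only the option and `tw` types that already appear in the hunks on this page; the `Append`/`Render` calls and the exact `Config.MaxWidth` field are assumptions based on the surrounding examples, not part of this diff.

```go
package main

import (
	"os"

	"github.com/olekukonko/tablewriter"
	"github.com/olekukonko/tablewriter/tw"
)

func main() {
	// Override only two defaults; every other setting keeps its
	// defaultConfig() value from the table above.
	table := tablewriter.NewTable(os.Stdout,
		tablewriter.WithConfig(tablewriter.Config{
			MaxWidth: 80, // default 0 = unlimited overall table width (assumed field name)
			Row: tw.CellConfig{
				// default row alignment is tw.AlignLeft
				Alignment: tw.CellAlignment{Global: tw.AlignRight},
			},
		}),
	)
	table.Header("Package", "Version", "Status")
	// Append/Render are assumed to accept a row slice as in the README examples.
	table.Append([]string{"tablewriter", "v1.1.1", "latest"})
	table.Render()
}
```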
@@ -2210,7 +2210,7 @@ import (
func main() {
	// Horizontal Merging (Similar to v0.0.5)
	tableH := tablewriter.NewTable(os.Stdout,
-		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Formatting: tw.CellFormatting{MergeMode: tw.MergeHorizontal}}}),
+		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Merging: tw.CellMerging{Mode: tw.MergeHorizontal}}}),
		tablewriter.WithRenderer(renderer.NewBlueprint(tw.Rendition{Symbols: tw.NewSymbols(tw.StyleASCII)})), // Specify renderer for symbols
	)
	tableH.Header("Category", "Item", "Item", "Notes") // Note: Two "Item" headers for demo
@@ -2219,7 +2219,7 @@ func main() {

	// Vertical Merging
	tableV := tablewriter.NewTable(os.Stdout,
-		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Formatting: tw.CellFormatting{MergeMode: tw.MergeVertical}}}),
+		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Merging: tw.CellMerging{Mode: tw.MergeVertical}}}),
		tablewriter.WithRenderer(renderer.NewBlueprint(tw.Rendition{Symbols: tw.NewSymbols(tw.StyleASCII)})),
	)
	tableV.Header("User", "Permission")
@@ -2230,7 +2230,7 @@ func main() {

	// Hierarchical Merging
	tableHier := tablewriter.NewTable(os.Stdout,
-		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Formatting: tw.CellFormatting{MergeMode: tw.MergeHierarchical}}}),
+		tablewriter.WithConfig(tablewriter.Config{Row: tw.CellConfig{Merging: tw.CellMerging{Mode: tw.MergeHierarchical}}}),
		tablewriter.WithRenderer(renderer.NewBlueprint(tw.Rendition{Symbols: tw.NewSymbols(tw.StyleASCII)})),
	)
	tableHier.Header("Group", "SubGroup", "Item")
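
The three hunks above all make the same mechanical change: merging moves from `tw.CellFormatting.MergeMode` to `tw.CellMerging.Mode`. For reference, a self-contained version of the horizontal-merge snippet, with the parts this diff does not show (import paths, row data, `Append`/`Render`) filled in as assumptions:

```go
package main

import (
	"os"

	"github.com/olekukonko/tablewriter"
	"github.com/olekukonko/tablewriter/renderer"
	"github.com/olekukonko/tablewriter/tw"
)

func main() {
	// v1.1.1 form: merging is configured via tw.CellMerging rather than
	// tw.CellFormatting.MergeMode (the old field keeps working; see the
	// config.go hunks further down).
	tableH := tablewriter.NewTable(os.Stdout,
		tablewriter.WithConfig(tablewriter.Config{
			Row: tw.CellConfig{Merging: tw.CellMerging{Mode: tw.MergeHorizontal}},
		}),
		tablewriter.WithRenderer(renderer.NewBlueprint(tw.Rendition{Symbols: tw.NewSymbols(tw.StyleASCII)})),
	)
	tableH.Header("Category", "Item", "Item", "Notes")
	// Identical adjacent cells in a row merge horizontally; the data and the
	// Append/Render calls are assumptions, not part of this diff.
	tableH.Append([]string{"Fruit", "Apple", "Apple", "same item twice"})
	tableH.Render()
}
```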
20 vendor/github.com/olekukonko/tablewriter/README.md (generated, vendored)
@@ -28,7 +28,7 @@ go get github.com/olekukonko/tablewriter@v0.0.5
#### Latest Version
The latest stable version
```bash
-go get github.com/olekukonko/tablewriter@v1.1.0
+go get github.com/olekukonko/tablewriter@v1.1.1
```

**Warning:** Version `v1.0.0` contains missing functionality and should not be used.
@@ -62,7 +62,7 @@ func main() {
	data := [][]string{
		{"Package", "Version", "Status"},
		{"tablewriter", "v0.0.5", "legacy"},
-		{"tablewriter", "v1.1.0", "latest"},
+		{"tablewriter", "v1.1.1", "latest"},
	}

	table := tablewriter.NewWriter(os.Stdout)
@@ -77,7 +77,7 @@ func main() {
│ PACKAGE │ VERSION │ STATUS │
├─────────────┼─────────┼────────┤
│ tablewriter │ v0.0.5 │ legacy │
-│ tablewriter │ v1.1.0 │ latest │
+│ tablewriter │ v1.1.1 │ latest │
└─────────────┴─────────┴────────┘
```

@@ -520,7 +520,7 @@ func main() {
	tablewriter.WithConfig(tablewriter.Config{
		Header: tw.CellConfig{Alignment: tw.CellAlignment{Global: tw.AlignCenter}},
		Row: tw.CellConfig{
-			Formatting: tw.CellFormatting{MergeMode: tw.MergeHierarchical},
+			Merging: tw.CellMerging{Mode: tw.MergeHierarchical},
			Alignment: tw.CellAlignment{Global: tw.AlignLeft},
		},
	}),
@@ -579,8 +579,8 @@ func main() {
	})),
	tablewriter.WithConfig(tablewriter.Config{
		Row: tw.CellConfig{
-			Formatting: tw.CellFormatting{MergeMode: tw.MergeBoth},
-			Alignment: tw.CellAlignment{PerColumn: []tw.Align{tw.Skip, tw.Skip, tw.AlignRight, tw.AlignLeft}},
+			Merging: tw.CellMerging{Mode: tw.MergeBoth},
+			Alignment: tw.CellAlignment{PerColumn: []tw.Align{tw.Skip, tw.Skip, tw.AlignRight, tw.AlignLeft}},
		},

		Footer: tw.CellConfig{
@@ -806,12 +806,12 @@ func main() {
	tablewriter.WithRenderer(renderer.NewHTML(htmlCfg)),
	tablewriter.WithConfig(tablewriter.Config{
		Header: tw.CellConfig{
-			Formatting: tw.CellFormatting{MergeMode: tw.MergeHorizontal}, // Merge identical header cells
-			Alignment: tw.CellAlignment{Global: tw.AlignCenter},
+			Merging: tw.CellMerging{Mode: tw.MergeHorizontal}, // Merge identical header cells
+			Alignment: tw.CellAlignment{Global: tw.AlignCenter},
		},
		Row: tw.CellConfig{
-			Formatting: tw.CellFormatting{MergeMode: tw.MergeHorizontal}, // Merge identical row cells
-			Alignment: tw.CellAlignment{Global: tw.AlignLeft},
+			Merging: tw.CellMerging{Mode: tw.MergeHorizontal}, // Merge identical row cells
+			Alignment: tw.CellAlignment{Global: tw.AlignLeft},
		},
		Footer: tw.CellConfig{Alignment: tw.CellAlignment{Global: tw.AlignRight}},
	}),
194 vendor/github.com/olekukonko/tablewriter/benchstat.txt (generated, vendored, new file)
@@ -0,0 +1,194 @@
|
||||
goos: darwin
|
||||
goarch: arm64
|
||||
pkg: github.com/olekukonko/tablewriter/pkg/twwarp
|
||||
cpu: Apple M2
|
||||
│ old.txt │ new.txt │
|
||||
│ sec/op │ sec/op vs base │
|
||||
WrapString-8 112.8µ ± 1% 112.9µ ± 2% ~ (p=0.589 n=6)
|
||||
WrapStringWithSpaces-8 113.4µ ± 1% 113.7µ ± 1% ~ (p=0.310 n=6)
|
||||
geomean 113.1µ 113.3µ +0.15%
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ B/s │ B/s vs base │
|
||||
WrapString-8 84.92Mi ± 1% 84.82Mi ± 2% ~ (p=0.589 n=6)
|
||||
WrapStringWithSpaces-8 84.43Mi ± 1% 84.27Mi ± 1% ~ (p=0.310 n=6)
|
||||
geomean 84.68Mi 84.55Mi -0.15%
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ B/op │ B/op vs base │
|
||||
WrapString-8 47.35Ki ± 0% 47.35Ki ± 0% ~ (p=1.000 n=6) ¹
|
||||
WrapStringWithSpaces-8 52.76Ki ± 0% 52.76Ki ± 0% ~ (p=1.000 n=6) ¹
|
||||
geomean 49.98Ki 49.98Ki +0.00%
|
||||
¹ all samples are equal
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ allocs/op │ allocs/op vs base │
|
||||
WrapString-8 33.00 ± 0% 33.00 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WrapStringWithSpaces-8 51.00 ± 0% 51.00 ± 0% ~ (p=1.000 n=6) ¹
|
||||
geomean 41.02 41.02 +0.00%
|
||||
¹ all samples are equal
|
||||
|
||||
pkg: github.com/olekukonko/tablewriter/pkg/twwidth
|
||||
│ old.txt │ new.txt │
|
||||
│ sec/op │ sec/op vs base │
|
||||
WidthFunction/SimpleASCII_EAfalse_NoCache-8 387.6n ± 1% 368.4n ± 2% -4.97% (p=0.002 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheMiss-8 219.0n ± 127% 217.5n ± 119% ~ (p=0.372 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheHit-8 14.78n ± 1% 14.54n ± 3% ~ (p=0.061 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_NoCache-8 676.4n ± 1% 366.8n ± 2% -45.77% (p=0.002 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheMiss-8 216.1n ± 375% 216.0n ± 128% ~ (p=0.937 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheHit-8 14.71n ± 0% 14.49n ± 0% -1.53% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1.027µ ± 3% 1.007µ ± 1% -2.00% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 219.5n ± 516% 221.4n ± 502% ~ (p=0.515 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheHit-8 14.81n ± 1% 14.61n ± 1% -1.35% (p=0.009 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_NoCache-8 1.313µ ± 2% 1.009µ ± 2% -23.15% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheMiss-8 653.2n ± 150% 218.2n ± 524% ~ (p=0.331 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheHit-8 14.73n ± 2% 14.50n ± 0% -1.60% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_NoCache-8 747.3n ± 1% 336.2n ± 1% -55.02% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheMiss-8 226.3n ± 384% 227.4n ± 113% ~ (p=0.937 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheHit-8 14.74n ± 1% 14.58n ± 1% -1.09% (p=0.011 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_NoCache-8 965.4n ± 2% 348.7n ± 0% -63.88% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheMiss-8 225.4n ± 511% 225.8n ± 111% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheHit-8 14.72n ± 1% 14.54n ± 3% ~ (p=0.056 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_NoCache-8 1376.0n ± 2% 983.8n ± 2% -28.50% (p=0.002 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheMiss-8 633.6n ± 170% 222.4n ± 513% ~ (p=0.974 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheHit-8 15.73n ± 1% 15.64n ± 1% ~ (p=0.227 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_NoCache-8 1589.5n ± 1% 996.9n ± 2% -37.29% (p=0.002 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheMiss-8 484.8n ± 309% 221.3n ± 516% ~ (p=0.240 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheHit-8 15.74n ± 1% 15.73n ± 1% ~ (p=0.485 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_NoCache-8 4.916µ ± 3% 4.512µ ± 4% -8.22% (p=0.002 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 2.430µ ± 114% 2.182µ ± 123% ~ (p=0.699 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 23.75n ± 3% 23.24n ± 3% ~ (p=0.065 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_NoCache-8 9.273µ ± 1% 4.519µ ± 1% -51.27% (p=0.002 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 4.021µ ± 131% 2.127µ ± 128% ~ (p=0.240 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 23.50n ± 2% 23.48n ± 1% ~ (p=0.589 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 57.36µ ± 1% 57.33µ ± 2% ~ (p=0.818 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 22.18µ ± 135% 14.55µ ± 299% ~ (p=0.589 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 44.21n ± 1% 44.20n ± 2% ~ (p=0.818 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 60.25µ ± 2% 57.90µ ± 2% -3.90% (p=0.002 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 16.11µ ± 263% 20.02µ ± 183% ~ (p=0.699 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 44.57n ± 1% 44.18n ± 2% ~ (p=0.461 n=6)
|
||||
geomean 358.5n 283.9n -20.82%
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ B/s │ B/s vs base │
|
||||
WidthFunction/SimpleASCII_EAfalse_NoCache-8 86.11Mi ± 1% 90.63Mi ± 2% +5.24% (p=0.002 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheMiss-8 152.4Mi ± 56% 153.5Mi ± 54% ~ (p=0.394 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheHit-8 2.205Gi ± 1% 2.242Gi ± 3% ~ (p=0.065 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_NoCache-8 49.35Mi ± 1% 91.00Mi ± 2% +84.40% (p=0.002 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheMiss-8 154.5Mi ± 79% 154.5Mi ± 56% ~ (p=0.937 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheHit-8 2.215Gi ± 0% 2.250Gi ± 0% +1.58% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 56.66Mi ± 2% 57.78Mi ± 1% +1.99% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 265.1Mi ± 84% 262.7Mi ± 83% ~ (p=0.485 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheHit-8 3.836Gi ± 1% 3.888Gi ± 1% +1.34% (p=0.009 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_NoCache-8 44.30Mi ± 2% 57.65Mi ± 2% +30.14% (p=0.002 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheMiss-8 147.3Mi ± 81% 266.7Mi ± 84% ~ (p=0.310 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheHit-8 3.856Gi ± 2% 3.919Gi ± 0% +1.63% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_NoCache-8 76.58Mi ± 1% 170.21Mi ± 1% +122.28% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheMiss-8 252.8Mi ± 79% 251.6Mi ± 53% ~ (p=0.937 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheHit-8 3.791Gi ± 1% 3.832Gi ± 1% +1.08% (p=0.009 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_NoCache-8 59.27Mi ± 2% 164.10Mi ± 0% +176.87% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheMiss-8 253.9Mi ± 84% 253.4Mi ± 53% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheHit-8 3.796Gi ± 1% 3.841Gi ± 3% ~ (p=0.065 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_NoCache-8 60.29Mi ± 1% 84.33Mi ± 2% +39.88% (p=0.002 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheMiss-8 227.1Mi ± 79% 373.2Mi ± 84% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheHit-8 5.154Gi ± 1% 5.181Gi ± 1% ~ (p=0.240 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_NoCache-8 52.19Mi ± 1% 83.23Mi ± 2% +59.47% (p=0.002 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheMiss-8 230.9Mi ± 82% 374.9Mi ± 84% ~ (p=0.240 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheHit-8 5.147Gi ± 1% 5.152Gi ± 1% ~ (p=0.485 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_NoCache-8 104.8Mi ± 3% 114.1Mi ± 4% +8.95% (p=0.002 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 368.0Mi ± 293% 474.3Mi ± 211% ~ (p=0.699 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 21.17Gi ± 3% 21.64Gi ± 2% ~ (p=0.065 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_NoCache-8 55.54Mi ± 1% 113.97Mi ± 1% +105.21% (p=0.002 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 399.8Mi ± 232% 577.5Mi ± 149% ~ (p=0.240 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 21.40Gi ± 2% 21.41Gi ± 1% ~ (p=0.589 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 34.08Mi ± 1% 34.10Mi ± 2% ~ (p=0.784 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 101.5Mi ± 1396% 643.9Mi ± 320% ~ (p=0.589 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 43.18Gi ± 1% 43.20Gi ± 2% ~ (p=0.818 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 32.45Mi ± 2% 33.76Mi ± 2% +4.06% (p=0.002 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 393.0Mi ± 296% 122.4Mi ± 1610% ~ (p=0.699 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 42.83Gi ± 1% 43.21Gi ± 2% ~ (p=0.485 n=6)
|
||||
geomean 456.4Mi 560.6Mi +22.83%
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ B/op │ B/op vs base │
|
||||
WidthFunction/SimpleASCII_EAfalse_NoCache-8 112.0 ± 1% 113.0 ± 0% ~ (p=0.061 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheMiss-8 55.00 ± 200% 55.00 ± 202% ~ (p=1.000 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/SimpleASCII_EAtrue_NoCache-8 113.0 ± 1% 113.0 ± 0% ~ (p=1.000 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheMiss-8 55.00 ± 505% 55.00 ± 205% ~ (p=0.697 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 185.0 ± 0% 185.0 ± 1% ~ (p=0.455 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 87.00 ± 402% 87.00 ± 401% ~ (p=1.000 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_NoCache-8 185.0 ± 0% 185.0 ± 1% ~ (p=1.000 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheMiss-8 174.00 ± 115% 87.00 ± 401% ~ (p=0.621 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAfalse_NoCache-8 145.0 ± 0% 146.0 ± 0% +0.69% (p=0.002 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheMiss-8 87.00 ± 392% 87.00 ± 167% ~ (p=0.697 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAtrue_NoCache-8 145.0 ± 1% 146.0 ± 1% +0.69% (p=0.013 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheMiss-8 87.00 ± 392% 87.00 ± 164% ~ (p=0.697 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_NoCache-8 193.0 ± 1% 193.0 ± 0% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheMiss-8 232.0 ± 134% 103.0 ± 485% ~ (p=0.924 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_NoCache-8 193.0 ± 0% 193.0 ± 1% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheMiss-8 185.0 ± 203% 103.0 ± 485% ~ (p=0.621 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAfalse_NoCache-8 1.153Ki ± 0% 1.150Ki ± 0% ~ (p=0.126 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 1.050Ki ± 72% 1.047Ki ± 74% ~ (p=0.939 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAtrue_NoCache-8 1.152Ki ± 0% 1.155Ki ± 0% +0.30% (p=0.015 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 1.036Ki ± 71% 1.039Ki ± 76% ~ (p=0.981 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 1.355Ki ± 0% 1.358Ki ± 0% ~ (p=0.065 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 2.787Ki ± 31% 2.613Ki ± 43% ~ (p=0.805 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 1.358Ki ± 0% 1.361Ki ± 0% ~ (p=0.158 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 2.625Ki ± 43% 2.741Ki ± 37% ~ (p=0.987 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
geomean ² -5.62% ²
|
||||
¹ all samples are equal
|
||||
² summaries must be >0 to compute geomean
|
||||
|
||||
│ old.txt │ new.txt │
|
||||
│ allocs/op │ allocs/op vs base │
|
||||
WidthFunction/SimpleASCII_EAfalse_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheMiss-8 1.000 ± 200% 1.000 ± 200% ~ (p=1.000 n=6)
|
||||
WidthFunction/SimpleASCII_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/SimpleASCII_EAtrue_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheMiss-8 1.000 ± 300% 1.000 ± 200% ~ (p=0.697 n=6)
|
||||
WidthFunction/SimpleASCII_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 6.000 ± 0% 6.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 1.000 ± 600% 1.000 ± 600% ~ (p=1.000 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_NoCache-8 6.000 ± 0% 6.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheMiss-8 3.500 ± 100% 1.000 ± 600% ~ (p=0.610 n=6)
|
||||
WidthFunction/ASCIIWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAfalse_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAfalse_CacheMiss-8 1.000 ± 300% 1.000 ± 200% ~ (p=0.697 n=6)
|
||||
WidthFunction/EastAsian_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAtrue_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsian_EAtrue_CacheMiss-8 1.000 ± 300% 1.000 ± 200% ~ (p=0.697 n=6)
|
||||
WidthFunction/EastAsian_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_NoCache-8 5.000 ± 0% 5.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheMiss-8 3.000 ± 133% 1.000 ± 600% ~ (p=1.000 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_NoCache-8 5.000 ± 0% 5.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheMiss-8 2.500 ± 180% 1.000 ± 600% ~ (p=0.610 n=6)
|
||||
WidthFunction/EastAsianWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAfalse_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 3.000 ± 67% 3.000 ± 67% ~ (p=1.000 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAtrue_NoCache-8 3.000 ± 0% 3.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 3.000 ± 67% 3.000 ± 67% ~ (p=1.000 n=6)
|
||||
WidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 9.000 ± 0% 9.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 5.000 ± 100% 3.500 ± 186% ~ (p=0.978 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 9.000 ± 0% 9.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 4.000 ± 150% 4.500 ± 122% ~ (p=0.952 n=6)
|
||||
WidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 0.000 ± 0% 0.000 ± 0% ~ (p=1.000 n=6) ¹
|
||||
geomean ² -9.28% ²
|
||||
¹ all samples are equal
|
||||
² summaries must be >0 to compute geomean
|
||||
147 vendor/github.com/olekukonko/tablewriter/config.go (generated, vendored)
@@ -135,12 +135,12 @@ func (b *ConfigBuilder) WithFooterMaxWidth(maxWidth int) *ConfigBuilder {
|
||||
return b
|
||||
}
|
||||
|
||||
// WithFooterMergeMode sets the merge behavior for footer cells (e.g., horizontal, hierarchical).
|
||||
// Invalid merge modes are ignored.
|
||||
// Deprecated: Use .Footer().CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (b *ConfigBuilder) WithFooterMergeMode(mergeMode int) *ConfigBuilder {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return b
|
||||
}
|
||||
b.config.Footer.Merging.Mode = mergeMode
|
||||
b.config.Footer.Formatting.MergeMode = mergeMode
|
||||
return b
|
||||
}
|
||||
@@ -187,12 +187,12 @@ func (b *ConfigBuilder) WithHeaderMaxWidth(maxWidth int) *ConfigBuilder {
|
||||
return b
|
||||
}
|
||||
|
||||
// WithHeaderMergeMode sets the merge behavior for header cells (e.g., horizontal, vertical).
|
||||
// Invalid merge modes are ignored.
|
||||
// Deprecated: Use .Header().CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (b *ConfigBuilder) WithHeaderMergeMode(mergeMode int) *ConfigBuilder {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return b
|
||||
}
|
||||
b.config.Header.Merging.Mode = mergeMode
|
||||
b.config.Header.Formatting.MergeMode = mergeMode
|
||||
return b
|
||||
}
|
||||
@@ -246,12 +246,12 @@ func (b *ConfigBuilder) WithRowMaxWidth(maxWidth int) *ConfigBuilder {
|
||||
return b
|
||||
}
|
||||
|
||||
// WithRowMergeMode sets the merge behavior for row cells (e.g., horizontal, hierarchical).
|
||||
// Invalid merge modes are ignored.
|
||||
// Deprecated: Use .Row().CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (b *ConfigBuilder) WithRowMergeMode(mergeMode int) *ConfigBuilder {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return b
|
||||
}
|
||||
b.config.Row.Merging.Mode = mergeMode
|
||||
b.config.Row.Formatting.MergeMode = mergeMode
|
||||
return b
|
||||
}
|
||||
@@ -285,6 +285,14 @@ func (h *HeaderConfigBuilder) Formatting() *HeaderFormattingBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Merging returns a HeaderMergingBuilder for configuring cell merging.
|
||||
func (h *HeaderConfigBuilder) Merging() *HeaderMergingBuilder {
|
||||
return &HeaderMergingBuilder{
|
||||
parent: h,
|
||||
config: &h.config.Merging,
|
||||
}
|
||||
}
|
||||
|
||||
// Padding returns a HeaderPaddingBuilder for header padding
|
||||
func (h *HeaderConfigBuilder) Padding() *HeaderPaddingBuilder {
|
||||
return &HeaderPaddingBuilder{
|
||||
@@ -341,6 +349,14 @@ func (r *RowConfigBuilder) Formatting() *RowFormattingBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Merging returns a RowMergingBuilder for configuring cell merging.
|
||||
func (r *RowConfigBuilder) Merging() *RowMergingBuilder {
|
||||
return &RowMergingBuilder{
|
||||
parent: r,
|
||||
config: &r.config.Merging,
|
||||
}
|
||||
}
|
||||
|
||||
// Padding returns a RowPaddingBuilder for row padding
|
||||
func (r *RowConfigBuilder) Padding() *RowPaddingBuilder {
|
||||
return &RowPaddingBuilder{
|
||||
@@ -397,6 +413,14 @@ func (f *FooterConfigBuilder) Formatting() *FooterFormattingBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
// Merging returns a FooterMergingBuilder for configuring cell merging.
|
||||
func (f *FooterConfigBuilder) Merging() *FooterMergingBuilder {
|
||||
return &FooterMergingBuilder{
|
||||
parent: f,
|
||||
config: &f.config.Merging,
|
||||
}
|
||||
}
|
||||
|
||||
// Padding returns a FooterPaddingBuilder for footer padding
|
||||
func (f *FooterConfigBuilder) Padding() *FooterPaddingBuilder {
|
||||
return &FooterPaddingBuilder{
|
||||
@@ -478,9 +502,10 @@ func (hf *HeaderFormattingBuilder) WithAutoWrap(autoWrap int) *HeaderFormattingB
|
||||
return hf
|
||||
}
|
||||
|
||||
// WithMergeMode sets merge mode
|
||||
// Deprecated: Use .CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (hf *HeaderFormattingBuilder) WithMergeMode(mergeMode int) *HeaderFormattingBuilder {
|
||||
if mergeMode >= tw.MergeNone && mergeMode <= tw.MergeHierarchical {
|
||||
hf.parent.config.Merging.Mode = mergeMode
|
||||
hf.config.MergeMode = mergeMode
|
||||
}
|
||||
return hf
|
||||
@@ -512,9 +537,10 @@ func (rf *RowFormattingBuilder) WithAutoWrap(autoWrap int) *RowFormattingBuilder
|
||||
return rf
|
||||
}
|
||||
|
||||
// WithMergeMode sets merge mode
|
||||
// Deprecated: Use .CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (rf *RowFormattingBuilder) WithMergeMode(mergeMode int) *RowFormattingBuilder {
|
||||
if mergeMode >= tw.MergeNone && mergeMode <= tw.MergeHierarchical {
|
||||
rf.parent.config.Merging.Mode = mergeMode
|
||||
rf.config.MergeMode = mergeMode
|
||||
}
|
||||
return rf
|
||||
@@ -546,14 +572,117 @@ func (ff *FooterFormattingBuilder) WithAutoWrap(autoWrap int) *FooterFormattingB
|
||||
return ff
|
||||
}
|
||||
|
||||
// WithMergeMode sets merge mode
|
||||
// Deprecated: Use .CellMerging().WithMode(...) instead. This method will be removed in a future version.
|
||||
func (ff *FooterFormattingBuilder) WithMergeMode(mergeMode int) *FooterFormattingBuilder {
|
||||
if mergeMode >= tw.MergeNone && mergeMode <= tw.MergeHierarchical {
|
||||
ff.parent.config.Merging.Mode = mergeMode
|
||||
ff.config.MergeMode = mergeMode
|
||||
}
|
||||
return ff
|
||||
}
|
||||
|
||||
// HeaderMergingBuilder configures header cell merging
|
||||
type HeaderMergingBuilder struct {
|
||||
parent *HeaderConfigBuilder
|
||||
config *tw.CellMerging
|
||||
}
|
||||
|
||||
// Build returns the parent HeaderConfigBuilder.
|
||||
func (hm *HeaderMergingBuilder) Build() *HeaderConfigBuilder {
|
||||
return hm.parent
|
||||
}
|
||||
|
||||
// WithMode sets the merge mode (e.g., tw.MergeHorizontal).
|
||||
func (hm *HeaderMergingBuilder) WithMode(mode int) *HeaderMergingBuilder {
|
||||
hm.config.Mode = mode
|
||||
// Also set the deprecated field for backward compatibility
|
||||
hm.parent.config.Formatting.MergeMode = mode
|
||||
return hm
|
||||
}
|
||||
|
||||
// ByColumnIndex sets specific columns to be merged by their index.
|
||||
// If not called, merging applies to all columns.
|
||||
func (hm *HeaderMergingBuilder) ByColumnIndex(indices []int) *HeaderMergingBuilder {
|
||||
if len(indices) == 0 {
|
||||
hm.config.ByColumnIndex = nil // nil means apply to all
|
||||
} else {
|
||||
mapper := tw.NewMapper[int, bool]()
|
||||
for _, idx := range indices {
|
||||
mapper.Set(idx, true)
|
||||
}
|
||||
hm.config.ByColumnIndex = mapper
|
||||
}
|
||||
return hm
|
||||
}
|
||||
|
||||
// RowMergingBuilder configures row cell merging
|
||||
type RowMergingBuilder struct {
|
||||
parent *RowConfigBuilder
|
||||
config *tw.CellMerging
|
||||
}
|
||||
|
||||
// Build returns the parent RowConfigBuilder.
|
||||
func (rm *RowMergingBuilder) Build() *RowConfigBuilder {
|
||||
return rm.parent
|
||||
}
|
||||
|
||||
// WithMode sets the merge mode (e.g., tw.MergeVertical, tw.MergeHierarchical).
|
||||
func (rm *RowMergingBuilder) WithMode(mode int) *RowMergingBuilder {
|
||||
rm.config.Mode = mode
|
||||
// Also set the deprecated field for backward compatibility
|
||||
rm.parent.config.Formatting.MergeMode = mode
|
||||
return rm
|
||||
}
|
||||
|
||||
// ByColumnIndex sets specific columns to be merged by their index.
|
||||
// If not called, merging applies to all columns.
|
||||
func (rm *RowMergingBuilder) ByColumnIndex(indices []int) *RowMergingBuilder {
|
||||
if len(indices) == 0 {
|
||||
rm.config.ByColumnIndex = nil // nil means apply to all
|
||||
} else {
|
||||
mapper := tw.NewMapper[int, bool]()
|
||||
for _, idx := range indices {
|
||||
mapper.Set(idx, true)
|
||||
}
|
||||
rm.config.ByColumnIndex = mapper
|
||||
}
|
||||
return rm
|
||||
}
|
||||
|
||||
// FooterMergingBuilder configures footer cell merging
|
||||
type FooterMergingBuilder struct {
|
||||
parent *FooterConfigBuilder
|
||||
config *tw.CellMerging
|
||||
}
|
||||
|
||||
// Build returns the parent FooterConfigBuilder.
|
||||
func (fm *FooterMergingBuilder) Build() *FooterConfigBuilder {
|
||||
return fm.parent
|
||||
}
|
||||
|
||||
// WithMode sets the merge mode (e.g., tw.MergeHorizontal).
|
||||
func (fm *FooterMergingBuilder) WithMode(mode int) *FooterMergingBuilder {
|
||||
fm.config.Mode = mode
|
||||
// Also set the deprecated field for backward compatibility
|
||||
fm.parent.config.Formatting.MergeMode = mode
|
||||
return fm
|
||||
}
|
||||
|
||||
// ByColumnIndex sets specific columns to be merged by their index.
|
||||
// If not called, merging applies to all columns.
|
||||
func (fm *FooterMergingBuilder) ByColumnIndex(indices []int) *FooterMergingBuilder {
|
||||
if len(indices) == 0 {
|
||||
fm.config.ByColumnIndex = nil // nil means apply to all
|
||||
} else {
|
||||
mapper := tw.NewMapper[int, bool]()
|
||||
for _, idx := range indices {
|
||||
mapper.Set(idx, true)
|
||||
}
|
||||
fm.config.ByColumnIndex = mapper
|
||||
}
|
||||
return fm
|
||||
}
|
||||
|
||||
// HeaderPaddingBuilder configures header padding
|
||||
type HeaderPaddingBuilder struct {
|
||||
parent *HeaderConfigBuilder
|
||||
|
||||
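
The config.go hunks above add per-section `Merging()` builders alongside the now-deprecated `With*MergeMode` helpers. A hedged sketch of how the new chain is meant to be used follows; `Merging()`, `WithMode()` and `ByColumnIndex()` come straight from the hunks, while `tablewriter.NewConfigBuilder()` and the trailing `Build()` calls back up to a `Config` are assumptions inferred from the parent pointers shown above.

```go
package main

import (
	"os"

	"github.com/olekukonko/tablewriter"
	"github.com/olekukonko/tablewriter/tw"
)

func main() {
	// Assumed entry point: a ConfigBuilder constructor whose final Build()
	// yields a tablewriter.Config. WithMode also writes the deprecated
	// Formatting.MergeMode field for backward compatibility, per the hunks.
	cfg := tablewriter.NewConfigBuilder().
		Row().
		Merging().
		WithMode(tw.MergeVertical).
		ByColumnIndex([]int{0, 1}). // restrict merging to the first two columns
		Build().                    // back to the RowConfigBuilder
		Build().                    // back to the ConfigBuilder (assumed)
		Build()                     // produce the Config (assumed)

	table := tablewriter.NewTable(os.Stdout, tablewriter.WithConfig(cfg))
	table.Header("User", "Permission")
	table.Render()
}
```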
248 vendor/github.com/olekukonko/tablewriter/new.txt (generated, vendored, new file)
@@ -0,0 +1,248 @@
|
||||
PASS
|
||||
ok github.com/olekukonko/tablewriter 0.284s
|
||||
? github.com/olekukonko/tablewriter/cmd/csv2table [no test files]
|
||||
goos: darwin
|
||||
goarch: arm64
|
||||
pkg: github.com/olekukonko/tablewriter/pkg/twwarp
|
||||
cpu: Apple M2
|
||||
BenchmarkWrapString-8 10030 114909 ns/op 87.40 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapString-8 10000 112188 ns/op 89.52 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapString-8 10000 113708 ns/op 88.32 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapString-8 10000 113233 ns/op 88.69 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapString-8 10000 112575 ns/op 89.21 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapString-8 10000 112604 ns/op 89.19 MB/s 48488 B/op 33 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 113731 ns/op 88.30 MB/s 54024 B/op 51 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 113511 ns/op 88.48 MB/s 54024 B/op 51 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 113575 ns/op 88.43 MB/s 54024 B/op 51 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 113746 ns/op 88.29 MB/s 54024 B/op 51 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 113473 ns/op 88.51 MB/s 54024 B/op 51 allocs/op
|
||||
BenchmarkWrapStringWithSpaces-8 10000 114487 ns/op 87.72 MB/s 54024 B/op 51 allocs/op
|
||||
PASS
|
||||
ok github.com/olekukonko/tablewriter/pkg/twwarp 14.612s
|
||||
goos: darwin
|
||||
goarch: arm64
|
||||
pkg: github.com/olekukonko/tablewriter/pkg/twwidth
|
||||
cpu: Apple M2
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 264374 4533 ns/op 119.12 MB/s 1178 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 265746 4514 ns/op 119.62 MB/s 1177 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 263538 4509 ns/op 119.75 MB/s 1178 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 266173 4510 ns/op 119.72 MB/s 1180 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 265224 4676 ns/op 115.48 MB/s 1180 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_NoCache-8 265696 4508 ns/op 119.80 MB/s 1177 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 251047 4859 ns/op 111.13 MB/s 1867 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 1000000 3945 ns/op 136.89 MB/s 1584 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 3504475 3729 ns/op 144.81 MB/s 1474 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 3664098 635.4 ns/op 849.84 MB/s 670 B/op 2 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 3818680 588.6 ns/op 917.47 MB/s 667 B/op 2 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheMiss-8 3761966 348.7 ns/op 1548.66 MB/s 583 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 49524442 23.54 ns/op 22938.55 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 51765230 23.25 ns/op 23221.81 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 51881983 23.83 ns/op 22664.79 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 51665586 23.20 ns/op 23272.39 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 51782077 23.23 ns/op 23250.20 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAfalse_CacheHit-8 51498277 23.21 ns/op 23267.21 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 263586 4520 ns/op 119.47 MB/s 1183 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 265484 4519 ns/op 119.49 MB/s 1182 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 265218 4514 ns/op 119.64 MB/s 1181 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 265957 4515 ns/op 119.60 MB/s 1184 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 265981 4518 ns/op 119.52 MB/s 1183 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_NoCache-8 265028 4574 ns/op 118.06 MB/s 1184 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 251682 4853 ns/op 111.27 MB/s 1869 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 1000000 3893 ns/op 138.70 MB/s 1583 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 3596130 3747 ns/op 144.13 MB/s 1499 B/op 4 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 3671358 506.1 ns/op 1066.92 MB/s 628 B/op 2 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 3687993 370.6 ns/op 1456.96 MB/s 594 B/op 2 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheMiss-8 3672946 358.4 ns/op 1506.88 MB/s 583 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 49266897 23.64 ns/op 22844.78 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 50158659 23.54 ns/op 22938.83 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 50689321 23.45 ns/op 23025.77 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 51113672 23.52 ns/op 22954.95 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 51489162 23.21 ns/op 23269.51 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongSimpleASCII_EAtrue_CacheHit-8 51705564 23.16 ns/op 23311.21 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 20930 57159 ns/op 35.86 MB/s 1389 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 20882 57502 ns/op 35.65 MB/s 1395 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 21103 57730 ns/op 35.51 MB/s 1391 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 20889 56615 ns/op 36.21 MB/s 1393 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 20808 58303 ns/op 35.16 MB/s 1391 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_NoCache-8 21104 56727 ns/op 36.14 MB/s 1387 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 38569 27485 ns/op 74.59 MB/s 3041 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 1000000 58061 ns/op 35.31 MB/s 3835 B/op 10 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 2124566 31025 ns/op 66.08 MB/s 3140 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 1000000 1607 ns/op 1275.74 MB/s 2311 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 1615826 1224 ns/op 1674.27 MB/s 2311 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheMiss-8 1478348 722.9 ns/op 2835.84 MB/s 2311 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 23989044 44.26 ns/op 46313.25 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 27268802 44.13 ns/op 46454.64 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 27292006 44.51 ns/op 46054.40 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 24128786 44.99 ns/op 45569.06 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 26858004 44.09 ns/op 46497.43 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAfalse_CacheHit-8 27259458 44.05 ns/op 46538.64 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 20671 57887 ns/op 35.41 MB/s 1395 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 20966 56795 ns/op 36.09 MB/s 1396 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 20708 57092 ns/op 35.91 MB/s 1388 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 20882 57917 ns/op 35.40 MB/s 1389 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 21244 58013 ns/op 35.34 MB/s 1393 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_NoCache-8 20854 58122 ns/op 35.27 MB/s 1396 B/op 9 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 38907 30289 ns/op 67.68 MB/s 3066 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 1000000 56603 ns/op 36.22 MB/s 3835 B/op 10 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 1949059 29030 ns/op 70.62 MB/s 3084 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 1479127 933.7 ns/op 2195.47 MB/s 2311 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 2335996 11012 ns/op 186.17 MB/s 2548 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheMiss-8 983864 1169 ns/op 1753.75 MB/s 2311 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 27291516 44.18 ns/op 46398.32 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 27220657 44.18 ns/op 46402.04 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 27059124 44.91 ns/op 45645.46 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 26679783 44.04 ns/op 46551.62 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 27244114 44.14 ns/op 46448.19 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/LongASCIIWithANSI_EAtrue_CacheHit-8 27221737 44.61 ns/op 45948.75 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3247359 366.1 ns/op 95.62 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3292773 370.6 ns/op 94.44 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3275070 365.3 ns/op 95.82 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3291489 365.6 ns/op 95.73 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3282121 374.9 ns/op 93.37 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_NoCache-8 3198205 375.6 ns/op 93.18 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 3092488 419.4 ns/op 83.45 MB/s 152 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 6276060 476.4 ns/op 73.46 MB/s 166 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 6135336 218.8 ns/op 159.98 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 6175833 216.1 ns/op 161.95 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 6156606 215.2 ns/op 162.63 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheMiss-8 6160923 216.2 ns/op 161.88 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 78655855 15.02 ns/op 2330.76 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 70905223 14.59 ns/op 2398.68 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 82255629 14.49 ns/op 2415.75 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 82383864 14.48 ns/op 2417.21 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 82325931 14.49 ns/op 2415.73 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAfalse_CacheHit-8 82426311 14.66 ns/op 2386.73 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3265182 365.8 ns/op 95.68 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3275419 366.3 ns/op 95.56 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3057087 375.3 ns/op 93.26 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3239217 372.6 ns/op 93.94 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3246429 367.3 ns/op 95.29 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_NoCache-8 3252763 365.3 ns/op 95.80 MB/s 113 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 2986195 396.4 ns/op 88.30 MB/s 142 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 6487422 493.6 ns/op 70.90 MB/s 168 B/op 3 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 6261225 216.1 ns/op 161.99 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 6154988 210.7 ns/op 166.13 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 6308702 213.8 ns/op 163.69 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheMiss-8 6120438 216.0 ns/op 162.05 MB/s 55 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 82184980 14.47 ns/op 2419.17 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 78985473 14.51 ns/op 2412.95 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 82368319 14.47 ns/op 2419.30 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 82366668 14.47 ns/op 2418.96 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 82104614 14.53 ns/op 2409.59 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/SimpleASCII_EAtrue_CacheHit-8 82399426 14.53 ns/op 2409.13 MB/s 0 B/op 0 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1020 ns/op 59.80 MB/s 186 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1010 ns/op 60.40 MB/s 185 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1007 ns/op 60.55 MB/s 186 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1006 ns/op 60.63 MB/s 185 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1006 ns/op 60.65 MB/s 185 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_NoCache-8 1000000 1006 ns/op 60.63 MB/s 185 B/op 6 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 1000000 1334 ns/op 45.74 MB/s 436 B/op 7 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 6892693 1204 ns/op 50.65 MB/s 321 B/op 7 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 6433399 221.7 ns/op 275.14 MB/s 87 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 6323521 221.2 ns/op 275.73 MB/s 87 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction/ASCIIWithANSI_EAfalse_CacheMiss-8 6000822 218.5 ns/op 279.15 MB/s 87 B/op 1 allocs/op
|
||||
BenchmarkWidthFunction (ASCIIWithANSI, EastAsian, EastAsianWithANSI; EAfalse/EAtrue × NoCache/CacheMiss/CacheHit, 6 runs each): uncached calls 335 ns/op–1.03 µs/op (3–6 allocs/op); cache misses settle at 216–228 ns/op (1 alloc/op) after warm-up runs of ~1.2–1.4 µs/op; cache hits 14.4–16.0 ns/op (0 allocs/op).
PASS
ok      github.com/olekukonko/tablewriter/pkg/twwidth   659.150s
?       github.com/olekukonko/tablewriter/renderer      [no test files]
PASS
ok      github.com/olekukonko/tablewriter/tests 3.025s
PASS
ok      github.com/olekukonko/tablewriter/tw    0.283s
248  vendor/github.com/olekukonko/tablewriter/old.txt  generated  vendored  Normal file
@@ -0,0 +1,248 @@
PASS
ok      github.com/olekukonko/tablewriter       0.819s
?       github.com/olekukonko/tablewriter/cmd/csv2table [no test files]
goos: darwin
goarch: arm64
pkg: github.com/olekukonko/tablewriter/pkg/twwarp
cpu: Apple M2
BenchmarkWrapString-8 (6 runs): 111.3–113.4 µs/op, ≈88–90 MB/s, 48488 B/op, 33 allocs/op
BenchmarkWrapStringWithSpaces-8 (6 runs): 113.0–115.1 µs/op, ≈87–89 MB/s, 54024 B/op, 51 allocs/op
PASS
ok      github.com/olekukonko/tablewriter/pkg/twwarp    15.179s
goos: darwin
goarch: arm64
pkg: github.com/olekukonko/tablewriter/pkg/twwidth
cpu: Apple M2
BenchmarkWidthFunction (SimpleASCII, ASCIIWithANSI, EastAsian, EastAsianWithANSI, LongSimpleASCII, LongASCIIWithANSI; EAfalse/EAtrue × NoCache/CacheMiss/CacheHit, 6 runs each): cache hits 14.7–16.0 ns/op for short inputs and 23–45 ns/op for long inputs (0 allocs/op); short-input cache misses settle around 209–242 ns/op after warm-up, while long-input cache-miss runs stay noisy (≈0.36–59 µs/op); uncached calls range from 387 ns/op (SimpleASCII) to ~62 µs/op (LongASCIIWithANSI).
PASS
ok      github.com/olekukonko/tablewriter/pkg/twwidth   724.296s
?       github.com/olekukonko/tablewriter/renderer      [no test files]
PASS
ok      github.com/olekukonko/tablewriter/tests 2.959s
PASS
ok      github.com/olekukonko/tablewriter/tw    0.270s
40  vendor/github.com/olekukonko/tablewriter/option.go  generated  vendored
@@ -122,13 +122,14 @@ func WithFooterAlignmentConfig(alignment tw.CellAlignment) Option {
|
||||
}
|
||||
}
|
||||
|
||||
// WithFooterMergeMode sets the merge mode for footer cells.
|
||||
// Invalid merge modes are ignored, and the change is logged if debugging is enabled.
|
||||
// Deprecated: Use a ConfigBuilder with .Footer().CellMerging().WithMode(...) instead.
|
||||
// This option will be removed in a future version.
|
||||
func WithFooterMergeMode(mergeMode int) Option {
|
||||
return func(target *Table) {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return
|
||||
}
|
||||
target.config.Footer.Merging.Mode = mergeMode
|
||||
target.config.Footer.Formatting.MergeMode = mergeMode
|
||||
if target.logger != nil {
|
||||
target.logger.Debugf("Option: WithFooterMergeMode applied to Table: %v", mergeMode)
|
||||
@@ -232,13 +233,14 @@ func WithHeaderAutoWrap(wrap int) Option {
|
||||
}
|
||||
}
|
||||
|
||||
// WithHeaderMergeMode sets the merge mode for header cells.
|
||||
// Invalid merge modes are ignored, and the change is logged if debugging is enabled.
|
||||
// Deprecated: Use a ConfigBuilder with .Header().CellMerging().WithMode(...) instead.
|
||||
// This option will be removed in a future version.
|
||||
func WithHeaderMergeMode(mergeMode int) Option {
|
||||
return func(target *Table) {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return
|
||||
}
|
||||
target.config.Header.Merging.Mode = mergeMode
|
||||
target.config.Header.Formatting.MergeMode = mergeMode
|
||||
if target.logger != nil {
|
||||
target.logger.Debugf("Option: WithHeaderMergeMode applied to Table: %v", mergeMode)
|
||||
@@ -321,13 +323,14 @@ func WithRowAutoWrap(wrap int) Option {
|
||||
}
|
||||
}
|
||||
|
||||
// WithRowMergeMode sets the merge mode for row cells.
|
||||
// Invalid merge modes are ignored, and the change is logged if debugging is enabled.
|
||||
// Deprecated: Use a ConfigBuilder with .Row().CellMerging().WithMode(...) instead.
|
||||
// This option will be removed in a future version.
|
||||
func WithRowMergeMode(mergeMode int) Option {
|
||||
return func(target *Table) {
|
||||
if mergeMode < tw.MergeNone || mergeMode > tw.MergeHierarchical {
|
||||
return
|
||||
}
|
||||
target.config.Row.Merging.Mode = mergeMode
|
||||
target.config.Row.Formatting.MergeMode = mergeMode
|
||||
if target.logger != nil {
|
||||
target.logger.Debugf("Option: WithRowMergeMode applied to Table: %v", mergeMode)
|
||||
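The three hunks above deprecate WithFooterMergeMode, WithHeaderMergeMode and WithRowMergeMode in favour of the CellMerging configuration. A minimal sketch of the two styles follows; tablewriter.NewTable is an assumed constructor from the public v1.1.x API and is not part of these hunks:

package main

import (
    "os"

    "github.com/olekukonko/tablewriter"
    "github.com/olekukonko/tablewriter/tw"
)

func main() {
    // Deprecated style: still accepted, but values outside
    // [tw.MergeNone, tw.MergeHierarchical] are silently ignored.
    legacy := tablewriter.NewTable(os.Stdout, // assumed constructor
        tablewriter.WithRowMergeMode(tw.MergeVertical))
    _ = legacy

    // Replacement style: describe merging on the cell config itself,
    // which the deprecated options now mirror into config.Row.Merging.Mode.
    rowCfg := tw.CellConfig{
        Merging: tw.CellMerging{Mode: tw.MergeVertical},
    }
    _ = rowCfg
}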
@@ -644,9 +647,9 @@ func WithEastAsian(enable bool) Option {
|
||||
// The runewidth.Condition object allows for more fine-grained control over how rune widths
|
||||
// are determined, beyond just toggling EastAsianWidth. This could include settings for
|
||||
// ambiguous width characters or other future properties of runewidth.Condition.
|
||||
func WithCondition(condition *runewidth.Condition) Option {
|
||||
func WithCondition(cond *runewidth.Condition) Option {
|
||||
return func(target *Table) {
|
||||
twwidth.SetCondition(condition)
|
||||
twwidth.SetCondition(cond)
|
||||
}
|
||||
}
|
||||
|
||||
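WithCondition keeps its go-runewidth signature, but per the twwidth changes later in this diff the condition is converted to displaywidth.Options internally. A small usage sketch; NewTable is again assumed rather than taken from this hunk:

package main

import (
    "os"

    runewidth "github.com/mattn/go-runewidth"
    "github.com/olekukonko/tablewriter"
)

func main() {
    cond := runewidth.NewCondition()
    cond.EastAsianWidth = true // treat ambiguous-width runes as wide

    // WithCondition forwards cond to twwidth.SetCondition, which now derives
    // displaywidth.Options from it instead of storing the condition itself.
    t := tablewriter.NewTable(os.Stdout, tablewriter.WithCondition(cond)) // NewTable assumed
    _ = t
}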
@@ -706,6 +709,9 @@ func defaultConfig() Config {
|
||||
AutoFormat: tw.On,
|
||||
MergeMode: tw.MergeNone,
|
||||
},
|
||||
Merging: tw.CellMerging{
|
||||
Mode: tw.MergeNone,
|
||||
},
|
||||
Padding: tw.CellPadding{
|
||||
Global: tw.PaddingDefault,
|
||||
},
|
||||
@@ -720,6 +726,9 @@ func defaultConfig() Config {
|
||||
AutoFormat: tw.Off,
|
||||
MergeMode: tw.MergeNone,
|
||||
},
|
||||
Merging: tw.CellMerging{
|
||||
Mode: tw.MergeNone,
|
||||
},
|
||||
Padding: tw.CellPadding{
|
||||
Global: tw.PaddingDefault,
|
||||
},
|
||||
@@ -734,6 +743,9 @@ func defaultConfig() Config {
|
||||
AutoFormat: tw.Off,
|
||||
MergeMode: tw.MergeNone,
|
||||
},
|
||||
Merging: tw.CellMerging{
|
||||
Mode: tw.MergeNone,
|
||||
},
|
||||
Padding: tw.CellPadding{
|
||||
Global: tw.PaddingDefault,
|
||||
},
|
||||
@@ -772,10 +784,20 @@ func mergeCellConfig(dst, src tw.CellConfig) tw.CellConfig {
|
||||
if src.ColMaxWidths.Global != 0 {
|
||||
dst.ColMaxWidths.Global = src.ColMaxWidths.Global
|
||||
}
|
||||
if src.Formatting.MergeMode != 0 {
|
||||
|
||||
// Handle merging of the new CellMerging struct and the deprecated MergeMode
|
||||
if src.Merging.Mode != 0 {
|
||||
dst.Merging.Mode = src.Merging.Mode
|
||||
dst.Formatting.MergeMode = src.Merging.Mode
|
||||
} else if src.Formatting.MergeMode != 0 {
|
||||
dst.Merging.Mode = src.Formatting.MergeMode
|
||||
dst.Formatting.MergeMode = src.Formatting.MergeMode
|
||||
}
|
||||
|
||||
if src.Merging.ByColumnIndex != nil {
|
||||
dst.Merging.ByColumnIndex = src.Merging.ByColumnIndex.Clone()
|
||||
}
|
||||
|
||||
dst.Formatting.AutoFormat = src.Formatting.AutoFormat
|
||||
|
||||
if src.Padding.Global.Paddable() {
|
||||
|
||||
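The mergeCellConfig hunk above gives the new Merging.Mode precedence over the deprecated Formatting.MergeMode and keeps the two fields in sync. The selection rule in isolation, as a hypothetical helper (the name effectiveMergeMode is illustrative, not part of the library):

package main

import "fmt"

// effectiveMergeMode mirrors the precedence applied in mergeCellConfig:
// a non-zero CellMerging.Mode wins, otherwise the legacy MergeMode is used.
func effectiveMergeMode(mergingMode, legacyMergeMode int) int {
    if mergingMode != 0 {
        return mergingMode
    }
    return legacyMergeMode
}

func main() {
    const mergeVertical = 2 // illustrative value only; use tw.MergeVertical in real code
    fmt.Println(effectiveMergeMode(mergeVertical, 0)) // new field set -> 2
    fmt.Println(effectiveMergeMode(0, mergeVertical)) // falls back to the deprecated field -> 2
}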
9  vendor/github.com/olekukonko/tablewriter/pkg/twwarp/wrap.go  generated  vendored
@@ -12,8 +12,8 @@ import (
    "strings"
    "unicode"

    "github.com/clipperhouse/uax29/v2/graphemes"
    "github.com/olekukonko/tablewriter/pkg/twwidth" // IMPORT YOUR NEW PACKAGE
    "github.com/rivo/uniseg"
    // "github.com/mattn/go-runewidth" // This can be removed if all direct uses are gone
)

@@ -153,9 +153,9 @@ func stringToDisplayWidth(s string, targetWidth int) (substring string, actualWi
    var currentWidth int
    var endIndex int // Tracks the byte index in the original string

    g := uniseg.NewGraphemes(s)
    g := graphemes.FromString(s)
    for g.Next() {
        grapheme := g.Str()
        grapheme := g.Value()
        // graphemeWidth := runewidth.StringWidth(grapheme) // OLD
        graphemeWidth := twwidth.Width(grapheme) // NEW: Use twdw.Width

@@ -164,8 +164,7 @@ func stringToDisplayWidth(s string, targetWidth int) (substring string, actualWi
        }

        currentWidth += graphemeWidth
        _, e := g.Positions()
        endIndex = e
        endIndex = g.End()
    }
    return s[:endIndex], currentWidth
}
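stringToDisplayWidth now walks grapheme clusters with clipperhouse/uax29 and measures each cluster via twwidth.Width instead of go-runewidth and uniseg. A minimal sketch of that iteration pattern, using only the APIs visible in the hunks above:

package main

import (
    "fmt"

    "github.com/clipperhouse/uax29/v2/graphemes"
    "github.com/olekukonko/tablewriter/pkg/twwidth"
)

func main() {
    s := "héllo🙂"
    total := 0
    g := graphemes.FromString(s)
    for g.Next() {
        cluster := g.Value()            // one grapheme cluster, e.g. "🙂"
        total += twwidth.Width(cluster) // display width of that cluster
        fmt.Printf("%q ends at byte %d\n", cluster, g.End())
    }
    fmt.Println("total display width:", total)
}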
80  vendor/github.com/olekukonko/tablewriter/pkg/twwidth/width.go  generated  vendored
@@ -6,11 +6,12 @@ import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/clipperhouse/displaywidth"
|
||||
"github.com/mattn/go-runewidth"
|
||||
)
|
||||
|
||||
// condition holds the global runewidth configuration, including East Asian width settings.
|
||||
var condition *runewidth.Condition
|
||||
// globalOptions holds the global displaywidth configuration, including East Asian width settings.
|
||||
var globalOptions displaywidth.Options
|
||||
|
||||
// mu protects access to condition and widthCache for thread safety.
|
||||
var mu sync.Mutex
|
||||
@@ -19,10 +20,21 @@ var mu sync.Mutex
|
||||
var ansi = Filter()
|
||||
|
||||
func init() {
|
||||
condition = runewidth.NewCondition()
|
||||
globalOptions = newOptions()
|
||||
widthCache = make(map[cacheKey]int)
|
||||
}
|
||||
|
||||
func newOptions() displaywidth.Options {
|
||||
// go-runewidth has default logic based on env variables and locale,
|
||||
// we want to keep that compatibility
|
||||
cond := runewidth.NewCondition()
|
||||
options := displaywidth.Options{
|
||||
EastAsianWidth: cond.EastAsianWidth,
|
||||
StrictEmojiNeutral: cond.StrictEmojiNeutral,
|
||||
}
|
||||
return options
|
||||
}
|
||||
|
||||
// cacheKey is used as a key for memoizing string width results in widthCache.
|
||||
type cacheKey struct {
|
||||
str string // Input string
|
||||
@@ -60,26 +72,42 @@ func Filter() *regexp.Regexp {
|
||||
func SetEastAsian(enable bool) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
if condition.EastAsianWidth != enable {
|
||||
condition.EastAsianWidth = enable
|
||||
if globalOptions.EastAsianWidth != enable {
|
||||
globalOptions.EastAsianWidth = enable
|
||||
widthCache = make(map[cacheKey]int) // Clear cache on setting change
|
||||
}
|
||||
}
|
||||
|
||||
// SetCondition updates the global runewidth.Condition used for width calculations.
|
||||
// When the condition is changed, the width cache is cleared.
|
||||
// IsEastAsian returns the current East Asian width setting.
|
||||
// This function is thread-safe.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// newCond := runewidth.NewCondition()
|
||||
// newCond.EastAsianWidth = true
|
||||
// twdw.SetCondition(newCond)
|
||||
func SetCondition(newCond *runewidth.Condition) {
|
||||
// if twdw.IsEastAsian() {
|
||||
// // Handle East Asian width characters
|
||||
// }
|
||||
func IsEastAsian() bool {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
return globalOptions.EastAsianWidth
|
||||
}
|
||||
|
||||
// SetCondition updates the global runewidth.Condition used for width calculations.
|
||||
// This method is kept for backward compatibility. The condition is converted to
|
||||
// displaywidth.Options internally for better performance.
|
||||
func SetCondition(cond *runewidth.Condition) {
|
||||
mu.Lock()
|
||||
defer mu.Unlock()
|
||||
condition = newCond
|
||||
widthCache = make(map[cacheKey]int) // Clear cache on setting change
|
||||
globalOptions = conditionToOptions(cond)
|
||||
}
|
||||
|
||||
// Convert runewidth.Condition to displaywidth.Options
|
||||
func conditionToOptions(cond *runewidth.Condition) displaywidth.Options {
|
||||
return displaywidth.Options{
|
||||
EastAsianWidth: cond.EastAsianWidth,
|
||||
StrictEmojiNeutral: cond.StrictEmojiNeutral,
|
||||
}
|
||||
}
|
||||
|
||||
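SetEastAsian and the new IsEastAsian read and write the package-level displaywidth options under the mutex, and flipping the flag clears the width cache. A short sketch using only the functions shown above:

package main

import (
    "fmt"

    "github.com/olekukonko/tablewriter/pkg/twwidth"
)

func main() {
    fmt.Println("east asian width enabled:", twwidth.IsEastAsian())

    // Ambiguous-width runes such as Greek letters measure wider once the
    // East Asian flag is on; flipping it also clears the width cache.
    twwidth.SetEastAsian(true)
    fmt.Println(twwidth.Width("αβ"))

    twwidth.SetEastAsian(false)
    fmt.Println(twwidth.Width("αβ"))
}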
// Width calculates the visual width of a string, excluding ANSI escape sequences,
|
||||
@@ -92,19 +120,18 @@ func SetCondition(newCond *runewidth.Condition) {
|
||||
// width := twdw.Width("Hello\x1b[31mWorld") // Returns 10
|
||||
func Width(str string) int {
|
||||
mu.Lock()
|
||||
key := cacheKey{str: str, eastAsianWidth: condition.EastAsianWidth}
|
||||
key := cacheKey{str: str, eastAsianWidth: globalOptions.EastAsianWidth}
|
||||
if w, found := widthCache[key]; found {
|
||||
mu.Unlock()
|
||||
return w
|
||||
}
|
||||
mu.Unlock()
|
||||
|
||||
// Use a temporary condition to avoid holding the lock during calculation
|
||||
tempCond := runewidth.NewCondition()
|
||||
tempCond.EastAsianWidth = key.eastAsianWidth
|
||||
options := newOptions()
|
||||
options.EastAsianWidth = key.eastAsianWidth
|
||||
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
calculatedWidth := tempCond.StringWidth(stripped)
|
||||
calculatedWidth := options.String(stripped)
|
||||
|
||||
mu.Lock()
|
||||
widthCache[key] = calculatedWidth
|
||||
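Width memoizes results keyed by the input string plus the current East Asian setting, while WidthNoCache recomputes every time; both strip ANSI escape sequences before measuring. A brief sketch:

package main

import (
    "fmt"

    "github.com/olekukonko/tablewriter/pkg/twwidth"
)

func main() {
    s := "Hello\x1b[31mWorld\x1b[0m" // ANSI colour codes carry no display width

    fmt.Println(twwidth.Width(s))        // 10, computed and cached
    fmt.Println(twwidth.Width(s))        // 10, served from the cache
    fmt.Println(twwidth.WidthNoCache(s)) // 10, recomputed on every call
}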
@@ -122,14 +149,14 @@ func Width(str string) int {
|
||||
// width := twdw.WidthNoCache("Hello\x1b[31mWorld") // Returns 10
|
||||
func WidthNoCache(str string) int {
|
||||
mu.Lock()
|
||||
currentEA := condition.EastAsianWidth
|
||||
currentEA := globalOptions.EastAsianWidth
|
||||
mu.Unlock()
|
||||
|
||||
tempCond := runewidth.NewCondition()
|
||||
tempCond.EastAsianWidth = currentEA
|
||||
options := newOptions()
|
||||
options.EastAsianWidth = currentEA
|
||||
|
||||
stripped := ansi.ReplaceAllLiteralString(str, "")
|
||||
return tempCond.StringWidth(stripped)
|
||||
return options.String(stripped)
|
||||
}
|
||||
|
||||
// Display calculates the visual width of a string, excluding ANSI escape sequences,
|
||||
@@ -142,7 +169,8 @@ func WidthNoCache(str string) int {
|
||||
// cond := runewidth.NewCondition()
|
||||
// width := twdw.Display(cond, "Hello\x1b[31mWorld") // Returns 10
|
||||
func Display(cond *runewidth.Condition, str string) int {
|
||||
return cond.StringWidth(ansi.ReplaceAllLiteralString(str, ""))
|
||||
options := conditionToOptions(cond)
|
||||
return options.String(ansi.ReplaceAllLiteralString(str, ""))
|
||||
}
|
||||
|
||||
// Truncate shortens a string to fit within a specified visual width, optionally
|
||||
@@ -205,7 +233,7 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
|
||||
// Capture the global EastAsianWidth setting once for consistent use
|
||||
mu.Lock()
|
||||
currentGlobalEastAsianWidth := condition.EastAsianWidth
|
||||
currentGlobalEastAsianWidth := globalOptions.EastAsianWidth
|
||||
mu.Unlock()
|
||||
|
||||
// Special case for EastAsian true: if only suffix fits, return suffix.
|
||||
@@ -243,8 +271,8 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
inAnsiSequence := false
|
||||
ansiWrittenToContent := false
|
||||
|
||||
localRunewidthCond := runewidth.NewCondition()
|
||||
localRunewidthCond.EastAsianWidth = currentGlobalEastAsianWidth
|
||||
options := newOptions()
|
||||
options.EastAsianWidth = currentGlobalEastAsianWidth
|
||||
|
||||
for _, r := range s {
|
||||
if r == '\x1b' {
|
||||
@@ -278,7 +306,7 @@ func Truncate(s string, maxWidth int, suffix ...string) string {
|
||||
ansiSeqBuf.Reset()
|
||||
}
|
||||
} else { // Normal character
|
||||
runeDisplayWidth := localRunewidthCond.RuneWidth(r)
|
||||
runeDisplayWidth := options.Rune(r)
|
||||
if targetContentForIteration == 0 { // No budget for content at all
|
||||
break
|
||||
}
|
||||
|
||||
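Truncate shortens a string to a display-width budget while preserving ANSI sequences and honouring the global East Asian setting. A sketch against the signature shown above; the exact behaviour at the boundary (and the width of the ambiguous-width ellipsis) follows the implementation, so the comments only describe intent:

package main

import (
    "fmt"

    "github.com/olekukonko/tablewriter/pkg/twwidth"
)

func main() {
    s := "\x1b[32mhello world\x1b[0m" // 11 printable cells plus colour codes

    short := twwidth.Truncate(s, 8, "…")          // aim for at most 8 cells, ellipsis suffix
    fmt.Println(short, twwidth.Width(short) <= 8) // width check uses the same rules
}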
58  vendor/github.com/olekukonko/tablewriter/tablewriter.go  generated  vendored
@@ -23,7 +23,7 @@ import (
|
||||
type Table struct {
|
||||
writer io.Writer // Destination for table output
|
||||
counters []tw.Counter // Counters for indices
|
||||
rows [][][]string // Row data, supporting multi-line cells
|
||||
rows [][]string // Row data, one slice of strings per logical row
|
||||
headers [][]string // Header content
|
||||
footers [][]string // Footer content
|
||||
headerWidths tw.Mapper[int, int] // Computed widths for header columns
|
||||
@@ -221,13 +221,12 @@ func (t *Table) Append(rows ...interface{}) error {
|
||||
}
|
||||
}
|
||||
|
||||
// The rest of the function proceeds as before, converting the data to string lines.
|
||||
lines, err := t.toStringLines(cellsSource, t.config.Row)
|
||||
cells, err := t.convertCellsToStrings(cellsSource, t.config.Row)
|
||||
if err != nil {
|
||||
t.logger.Errorf("Append (Batch) failed for cellsSource %v: %v", cellsSource, err)
|
||||
return err
|
||||
}
|
||||
t.rows = append(t.rows, lines)
|
||||
t.rows = append(t.rows, cells)
|
||||
|
||||
t.logger.Debugf("Append (Batch) completed for one row, total rows in table: %d", len(t.rows))
|
||||
return nil
|
||||
@@ -456,7 +455,7 @@ func (t *Table) Reset() {
|
||||
t.logger.Debug("Reset() called. Clearing table data and render state.")
|
||||
|
||||
// Clear data slices
|
||||
t.rows = nil // Or t.rows = make([][][]string, 0)
|
||||
t.rows = nil // Or t.rows = make([][]string, 0)
|
||||
t.headers = nil // Or t.headers = make([][]string, 0)
|
||||
t.footers = nil // Or t.footers = make([][]string, 0)
|
||||
|
||||
@@ -556,16 +555,14 @@ func (t *Table) appendSingle(row interface{}) error {
|
||||
t.logger.Debugf("appendSingle: Dispatching to streamAppendRow for row: %v", row)
|
||||
return t.streamAppendRow(row) // Call the streaming render function
|
||||
}
|
||||
// Existing batch logic:
|
||||
|
||||
t.logger.Debugf("appendSingle: Processing for batch mode, row: %v", row)
|
||||
// toStringLines now uses the new convertCellsToStrings internally, then prepareContent.
|
||||
// This is fine for batch.
|
||||
lines, err := t.toStringLines(row, t.config.Row)
|
||||
cells, err := t.convertCellsToStrings(row, t.config.Row)
|
||||
if err != nil {
|
||||
t.logger.Debugf("Error in toStringLines (batch mode): %v", err)
|
||||
t.logger.Debugf("Error in convertCellsToStrings (batch mode): %v", err)
|
||||
return err
|
||||
}
|
||||
t.rows = append(t.rows, lines) // Add to batch storage
|
||||
t.rows = append(t.rows, cells) // Add to batch storage
|
||||
t.logger.Debugf("Row appended to batch t.rows, total batch rows: %d", len(t.rows))
|
||||
return nil
|
||||
}
|
||||
@@ -775,8 +772,8 @@ func (t *Table) maxColumns() int {
|
||||
m = len(t.headers[0])
|
||||
}
|
||||
for _, row := range t.rows {
|
||||
if len(row) > 0 && len(row[0]) > m {
|
||||
m = len(row[0])
|
||||
if len(row) > m {
|
||||
m = len(row)
|
||||
}
|
||||
}
|
||||
if len(t.footers) > 0 && len(t.footers[0]) > m {
|
||||
@@ -811,7 +808,7 @@ func (t *Table) printTopBottomCaption(w io.Writer, actualTableWidth int) {
|
||||
t.logger.Debugf("[printCaption] Empty table, no user caption.Width: Using natural caption width %d.", captionWrapWidth)
|
||||
} else {
|
||||
captionWrapWidth = actualTableWidth
|
||||
t.logger.Debugf("[printCaption] Non-empty table, no user caption.Width: Using actualTableWidth %d for wrapping.", actualTableWidth)
|
||||
t.logger.Debugf("[printCaption] Non-empty table, no user caption.Width: Using actualTableWidth %d for wrapping.", captionWrapWidth)
|
||||
}
|
||||
|
||||
if captionWrapWidth <= 0 {
|
||||
@@ -1066,13 +1063,20 @@ func (t *Table) prepareContexts() (*renderContext, *mergeContext, error) {
|
||||
logger: t.logger,
|
||||
}
|
||||
|
||||
isEmpty, visibleCount := t.getEmptyColumnInfo(numOriginalCols)
|
||||
// Process raw rows into visual, multi-line rows
|
||||
processedRowLines := make([][][]string, len(t.rows))
|
||||
for i, rawRow := range t.rows {
|
||||
processedRowLines[i] = t.prepareContent(rawRow, t.config.Row)
|
||||
}
|
||||
ctx.rowLines = processedRowLines
|
||||
|
||||
isEmpty, visibleCount := t.getEmptyColumnInfo(ctx.rowLines, numOriginalCols)
|
||||
ctx.emptyColumns = isEmpty
|
||||
ctx.visibleColCount = visibleCount
|
||||
|
||||
mctx := &mergeContext{
|
||||
headerMerges: make(map[int]tw.MergeState),
|
||||
rowMerges: make([]map[int]tw.MergeState, len(t.rows)),
|
||||
rowMerges: make([]map[int]tw.MergeState, len(ctx.rowLines)),
|
||||
footerMerges: make(map[int]tw.MergeState),
|
||||
horzMerges: make(map[tw.Position]map[int]bool),
|
||||
}
|
||||
@@ -1081,7 +1085,6 @@ func (t *Table) prepareContexts() (*renderContext, *mergeContext, error) {
|
||||
}
|
||||
|
||||
ctx.headerLines = t.headers
|
||||
ctx.rowLines = t.rows
|
||||
ctx.footerLines = t.footers
|
||||
|
||||
if err := t.calculateAndNormalizeWidths(ctx); err != nil {
|
||||
@@ -1095,21 +1098,28 @@ func (t *Table) prepareContexts() (*renderContext, *mergeContext, error) {
|
||||
ctx.headerLines = preparedHeaderLines
|
||||
mctx.headerMerges = headerMerges
|
||||
|
||||
processedRowLines := make([][][]string, len(ctx.rowLines))
|
||||
// Re-process row lines for merges now that widths are known
|
||||
processedRowLinesWithMerges := make([][][]string, len(ctx.rowLines))
|
||||
for i, row := range ctx.rowLines {
|
||||
if mctx.rowMerges[i] == nil {
|
||||
mctx.rowMerges[i] = make(map[int]tw.MergeState)
|
||||
}
|
||||
processedRowLines[i], mctx.rowMerges[i], _ = t.prepareWithMerges(row, t.config.Row, tw.Row)
|
||||
processedRowLinesWithMerges[i], mctx.rowMerges[i], _ = t.prepareWithMerges(row, t.config.Row, tw.Row)
|
||||
}
|
||||
ctx.rowLines = processedRowLines
|
||||
ctx.rowLines = processedRowLinesWithMerges
|
||||
|
||||
t.applyHorizontalMergeWidths(tw.Header, ctx, mctx.headerMerges)
|
||||
t.applyHorizontalMerges(tw.Header, ctx, mctx.headerMerges)
|
||||
|
||||
if t.config.Row.Formatting.MergeMode&tw.MergeVertical != 0 {
|
||||
mergeMode := t.config.Row.Merging.Mode
|
||||
if mergeMode == 0 {
|
||||
mergeMode = t.config.Row.Formatting.MergeMode
|
||||
}
|
||||
|
||||
// Now check against the effective mode
|
||||
if mergeMode&tw.MergeVertical != 0 {
|
||||
t.applyVerticalMerges(ctx, mctx)
|
||||
}
|
||||
if t.config.Row.Formatting.MergeMode&tw.MergeHierarchical != 0 {
|
||||
if mergeMode&tw.MergeHierarchical != 0 {
|
||||
t.applyHierarchicalMerges(ctx, mctx)
|
||||
}
|
||||
|
||||
@@ -1166,7 +1176,7 @@ func (t *Table) prepareFooter(ctx *renderContext, mctx *mergeContext) {
|
||||
mctx.footerMerges = mergeStates
|
||||
ctx.footerLines = t.footers
|
||||
t.logger.Debugf("Base footer widths (normalized from rows/header): %v", ctx.widths[tw.Footer])
|
||||
t.applyHorizontalMergeWidths(tw.Footer, ctx, mctx.footerMerges)
|
||||
t.applyHorizontalMerges(tw.Footer, ctx, mctx.footerMerges)
|
||||
ctx.footerPrepared = true
|
||||
t.logger.Debugf("Footer preparation completed. Final footer widths: %v", ctx.widths[tw.Footer])
|
||||
}
|
||||
|
||||
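With this change a Table stores one flat []string per appended row; multi-line cells are split into visual lines later, in prepareContexts. A minimal end-to-end sketch, where NewTable, Header and Render are assumptions about the public v1.1.x API rather than part of this hunk:

package main

import (
    "os"

    "github.com/olekukonko/tablewriter"
)

func main() {
    t := tablewriter.NewTable(os.Stdout) // assumed constructor
    t.Header("Name", "Note")             // assumed helper

    // Append stores each row as a single []string; a cell containing "\n"
    // is expanded into display lines only when the table is rendered.
    t.Append([]string{"alpha", "first line\nsecond line"})
    t.Render() // assumed
}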
32  vendor/github.com/olekukonko/tablewriter/tw/cell.go  generated  vendored
@@ -2,16 +2,31 @@ package tw

// CellFormatting holds formatting options for table cells.
type CellFormatting struct {
AutoWrap int // Wrapping behavior (e.g., WrapTruncate, WrapNormal)
MergeMode int // Bitmask for merge behavior (e.g., MergeHorizontal, MergeVertical)

// Changed from bool to State
// See https://github.com/olekukonko/tablewriter/issues/261
AutoWrap int // Wrapping behavior (e.g., WrapTruncate, WrapNormal)
AutoFormat State // Enables automatic formatting (e.g., title case for headers)

// Deprecated: kept for compatibility
// will be removed soon
Alignment Align // Text alignment within the cell (e.g., Left, Right, Center)
// Deprecated: Kept for backward compatibility. Use CellConfig.Merging.Mode instead.
// This will be removed in a future version.
MergeMode int

// Deprecated: Kept for backward compatibility. Use CellConfig.Alignment instead.
// This will be removed in a future version.
Alignment Align
}

// CellMerging holds the configuration for how cells should be merged.
// This new struct replaces the deprecated MergeMode.
type CellMerging struct {
// Mode is a bitmask specifying the type of merge (e.g., MergeHorizontal, MergeVertical).
Mode int

// ByColumnIndex specifies which column indices should be considered for merging.
// If the mapper is nil or empty, merging applies to all columns (if Mode is set).
// Otherwise, only columns with an index present as a key will be merged.
ByColumnIndex Mapper[int, bool]

// ByRowIndex is reserved for future features to specify merging on specific rows.
ByRowIndex Mapper[int, bool]
}

// CellPadding defines padding settings for table cells.
@@ -47,6 +62,7 @@ type CellConfig struct {
Filter CellFilter // Function to filter cell content (renamed from Filter Filter)
Alignment CellAlignment // Alignment configuration for cells
ColMaxWidths CellWidth // Per-column maximum width overrides
Merging CellMerging // Merging holds all configuration related to cell merging.

// Deprecated: use Alignment.PerColumn instead. Will be removed in a future version.
// will be removed soon
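
Taken together, the changes in this file replace the single `MergeMode` bitmask with a dedicated `CellConfig.Merging` struct. A rough configuration sketch, assuming these field names and the `tw.NewBoolMapper` constructor added in the mapper.go hunk below; how the config is attached to a table is left to the caller:

```go
// Sketch only: request vertical merging, restricted to columns 0 and 1.
rowCfg := tw.CellConfig{
	Merging: tw.CellMerging{
		Mode:          tw.MergeVertical,
		ByColumnIndex: tw.NewBoolMapper(0, 1),
	},
}
_ = rowCfg // the deprecated Formatting.MergeMode is still honoured as a fallback
```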
33 vendor/github.com/olekukonko/tablewriter/tw/mapper.go (generated, vendored)
@@ -210,3 +210,36 @@ func (m Mapper[K, V]) SortedKeys() []K {

return keys
}

func NewBoolMapper[K comparable](keys ...K) Mapper[K, bool] {
if len(keys) == 0 {
return nil
}
mapper := NewMapper[K, bool]()
for _, key := range keys {
mapper.Set(key, true)
}
return mapper
}

func NewIntMapper[K comparable](keys ...K) Mapper[K, int] {
if len(keys) == 0 {
return nil
}
mapper := NewMapper[K, int]()
for _, key := range keys {
mapper.Set(key, 0)
}
return mapper
}

func NewIdentityMapper[K comparable](keys ...K) Mapper[K, K] {
if len(keys) == 0 {
return nil
}
mapper := NewMapper[K, K]()
for _, key := range keys {
mapper.Set(key, key)
}
return mapper
}
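
The three constructors above are small conveniences over `NewMapper`; roughly, they behave like this (snippet assumes only the `Has` and `Keys` methods that are already used elsewhere in this diff):

```go
cols := tw.NewBoolMapper(0, 2) // Mapper[int, bool] with {0: true, 2: true}
_ = cols.Has(2)                // true
_ = cols.Has(1)                // false: 1 was never added

ints := tw.NewIntMapper("a", "b") // Mapper[string, int] with both keys set to 0
ids := tw.NewIdentityMapper(3, 5) // Mapper[int, int] with {3: 3, 5: 5}
_ = ints.Keys()                   // keys, in unspecified order
_ = ids
```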
83 vendor/github.com/olekukonko/tablewriter/zoo.go (generated, vendored)
@@ -18,7 +18,23 @@ import (
// Parameters ctx and mctx hold rendering and merge state.
// No return value.
func (t *Table) applyHierarchicalMerges(ctx *renderContext, mctx *mergeContext) {
ctx.logger.Debug("Applying hierarchical merges (left-to-right vertical flow - snapshot comparison)")
// First, ensure we should even run this logic.
// Check both the new CellMerging struct and the deprecated Formatting field.
mergeMode := t.config.Row.Merging.Mode
if mergeMode == 0 {
mergeMode = t.config.Row.Formatting.MergeMode
}
if !(mergeMode&tw.MergeHierarchical != 0) {
return
}

mergeColumnMapper := t.config.Row.Merging.ByColumnIndex
if mergeColumnMapper != nil {
ctx.logger.Debugf("Applying hierarchical merges ONLY to specified columns: %v", mergeColumnMapper.Keys())
} else {
ctx.logger.Debug("Applying hierarchical merges (left-to-right vertical flow - snapshot comparison)")
}

if len(ctx.rowLines) <= 1 {
ctx.logger.Debug("Skipping hierarchical merges - less than 2 rows")
return
@@ -41,6 +57,12 @@ func (t *Table) applyHierarchicalMerges(ctx *renderContext, mctx *mergeContext)
leftCellContinuedHierarchical := false

for c := 0; c < numCols; c++ {
// If a column map is specified, skip columns that are not in it.
if mergeColumnMapper != nil && !mergeColumnMapper.Has(c) {
leftCellContinuedHierarchical = false // Reset hierarchy tracking
continue
}

if mctx.rowMerges[r] == nil {
mctx.rowMerges[r] = make(map[int]tw.MergeState)
}
@@ -146,15 +168,15 @@ func (t *Table) applyHierarchicalMerges(ctx *renderContext, mctx *mergeContext)
ctx.logger.Debug("Hierarchical merge processing completed")
}

// applyHorizontalMergeWidths adjusts column widths for horizontal merges.
// applyHorizontalMerges adjusts column widths for horizontal merges.
// Parameters include position, ctx for rendering, and mergeStates for merges.
// No return value.
func (t *Table) applyHorizontalMergeWidths(position tw.Position, ctx *renderContext, mergeStates map[int]tw.MergeState) {
func (t *Table) applyHorizontalMerges(position tw.Position, ctx *renderContext, mergeStates map[int]tw.MergeState) {
if mergeStates == nil {
t.logger.Debugf("applyHorizontalMergeWidths: Skipping %s - no merge states", position)
t.logger.Debugf("applyHorizontalMerges: Skipping %s - no merge states", position)
return
}
t.logger.Debugf("applyHorizontalMergeWidths: Applying HMerge width recalc for %s", position)
t.logger.Debugf("applyHorizontalMerges: Applying HMerge width recalc for %s", position)

numCols := ctx.numCols
targetWidthsMap := ctx.widths[position]
@@ -211,16 +233,31 @@ func (t *Table) applyHorizontalMergeWidths(position tw.Position, ctx *renderCont
}
}
}
ctx.logger.Debugf("applyHorizontalMergeWidths: Final widths for %s: %v", position, targetWidthsMap)
ctx.logger.Debugf("applyHorizontalMerges: Final widths for %s: %v", position, targetWidthsMap)
}

// applyVerticalMerges applies vertical merges to row content.
// Parameters ctx and mctx hold rendering and merge state.
// No return value.
func (t *Table) applyVerticalMerges(ctx *renderContext, mctx *mergeContext) {
ctx.logger.Debugf("Applying vertical merges across %d rows", len(ctx.rowLines))
numCols := ctx.numCols
// First, ensure we should even run this logic.
// Check both the new CellMerging struct and the deprecated Formatting field.
mergeMode := t.config.Row.Merging.Mode
if mergeMode == 0 {
mergeMode = t.config.Row.Formatting.MergeMode
}
if !(mergeMode&tw.MergeVertical != 0) {
return
}

mergeColumnMapper := t.config.Row.Merging.ByColumnIndex
if mergeColumnMapper != nil {
ctx.logger.Debugf("Applying vertical merges ONLY to specified columns: %v", mergeColumnMapper.Keys())
} else {
ctx.logger.Debugf("Applying vertical merges across %d rows", len(ctx.rowLines))
}

numCols := ctx.numCols
mergeStartRow := make(map[int]int)
mergeStartContent := make(map[int]string)

@@ -243,6 +280,11 @@ func (t *Table) applyVerticalMerges(ctx *renderContext, mctx *mergeContext) {
currentLineContent := ctx.rowLines[i]

for col := 0; col < numCols; col++ {
// If a column map is specified, skip columns that are not in it.
if mergeColumnMapper != nil && !mergeColumnMapper.Has(col) {
continue
}

// Join all lines of the cell to compare full content
var currentVal strings.Builder
for _, line := range currentLineContent {
@@ -579,11 +621,6 @@ func (t *Table) calculateAndNormalizeWidths(ctx *renderContext) error {
ctx.logger.Debugf("calculateAndNormalizeWidths: Computing and normalizing widths for %d columns. Compact: %v",
ctx.numCols, t.config.Behavior.Compact.Merge.Enabled())

// Initialize width maps
// t.headerWidths = tw.NewMapper[int, int]()
// t.rowWidths = tw.NewMapper[int, int]()
// t.footerWidths = tw.NewMapper[int, int]()

// Compute content-based widths for each section
for _, lines := range ctx.headerLines {
t.updateWidths(lines, t.headerWidths, t.config.Header.Padding)
@@ -1438,7 +1475,7 @@ func (t *Table) getColMaxWidths(position tw.Position) tw.CellWidth {
// getEmptyColumnInfo identifies empty columns in row data.
// Parameter numOriginalCols specifies the total column count.
// Returns a boolean slice (true for empty) and visible column count.
func (t *Table) getEmptyColumnInfo(numOriginalCols int) (isEmpty []bool, visibleColCount int) {
func (t *Table) getEmptyColumnInfo(processedRows [][][]string, numOriginalCols int) (isEmpty []bool, visibleColCount int) {
isEmpty = make([]bool, numOriginalCols)
for i := range isEmpty {
isEmpty[i] = true
@@ -1453,9 +1490,9 @@ func (t *Table) getEmptyColumnInfo(numOriginalCols int) (isEmpty []bool, visible
return isEmpty, visibleColCount
}

t.logger.Debugf("getEmptyColumnInfo: Checking %d rows for %d columns...", len(t.rows), numOriginalCols)
t.logger.Debugf("getEmptyColumnInfo: Checking %d rows for %d columns...", len(processedRows), numOriginalCols)

for rowIdx, logicalRow := range t.rows {
for rowIdx, logicalRow := range processedRows {
for lineIdx, visualLine := range logicalRow {
for colIdx, cellContent := range visualLine {
if colIdx >= numOriginalCols {
@@ -1567,15 +1604,6 @@ func (t *Table) processVariadic(elements []any) []any {
return elements
}

// toStringLines converts raw cells to formatted lines for table output
func (t *Table) toStringLines(row any, config tw.CellConfig) ([][]string, error) {
cells, err := t.convertCellsToStrings(row, config)
if err != nil {
return nil, err
}
return t.prepareContent(cells, config), nil
}

// updateWidths updates the width map based on cell content and padding.
// Parameters include row content, widths map, and padding configuration.
// No return value.
@@ -1598,10 +1626,9 @@ func (t *Table) updateWidths(row []string, widths tw.Mapper[int, int], padding t
lines := strings.Split(cell, tw.NewLine)
contentWidth := 0
for _, line := range lines {
// Always measure the raw line width, because the renderer
// will receive the raw line. Do not trim before measuring.
lineWidth := twwidth.Width(line)
if t.config.Behavior.TrimSpace.Enabled() {
lineWidth = twwidth.Width(t.Trimmer(line))
}
if lineWidth > contentWidth {
contentWidth = lineWidth
}
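
To make the column scoping above concrete, here is a hypothetical input and the behaviour implied by the `applyVerticalMerges` logic in this hunk (the expected outcome is reasoned from the code, not output captured from the library):

```go
// Logical rows, one visual line per cell:
//   {"EU", "Paris"}
//   {"EU", "Paris"}
//   {"US", "Paris"}
cfg := tw.CellMerging{
	Mode:          tw.MergeVertical,
	ByColumnIndex: tw.NewBoolMapper(0), // only column 0 is considered
}
// Column 0: the two adjacent "EU" cells form one vertical merge span.
// Column 1: the repeated "Paris" cells are left untouched, because column 1
// is not present in ByColumnIndex and the loop skips it.
_ = cfg
```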
137 vendor/github.com/rivo/uniseg/README.md (generated, vendored)
@@ -1,137 +0,0 @@
# Unicode Text Segmentation for Go

[Go Reference](https://pkg.go.dev/github.com/rivo/uniseg)
[Go Report Card](https://goreportcard.com/report/github.com/rivo/uniseg)

This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 15.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).

## Background

### Grapheme Clusters

In Go, [strings are read-only slices of bytes](https://go.dev/blog/strings). They can be turned into Unicode code points using the `for` loop or by casting: `[]rune(str)`. However, multiple code points may be combined into one user-perceived character or what the Unicode specification calls "grapheme cluster". Here are some examples:

|String|Bytes (UTF-8)|Code points (runes)|Grapheme clusters|
|-|-|-|-|
|Käse|6 bytes: `4b 61 cc 88 73 65`|5 code points: `4b 61 308 73 65`|4 clusters: `[4b],[61 308],[73],[65]`|
|🏳️‍🌈|14 bytes: `f0 9f 8f b3 ef b8 8f e2 80 8d f0 9f 8c 88`|4 code points: `1f3f3 fe0f 200d 1f308`|1 cluster: `[1f3f3 fe0f 200d 1f308]`|
|🇩🇪|8 bytes: `f0 9f 87 a9 f0 9f 87 aa`|2 code points: `1f1e9 1f1ea`|1 cluster: `[1f1e9 1f1ea]`|

This package provides tools to iterate over these grapheme clusters. This may be used to determine the number of user-perceived characters, to split strings in their intended places, or to extract individual characters which form a unit.

### Word Boundaries

Word boundaries are used in a number of different contexts. The most familiar ones are selection (double-click mouse selection), cursor movement ("move to next word" control-arrow keys), and the dialog option "Whole Word Search" for search and replace. They are also used in database queries, to determine whether elements are within a certain number of words of one another. Searching may also use word boundaries in determining matching items. This package provides tools to determine word boundaries within strings.

### Sentence Boundaries

Sentence boundaries are often used for triple-click or some other method of selecting or iterating through blocks of text that are larger than single words. They are also used to determine whether words occur within the same sentence in database queries. This package provides tools to determine sentence boundaries within strings.

### Line Breaking

Line breaking, also known as word wrapping, is the process of breaking a section of text into lines such that it will fit in the available width of a page, window or other display area. This package provides tools to determine where a string may or may not be broken and where it must be broken (for example after newline characters).

### Monospace Width

Most terminals or text displays / text editors using a monospace font (for example source code editors) use a fixed width for each character. Some characters such as emojis or characters found in Asian and other languages may take up more than one character cell. This package provides tools to determine the number of cells a string will take up when displayed in a monospace font. See [here](https://pkg.go.dev/github.com/rivo/uniseg#hdr-Monospace_Width) for more information.

## Installation

```bash
go get github.com/rivo/uniseg
```

## Examples

### Counting Characters in a String

```go
n := uniseg.GraphemeClusterCount("🇩🇪🏳️‍🌈")
fmt.Println(n)
// 2
```

### Calculating the Monospace String Width

```go
width := uniseg.StringWidth("🇩🇪🏳️‍🌈!")
fmt.Println(width)
// 5
```

### Using the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) Class

This is the most convenient method of iterating over grapheme clusters:

```go
gr := uniseg.NewGraphemes("👍🏼!")
for gr.Next() {
fmt.Printf("%x ", gr.Runes())
}
// [1f44d 1f3fc] [21]
```

### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function

This avoids allocating a new `Graphemes` object but it requires the handling of states and boundaries:

```go
str := "🇩🇪🏳️‍🌈"
state := -1
var c string
for len(str) > 0 {
c, str, _, state = uniseg.StepString(str, state)
fmt.Printf("%x ", []rune(c))
}
// [1f1e9 1f1ea] [1f3f3 fe0f 200d 1f308]
```

### Advanced Examples

The [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class offers the most convenient way to access all functionality of this package. But in some cases, it may be better to use the specialized functions directly. For example, if you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):

```go
str := "Hello, world!"
state := -1
var c string
for len(str) > 0 {
c, str, state = uniseg.FirstWordInString(str, state)
fmt.Printf("(%s)\n", c)
}
// (Hello)
// (,)
// ( )
// (world)
// (!)
```

Similarly, use

- [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString) for grapheme cluster determination only,
- [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only, and
- [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries).

If you're only interested in the width of characters, use [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString). It is much faster than using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step), [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString), or the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class because it does not include the logic for word / sentence / line boundaries.

Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString):

```go
fmt.Println(uniseg.ReverseString("🇩🇪🏳️‍🌈"))
// 🏳️‍🌈🇩🇪
```

## Documentation

Refer to https://pkg.go.dev/github.com/rivo/uniseg for the package's documentation.

## Dependencies

This package does not depend on any packages outside the standard library.

## Sponsor this Project

[Become a Sponsor on GitHub](https://github.com/sponsors/rivo?metadata_source=uniseg_readme) to support this project!

## Your Feedback

Add your issue here on GitHub, preferably before submitting any PRs. Feel free to get in touch if you have any questions.
108 vendor/github.com/rivo/uniseg/doc.go (generated, vendored)
@@ -1,108 +0,0 @@
/*
Package uniseg implements Unicode Text Segmentation, Unicode Line Breaking, and
string width calculation for monospace fonts. Unicode Text Segmentation conforms
to Unicode Standard Annex #29 (https://unicode.org/reports/tr29/) and Unicode
Line Breaking conforms to Unicode Standard Annex #14
(https://unicode.org/reports/tr14/).

In short, using this package, you can split a string into grapheme clusters
(what people would usually refer to as a "character"), into words, and into
sentences. Or, in its simplest case, this package allows you to count the number
of characters in a string, especially when it contains complex characters such
as emojis, combining characters, or characters from Asian, Arabic, Hebrew, or
other languages. Additionally, you can use it to implement line breaking (or
"word wrapping"), that is, to determine where text can be broken over to the
next line when the width of the line is not big enough to fit the entire text.
Finally, you can use it to calculate the display width of a string for monospace
fonts.

# Getting Started

If you just want to count the number of characters in a string, you can use
[GraphemeClusterCount]. If you want to determine the display width of a string,
you can use [StringWidth]. If you want to iterate over a string, you can use
[Step], [StepString], or the [Graphemes] class (more convenient but less
performant). This will provide you with all information: grapheme clusters,
word boundaries, sentence boundaries, line breaks, and monospace character
widths. The specialized functions [FirstGraphemeCluster],
[FirstGraphemeClusterInString], [FirstWord], [FirstWordInString],
[FirstSentence], and [FirstSentenceInString] can be used if only one type of
information is needed.

# Grapheme Clusters

Consider the rainbow flag emoji: 🏳️‍🌈. On most modern systems, it appears as one
character. But its string representation actually has 14 bytes, so counting
bytes (or using len("🏳️‍🌈")) will not work as expected. Counting runes won't,
either: The flag has 4 Unicode code points, thus 4 runes. The stdlib function
utf8.RuneCountInString("🏳️‍🌈") and len([]rune("🏳️‍🌈")) will both return 4.

The [GraphemeClusterCount] function will return 1 for the rainbow flag emoji.
The Graphemes class and a variety of functions in this package will allow you to
split strings into their grapheme clusters.

# Word Boundaries

Word boundaries are used in a number of different contexts. The most familiar
ones are selection (double-click mouse selection), cursor movement ("move to
next word" control-arrow keys), and the dialog option "Whole Word Search" for
search and replace. This package provides methods for determining word
boundaries.

# Sentence Boundaries

Sentence boundaries are often used for triple-click or some other method of
selecting or iterating through blocks of text that are larger than single words.
They are also used to determine whether words occur within the same sentence in
database queries. This package provides methods for determining sentence
boundaries.

# Line Breaking

Line breaking, also known as word wrapping, is the process of breaking a section
of text into lines such that it will fit in the available width of a page,
window or other display area. This package provides methods to determine the
positions in a string where a line must be broken, may be broken, or must not be
broken.

# Monospace Width

Monospace width, as referred to in this package, is the width of a string in a
monospace font. This is commonly used in terminal user interfaces or text
displays or editors that don't support proportional fonts. A width of 1
corresponds to a single character cell. The C function [wcswidth()] and its
implementations in other programming languages are in widespread use for the same
purpose. However, there is no standard for the calculation of such widths, and
this package differs from wcswidth() in a number of ways, presumably to generate
more visually pleasing results.

To start, we assume that every code point has a width of 1, with the following
exceptions:

- Code points with grapheme cluster break properties Control, CR, LF, Extend,
and ZWJ have a width of 0.
- U+2E3A, Two-Em Dash, has a width of 3.
- U+2E3B, Three-Em Dash, has a width of 4.
- Characters with the East-Asian Width properties "Fullwidth" (F) and "Wide"
(W) have a width of 2. (Properties "Ambiguous" (A) and "Neutral" (N) both
have a width of 1.)
- Code points with grapheme cluster break property Regional Indicator have a
width of 2.
- Code points with grapheme cluster break property Extended Pictographic have
a width of 2, unless their Emoji Presentation flag is "No", in which case
the width is 1.

For Hangul grapheme clusters composed of conjoining Jamo and for Regional
Indicators (flags), all code points except the first one have a width of 0. For
grapheme clusters starting with an Extended Pictographic, any additional code
point will force a total width of 2, except if the Variation Selector-15
(U+FE0E) is included, in which case the total width is always 1. Grapheme
clusters ending with Variation Selector-16 (U+FE0F) have a width of 2.

Note that whether these widths appear correct depends on your application's
render engine, to which extent it conforms to the Unicode Standard, and its
choice of font.

[wcswidth()]: https://man7.org/linux/man-pages/man3/wcswidth.3.html
*/
package uniseg
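
With the vendored copy of rivo/uniseg removed, the width rules documented above are easiest to sanity-check against the package itself. A small sketch using the public `StringWidth` function referenced in that comment; the expected values follow from the rules listed there:

```go
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	// "a" + combining diaeresis: the Extend code point has width 0, so 4 cells.
	fmt.Println(uniseg.StringWidth("Ka\u0308se")) // 4

	// East-Asian Wide characters occupy two cells each.
	fmt.Println(uniseg.StringWidth("世界")) // 4

	// A flag is two Regional Indicators forming one 2-cell grapheme cluster.
	fmt.Println(uniseg.StringWidth("🇩🇪")) // 2
}
```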
2588 vendor/github.com/rivo/uniseg/eastasianwidth.go (generated, vendored)
File diff suppressed because it is too large
295 vendor/github.com/rivo/uniseg/emojipresentation.go (generated, vendored)
@@ -1,295 +0,0 @@
|
||||
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
|
||||
|
||||
package uniseg
|
||||
|
||||
// emojiPresentation are taken from
|
||||
//
|
||||
// and
|
||||
// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
|
||||
// ("Extended_Pictographic" only)
|
||||
// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
|
||||
// license agreement.
|
||||
var emojiPresentation = [][3]int{
|
||||
{0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done
|
||||
{0x23E9, 0x23EC, prEmojiPresentation}, // E0.6 [4] (⏩..⏬) fast-forward button..fast down button
|
||||
{0x23F0, 0x23F0, prEmojiPresentation}, // E0.6 [1] (⏰) alarm clock
|
||||
{0x23F3, 0x23F3, prEmojiPresentation}, // E0.6 [1] (⏳) hourglass not done
|
||||
{0x25FD, 0x25FE, prEmojiPresentation}, // E0.6 [2] (◽..◾) white medium-small square..black medium-small square
|
||||
{0x2614, 0x2615, prEmojiPresentation}, // E0.6 [2] (☔..☕) umbrella with rain drops..hot beverage
|
||||
{0x2648, 0x2653, prEmojiPresentation}, // E0.6 [12] (♈..♓) Aries..Pisces
|
||||
{0x267F, 0x267F, prEmojiPresentation}, // E0.6 [1] (♿) wheelchair symbol
|
||||
{0x2693, 0x2693, prEmojiPresentation}, // E0.6 [1] (⚓) anchor
|
||||
{0x26A1, 0x26A1, prEmojiPresentation}, // E0.6 [1] (⚡) high voltage
|
||||
{0x26AA, 0x26AB, prEmojiPresentation}, // E0.6 [2] (⚪..⚫) white circle..black circle
|
||||
{0x26BD, 0x26BE, prEmojiPresentation}, // E0.6 [2] (⚽..⚾) soccer ball..baseball
|
||||
{0x26C4, 0x26C5, prEmojiPresentation}, // E0.6 [2] (⛄..⛅) snowman without snow..sun behind cloud
|
||||
{0x26CE, 0x26CE, prEmojiPresentation}, // E0.6 [1] (⛎) Ophiuchus
|
||||
{0x26D4, 0x26D4, prEmojiPresentation}, // E0.6 [1] (⛔) no entry
|
||||
{0x26EA, 0x26EA, prEmojiPresentation}, // E0.6 [1] (⛪) church
|
||||
{0x26F2, 0x26F3, prEmojiPresentation}, // E0.6 [2] (⛲..⛳) fountain..flag in hole
|
||||
{0x26F5, 0x26F5, prEmojiPresentation}, // E0.6 [1] (⛵) sailboat
|
||||
{0x26FA, 0x26FA, prEmojiPresentation}, // E0.6 [1] (⛺) tent
|
||||
{0x26FD, 0x26FD, prEmojiPresentation}, // E0.6 [1] (⛽) fuel pump
|
||||
{0x2705, 0x2705, prEmojiPresentation}, // E0.6 [1] (✅) check mark button
|
||||
{0x270A, 0x270B, prEmojiPresentation}, // E0.6 [2] (✊..✋) raised fist..raised hand
|
||||
{0x2728, 0x2728, prEmojiPresentation}, // E0.6 [1] (✨) sparkles
|
||||
{0x274C, 0x274C, prEmojiPresentation}, // E0.6 [1] (❌) cross mark
|
||||
{0x274E, 0x274E, prEmojiPresentation}, // E0.6 [1] (❎) cross mark button
|
||||
{0x2753, 0x2755, prEmojiPresentation}, // E0.6 [3] (❓..❕) red question mark..white exclamation mark
|
||||
{0x2757, 0x2757, prEmojiPresentation}, // E0.6 [1] (❗) red exclamation mark
|
||||
{0x2795, 0x2797, prEmojiPresentation}, // E0.6 [3] (➕..➗) plus..divide
|
||||
{0x27B0, 0x27B0, prEmojiPresentation}, // E0.6 [1] (➰) curly loop
|
||||
{0x27BF, 0x27BF, prEmojiPresentation}, // E1.0 [1] (➿) double curly loop
|
||||
{0x2B1B, 0x2B1C, prEmojiPresentation}, // E0.6 [2] (⬛..⬜) black large square..white large square
|
||||
{0x2B50, 0x2B50, prEmojiPresentation}, // E0.6 [1] (⭐) star
|
||||
{0x2B55, 0x2B55, prEmojiPresentation}, // E0.6 [1] (⭕) hollow red circle
|
||||
{0x1F004, 0x1F004, prEmojiPresentation}, // E0.6 [1] (🀄) mahjong red dragon
|
||||
{0x1F0CF, 0x1F0CF, prEmojiPresentation}, // E0.6 [1] (🃏) joker
|
||||
{0x1F18E, 0x1F18E, prEmojiPresentation}, // E0.6 [1] (🆎) AB button (blood type)
|
||||
{0x1F191, 0x1F19A, prEmojiPresentation}, // E0.6 [10] (🆑..🆚) CL button..VS button
|
||||
{0x1F1E6, 0x1F1FF, prEmojiPresentation}, // E0.0 [26] (🇦..🇿) regional indicator symbol letter a..regional indicator symbol letter z
|
||||
{0x1F201, 0x1F201, prEmojiPresentation}, // E0.6 [1] (🈁) Japanese “here” button
|
||||
{0x1F21A, 0x1F21A, prEmojiPresentation}, // E0.6 [1] (🈚) Japanese “free of charge” button
|
||||
{0x1F22F, 0x1F22F, prEmojiPresentation}, // E0.6 [1] (🈯) Japanese “reserved” button
|
||||
{0x1F232, 0x1F236, prEmojiPresentation}, // E0.6 [5] (🈲..🈶) Japanese “prohibited” button..Japanese “not free of charge” button
|
||||
{0x1F238, 0x1F23A, prEmojiPresentation}, // E0.6 [3] (🈸..🈺) Japanese “application” button..Japanese “open for business” button
|
||||
{0x1F250, 0x1F251, prEmojiPresentation}, // E0.6 [2] (🉐..🉑) Japanese “bargain” button..Japanese “acceptable” button
|
||||
{0x1F300, 0x1F30C, prEmojiPresentation}, // E0.6 [13] (🌀..🌌) cyclone..milky way
|
||||
{0x1F30D, 0x1F30E, prEmojiPresentation}, // E0.7 [2] (🌍..🌎) globe showing Europe-Africa..globe showing Americas
|
||||
{0x1F30F, 0x1F30F, prEmojiPresentation}, // E0.6 [1] (🌏) globe showing Asia-Australia
|
||||
{0x1F310, 0x1F310, prEmojiPresentation}, // E1.0 [1] (🌐) globe with meridians
|
||||
{0x1F311, 0x1F311, prEmojiPresentation}, // E0.6 [1] (🌑) new moon
|
||||
{0x1F312, 0x1F312, prEmojiPresentation}, // E1.0 [1] (🌒) waxing crescent moon
|
||||
{0x1F313, 0x1F315, prEmojiPresentation}, // E0.6 [3] (🌓..🌕) first quarter moon..full moon
|
||||
{0x1F316, 0x1F318, prEmojiPresentation}, // E1.0 [3] (🌖..🌘) waning gibbous moon..waning crescent moon
|
||||
{0x1F319, 0x1F319, prEmojiPresentation}, // E0.6 [1] (🌙) crescent moon
|
||||
{0x1F31A, 0x1F31A, prEmojiPresentation}, // E1.0 [1] (🌚) new moon face
|
||||
{0x1F31B, 0x1F31B, prEmojiPresentation}, // E0.6 [1] (🌛) first quarter moon face
|
||||
{0x1F31C, 0x1F31C, prEmojiPresentation}, // E0.7 [1] (🌜) last quarter moon face
|
||||
{0x1F31D, 0x1F31E, prEmojiPresentation}, // E1.0 [2] (🌝..🌞) full moon face..sun with face
|
||||
{0x1F31F, 0x1F320, prEmojiPresentation}, // E0.6 [2] (🌟..🌠) glowing star..shooting star
|
||||
{0x1F32D, 0x1F32F, prEmojiPresentation}, // E1.0 [3] (🌭..🌯) hot dog..burrito
|
||||
{0x1F330, 0x1F331, prEmojiPresentation}, // E0.6 [2] (🌰..🌱) chestnut..seedling
|
||||
{0x1F332, 0x1F333, prEmojiPresentation}, // E1.0 [2] (🌲..🌳) evergreen tree..deciduous tree
|
||||
{0x1F334, 0x1F335, prEmojiPresentation}, // E0.6 [2] (🌴..🌵) palm tree..cactus
|
||||
{0x1F337, 0x1F34A, prEmojiPresentation}, // E0.6 [20] (🌷..🍊) tulip..tangerine
|
||||
{0x1F34B, 0x1F34B, prEmojiPresentation}, // E1.0 [1] (🍋) lemon
|
||||
{0x1F34C, 0x1F34F, prEmojiPresentation}, // E0.6 [4] (🍌..🍏) banana..green apple
|
||||
{0x1F350, 0x1F350, prEmojiPresentation}, // E1.0 [1] (🍐) pear
|
||||
{0x1F351, 0x1F37B, prEmojiPresentation}, // E0.6 [43] (🍑..🍻) peach..clinking beer mugs
|
||||
{0x1F37C, 0x1F37C, prEmojiPresentation}, // E1.0 [1] (🍼) baby bottle
|
||||
{0x1F37E, 0x1F37F, prEmojiPresentation}, // E1.0 [2] (🍾..🍿) bottle with popping cork..popcorn
|
||||
{0x1F380, 0x1F393, prEmojiPresentation}, // E0.6 [20] (🎀..🎓) ribbon..graduation cap
|
||||
{0x1F3A0, 0x1F3C4, prEmojiPresentation}, // E0.6 [37] (🎠..🏄) carousel horse..person surfing
|
||||
{0x1F3C5, 0x1F3C5, prEmojiPresentation}, // E1.0 [1] (🏅) sports medal
|
||||
{0x1F3C6, 0x1F3C6, prEmojiPresentation}, // E0.6 [1] (🏆) trophy
|
||||
{0x1F3C7, 0x1F3C7, prEmojiPresentation}, // E1.0 [1] (🏇) horse racing
|
||||
{0x1F3C8, 0x1F3C8, prEmojiPresentation}, // E0.6 [1] (🏈) american football
|
||||
{0x1F3C9, 0x1F3C9, prEmojiPresentation}, // E1.0 [1] (🏉) rugby football
|
||||
{0x1F3CA, 0x1F3CA, prEmojiPresentation}, // E0.6 [1] (🏊) person swimming
|
||||
{0x1F3CF, 0x1F3D3, prEmojiPresentation}, // E1.0 [5] (🏏..🏓) cricket game..ping pong
|
||||
{0x1F3E0, 0x1F3E3, prEmojiPresentation}, // E0.6 [4] (🏠..🏣) house..Japanese post office
|
||||
{0x1F3E4, 0x1F3E4, prEmojiPresentation}, // E1.0 [1] (🏤) post office
|
||||
{0x1F3E5, 0x1F3F0, prEmojiPresentation}, // E0.6 [12] (🏥..🏰) hospital..castle
|
||||
{0x1F3F4, 0x1F3F4, prEmojiPresentation}, // E1.0 [1] (🏴) black flag
|
||||
{0x1F3F8, 0x1F407, prEmojiPresentation}, // E1.0 [16] (🏸..🐇) badminton..rabbit
|
||||
{0x1F408, 0x1F408, prEmojiPresentation}, // E0.7 [1] (🐈) cat
|
||||
{0x1F409, 0x1F40B, prEmojiPresentation}, // E1.0 [3] (🐉..🐋) dragon..whale
|
||||
{0x1F40C, 0x1F40E, prEmojiPresentation}, // E0.6 [3] (🐌..🐎) snail..horse
|
||||
{0x1F40F, 0x1F410, prEmojiPresentation}, // E1.0 [2] (🐏..🐐) ram..goat
|
||||
{0x1F411, 0x1F412, prEmojiPresentation}, // E0.6 [2] (🐑..🐒) ewe..monkey
|
||||
{0x1F413, 0x1F413, prEmojiPresentation}, // E1.0 [1] (🐓) rooster
|
||||
{0x1F414, 0x1F414, prEmojiPresentation}, // E0.6 [1] (🐔) chicken
|
||||
{0x1F415, 0x1F415, prEmojiPresentation}, // E0.7 [1] (🐕) dog
|
||||
{0x1F416, 0x1F416, prEmojiPresentation}, // E1.0 [1] (🐖) pig
|
||||
{0x1F417, 0x1F429, prEmojiPresentation}, // E0.6 [19] (🐗..🐩) boar..poodle
|
||||
{0x1F42A, 0x1F42A, prEmojiPresentation}, // E1.0 [1] (🐪) camel
|
||||
{0x1F42B, 0x1F43E, prEmojiPresentation}, // E0.6 [20] (🐫..🐾) two-hump camel..paw prints
|
||||
{0x1F440, 0x1F440, prEmojiPresentation}, // E0.6 [1] (👀) eyes
|
||||
{0x1F442, 0x1F464, prEmojiPresentation}, // E0.6 [35] (👂..👤) ear..bust in silhouette
|
||||
{0x1F465, 0x1F465, prEmojiPresentation}, // E1.0 [1] (👥) busts in silhouette
|
||||
{0x1F466, 0x1F46B, prEmojiPresentation}, // E0.6 [6] (👦..👫) boy..woman and man holding hands
|
||||
{0x1F46C, 0x1F46D, prEmojiPresentation}, // E1.0 [2] (👬..👭) men holding hands..women holding hands
|
||||
{0x1F46E, 0x1F4AC, prEmojiPresentation}, // E0.6 [63] (👮..💬) police officer..speech balloon
|
||||
{0x1F4AD, 0x1F4AD, prEmojiPresentation}, // E1.0 [1] (💭) thought balloon
|
||||
{0x1F4AE, 0x1F4B5, prEmojiPresentation}, // E0.6 [8] (💮..💵) white flower..dollar banknote
|
||||
{0x1F4B6, 0x1F4B7, prEmojiPresentation}, // E1.0 [2] (💶..💷) euro banknote..pound banknote
|
||||
{0x1F4B8, 0x1F4EB, prEmojiPresentation}, // E0.6 [52] (💸..📫) money with wings..closed mailbox with raised flag
|
||||
{0x1F4EC, 0x1F4ED, prEmojiPresentation}, // E0.7 [2] (📬..📭) open mailbox with raised flag..open mailbox with lowered flag
|
||||
{0x1F4EE, 0x1F4EE, prEmojiPresentation}, // E0.6 [1] (📮) postbox
|
||||
{0x1F4EF, 0x1F4EF, prEmojiPresentation}, // E1.0 [1] (📯) postal horn
|
||||
{0x1F4F0, 0x1F4F4, prEmojiPresentation}, // E0.6 [5] (📰..📴) newspaper..mobile phone off
|
||||
{0x1F4F5, 0x1F4F5, prEmojiPresentation}, // E1.0 [1] (📵) no mobile phones
|
||||
{0x1F4F6, 0x1F4F7, prEmojiPresentation}, // E0.6 [2] (📶..📷) antenna bars..camera
|
||||
{0x1F4F8, 0x1F4F8, prEmojiPresentation}, // E1.0 [1] (📸) camera with flash
|
||||
{0x1F4F9, 0x1F4FC, prEmojiPresentation}, // E0.6 [4] (📹..📼) video camera..videocassette
|
||||
{0x1F4FF, 0x1F502, prEmojiPresentation}, // E1.0 [4] (📿..🔂) prayer beads..repeat single button
|
||||
{0x1F503, 0x1F503, prEmojiPresentation}, // E0.6 [1] (🔃) clockwise vertical arrows
|
||||
{0x1F504, 0x1F507, prEmojiPresentation}, // E1.0 [4] (🔄..🔇) counterclockwise arrows button..muted speaker
|
||||
{0x1F508, 0x1F508, prEmojiPresentation}, // E0.7 [1] (🔈) speaker low volume
|
||||
{0x1F509, 0x1F509, prEmojiPresentation}, // E1.0 [1] (🔉) speaker medium volume
|
||||
{0x1F50A, 0x1F514, prEmojiPresentation}, // E0.6 [11] (🔊..🔔) speaker high volume..bell
|
||||
{0x1F515, 0x1F515, prEmojiPresentation}, // E1.0 [1] (🔕) bell with slash
|
||||
{0x1F516, 0x1F52B, prEmojiPresentation}, // E0.6 [22] (🔖..🔫) bookmark..water pistol
|
||||
{0x1F52C, 0x1F52D, prEmojiPresentation}, // E1.0 [2] (🔬..🔭) microscope..telescope
|
||||
{0x1F52E, 0x1F53D, prEmojiPresentation}, // E0.6 [16] (🔮..🔽) crystal ball..downwards button
|
||||
{0x1F54B, 0x1F54E, prEmojiPresentation}, // E1.0 [4] (🕋..🕎) kaaba..menorah
|
||||
{0x1F550, 0x1F55B, prEmojiPresentation}, // E0.6 [12] (🕐..🕛) one o’clock..twelve o’clock
|
||||
{0x1F55C, 0x1F567, prEmojiPresentation}, // E0.7 [12] (🕜..🕧) one-thirty..twelve-thirty
|
||||
{0x1F57A, 0x1F57A, prEmojiPresentation}, // E3.0 [1] (🕺) man dancing
|
||||
{0x1F595, 0x1F596, prEmojiPresentation}, // E1.0 [2] (🖕..🖖) middle finger..vulcan salute
|
||||
{0x1F5A4, 0x1F5A4, prEmojiPresentation}, // E3.0 [1] (🖤) black heart
|
||||
{0x1F5FB, 0x1F5FF, prEmojiPresentation}, // E0.6 [5] (🗻..🗿) mount fuji..moai
|
||||
{0x1F600, 0x1F600, prEmojiPresentation}, // E1.0 [1] (😀) grinning face
|
||||
{0x1F601, 0x1F606, prEmojiPresentation}, // E0.6 [6] (😁..😆) beaming face with smiling eyes..grinning squinting face
|
||||
{0x1F607, 0x1F608, prEmojiPresentation}, // E1.0 [2] (😇..😈) smiling face with halo..smiling face with horns
|
||||
{0x1F609, 0x1F60D, prEmojiPresentation}, // E0.6 [5] (😉..😍) winking face..smiling face with heart-eyes
|
||||
{0x1F60E, 0x1F60E, prEmojiPresentation}, // E1.0 [1] (😎) smiling face with sunglasses
|
||||
{0x1F60F, 0x1F60F, prEmojiPresentation}, // E0.6 [1] (😏) smirking face
|
||||
{0x1F610, 0x1F610, prEmojiPresentation}, // E0.7 [1] (😐) neutral face
|
||||
{0x1F611, 0x1F611, prEmojiPresentation}, // E1.0 [1] (😑) expressionless face
|
||||
{0x1F612, 0x1F614, prEmojiPresentation}, // E0.6 [3] (😒..😔) unamused face..pensive face
|
||||
{0x1F615, 0x1F615, prEmojiPresentation}, // E1.0 [1] (😕) confused face
|
||||
{0x1F616, 0x1F616, prEmojiPresentation}, // E0.6 [1] (😖) confounded face
|
||||
{0x1F617, 0x1F617, prEmojiPresentation}, // E1.0 [1] (😗) kissing face
|
||||
{0x1F618, 0x1F618, prEmojiPresentation}, // E0.6 [1] (😘) face blowing a kiss
|
||||
{0x1F619, 0x1F619, prEmojiPresentation}, // E1.0 [1] (😙) kissing face with smiling eyes
|
||||
{0x1F61A, 0x1F61A, prEmojiPresentation}, // E0.6 [1] (😚) kissing face with closed eyes
|
||||
{0x1F61B, 0x1F61B, prEmojiPresentation}, // E1.0 [1] (😛) face with tongue
|
||||
{0x1F61C, 0x1F61E, prEmojiPresentation}, // E0.6 [3] (😜..😞) winking face with tongue..disappointed face
|
||||
{0x1F61F, 0x1F61F, prEmojiPresentation}, // E1.0 [1] (😟) worried face
|
||||
{0x1F620, 0x1F625, prEmojiPresentation}, // E0.6 [6] (😠..😥) angry face..sad but relieved face
|
||||
{0x1F626, 0x1F627, prEmojiPresentation}, // E1.0 [2] (😦..😧) frowning face with open mouth..anguished face
|
||||
{0x1F628, 0x1F62B, prEmojiPresentation}, // E0.6 [4] (😨..😫) fearful face..tired face
|
||||
{0x1F62C, 0x1F62C, prEmojiPresentation}, // E1.0 [1] (😬) grimacing face
|
||||
{0x1F62D, 0x1F62D, prEmojiPresentation}, // E0.6 [1] (😭) loudly crying face
|
||||
{0x1F62E, 0x1F62F, prEmojiPresentation}, // E1.0 [2] (😮..😯) face with open mouth..hushed face
|
||||
{0x1F630, 0x1F633, prEmojiPresentation}, // E0.6 [4] (😰..😳) anxious face with sweat..flushed face
|
||||
{0x1F634, 0x1F634, prEmojiPresentation}, // E1.0 [1] (😴) sleeping face
|
||||
{0x1F635, 0x1F635, prEmojiPresentation}, // E0.6 [1] (😵) face with crossed-out eyes
|
||||
{0x1F636, 0x1F636, prEmojiPresentation}, // E1.0 [1] (😶) face without mouth
|
||||
{0x1F637, 0x1F640, prEmojiPresentation}, // E0.6 [10] (😷..🙀) face with medical mask..weary cat
|
||||
{0x1F641, 0x1F644, prEmojiPresentation}, // E1.0 [4] (🙁..🙄) slightly frowning face..face with rolling eyes
|
||||
{0x1F645, 0x1F64F, prEmojiPresentation}, // E0.6 [11] (🙅..🙏) person gesturing NO..folded hands
|
||||
{0x1F680, 0x1F680, prEmojiPresentation}, // E0.6 [1] (🚀) rocket
|
||||
{0x1F681, 0x1F682, prEmojiPresentation}, // E1.0 [2] (🚁..🚂) helicopter..locomotive
|
||||
{0x1F683, 0x1F685, prEmojiPresentation}, // E0.6 [3] (🚃..🚅) railway car..bullet train
|
||||
{0x1F686, 0x1F686, prEmojiPresentation}, // E1.0 [1] (🚆) train
|
||||
{0x1F687, 0x1F687, prEmojiPresentation}, // E0.6 [1] (🚇) metro
|
||||
{0x1F688, 0x1F688, prEmojiPresentation}, // E1.0 [1] (🚈) light rail
|
||||
{0x1F689, 0x1F689, prEmojiPresentation}, // E0.6 [1] (🚉) station
|
||||
{0x1F68A, 0x1F68B, prEmojiPresentation}, // E1.0 [2] (🚊..🚋) tram..tram car
|
||||
{0x1F68C, 0x1F68C, prEmojiPresentation}, // E0.6 [1] (🚌) bus
|
||||
{0x1F68D, 0x1F68D, prEmojiPresentation}, // E0.7 [1] (🚍) oncoming bus
|
||||
{0x1F68E, 0x1F68E, prEmojiPresentation}, // E1.0 [1] (🚎) trolleybus
|
||||
{0x1F68F, 0x1F68F, prEmojiPresentation}, // E0.6 [1] (🚏) bus stop
|
||||
{0x1F690, 0x1F690, prEmojiPresentation}, // E1.0 [1] (🚐) minibus
|
||||
{0x1F691, 0x1F693, prEmojiPresentation}, // E0.6 [3] (🚑..🚓) ambulance..police car
|
||||
{0x1F694, 0x1F694, prEmojiPresentation}, // E0.7 [1] (🚔) oncoming police car
|
||||
{0x1F695, 0x1F695, prEmojiPresentation}, // E0.6 [1] (🚕) taxi
|
||||
{0x1F696, 0x1F696, prEmojiPresentation}, // E1.0 [1] (🚖) oncoming taxi
|
||||
{0x1F697, 0x1F697, prEmojiPresentation}, // E0.6 [1] (🚗) automobile
|
||||
{0x1F698, 0x1F698, prEmojiPresentation}, // E0.7 [1] (🚘) oncoming automobile
|
||||
{0x1F699, 0x1F69A, prEmojiPresentation}, // E0.6 [2] (🚙..🚚) sport utility vehicle..delivery truck
|
||||
{0x1F69B, 0x1F6A1, prEmojiPresentation}, // E1.0 [7] (🚛..🚡) articulated lorry..aerial tramway
|
||||
{0x1F6A2, 0x1F6A2, prEmojiPresentation}, // E0.6 [1] (🚢) ship
|
||||
{0x1F6A3, 0x1F6A3, prEmojiPresentation}, // E1.0 [1] (🚣) person rowing boat
|
||||
{0x1F6A4, 0x1F6A5, prEmojiPresentation}, // E0.6 [2] (🚤..🚥) speedboat..horizontal traffic light
|
||||
{0x1F6A6, 0x1F6A6, prEmojiPresentation}, // E1.0 [1] (🚦) vertical traffic light
|
||||
{0x1F6A7, 0x1F6AD, prEmojiPresentation}, // E0.6 [7] (🚧..🚭) construction..no smoking
|
||||
{0x1F6AE, 0x1F6B1, prEmojiPresentation}, // E1.0 [4] (🚮..🚱) litter in bin sign..non-potable water
|
||||
{0x1F6B2, 0x1F6B2, prEmojiPresentation}, // E0.6 [1] (🚲) bicycle
|
||||
{0x1F6B3, 0x1F6B5, prEmojiPresentation}, // E1.0 [3] (🚳..🚵) no bicycles..person mountain biking
|
||||
{0x1F6B6, 0x1F6B6, prEmojiPresentation}, // E0.6 [1] (🚶) person walking
|
||||
{0x1F6B7, 0x1F6B8, prEmojiPresentation}, // E1.0 [2] (🚷..🚸) no pedestrians..children crossing
|
||||
{0x1F6B9, 0x1F6BE, prEmojiPresentation}, // E0.6 [6] (🚹..🚾) men’s room..water closet
|
||||
{0x1F6BF, 0x1F6BF, prEmojiPresentation}, // E1.0 [1] (🚿) shower
|
||||
{0x1F6C0, 0x1F6C0, prEmojiPresentation}, // E0.6 [1] (🛀) person taking bath
|
||||
{0x1F6C1, 0x1F6C5, prEmojiPresentation}, // E1.0 [5] (🛁..🛅) bathtub..left luggage
|
||||
{0x1F6CC, 0x1F6CC, prEmojiPresentation}, // E1.0 [1] (🛌) person in bed
|
||||
{0x1F6D0, 0x1F6D0, prEmojiPresentation}, // E1.0 [1] (🛐) place of worship
|
||||
{0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
|
||||
{0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple
|
||||
{0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator
|
||||
{0x1F6DC, 0x1F6DC, prEmojiPresentation}, // E15.0 [1] (🛜) wireless
|
||||
{0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
|
||||
{0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
|
||||
{0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
|
||||
{0x1F6F7, 0x1F6F8, prEmojiPresentation}, // E5.0 [2] (🛷..🛸) sled..flying saucer
|
||||
{0x1F6F9, 0x1F6F9, prEmojiPresentation}, // E11.0 [1] (🛹) skateboard
|
||||
{0x1F6FA, 0x1F6FA, prEmojiPresentation}, // E12.0 [1] (🛺) auto rickshaw
|
||||
{0x1F6FB, 0x1F6FC, prEmojiPresentation}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
|
||||
{0x1F7E0, 0x1F7EB, prEmojiPresentation}, // E12.0 [12] (🟠..🟫) orange circle..brown square
|
||||
{0x1F7F0, 0x1F7F0, prEmojiPresentation}, // E14.0 [1] (🟰) heavy equals sign
|
||||
{0x1F90C, 0x1F90C, prEmojiPresentation}, // E13.0 [1] (🤌) pinched fingers
|
||||
{0x1F90D, 0x1F90F, prEmojiPresentation}, // E12.0 [3] (🤍..🤏) white heart..pinching hand
|
||||
{0x1F910, 0x1F918, prEmojiPresentation}, // E1.0 [9] (🤐..🤘) zipper-mouth face..sign of the horns
|
||||
{0x1F919, 0x1F91E, prEmojiPresentation}, // E3.0 [6] (🤙..🤞) call me hand..crossed fingers
|
||||
{0x1F91F, 0x1F91F, prEmojiPresentation}, // E5.0 [1] (🤟) love-you gesture
|
||||
{0x1F920, 0x1F927, prEmojiPresentation}, // E3.0 [8] (🤠..🤧) cowboy hat face..sneezing face
|
||||
{0x1F928, 0x1F92F, prEmojiPresentation}, // E5.0 [8] (🤨..🤯) face with raised eyebrow..exploding head
|
||||
{0x1F930, 0x1F930, prEmojiPresentation}, // E3.0 [1] (🤰) pregnant woman
|
||||
{0x1F931, 0x1F932, prEmojiPresentation}, // E5.0 [2] (🤱..🤲) breast-feeding..palms up together
|
||||
{0x1F933, 0x1F93A, prEmojiPresentation}, // E3.0 [8] (🤳..🤺) selfie..person fencing
|
||||
{0x1F93C, 0x1F93E, prEmojiPresentation}, // E3.0 [3] (🤼..🤾) people wrestling..person playing handball
|
||||
{0x1F93F, 0x1F93F, prEmojiPresentation}, // E12.0 [1] (🤿) diving mask
|
||||
{0x1F940, 0x1F945, prEmojiPresentation}, // E3.0 [6] (🥀..🥅) wilted flower..goal net
|
||||
{0x1F947, 0x1F94B, prEmojiPresentation}, // E3.0 [5] (🥇..🥋) 1st place medal..martial arts uniform
|
||||
{0x1F94C, 0x1F94C, prEmojiPresentation}, // E5.0 [1] (🥌) curling stone
|
||||
{0x1F94D, 0x1F94F, prEmojiPresentation}, // E11.0 [3] (🥍..🥏) lacrosse..flying disc
|
||||
{0x1F950, 0x1F95E, prEmojiPresentation}, // E3.0 [15] (🥐..🥞) croissant..pancakes
|
||||
{0x1F95F, 0x1F96B, prEmojiPresentation}, // E5.0 [13] (🥟..🥫) dumpling..canned food
|
||||
{0x1F96C, 0x1F970, prEmojiPresentation}, // E11.0 [5] (🥬..🥰) leafy green..smiling face with hearts
|
||||
{0x1F971, 0x1F971, prEmojiPresentation}, // E12.0 [1] (🥱) yawning face
|
||||
{0x1F972, 0x1F972, prEmojiPresentation}, // E13.0 [1] (🥲) smiling face with tear
|
||||
{0x1F973, 0x1F976, prEmojiPresentation}, // E11.0 [4] (🥳..🥶) partying face..cold face
|
||||
{0x1F977, 0x1F978, prEmojiPresentation}, // E13.0 [2] (🥷..🥸) ninja..disguised face
|
||||
{0x1F979, 0x1F979, prEmojiPresentation}, // E14.0 [1] (🥹) face holding back tears
|
||||
{0x1F97A, 0x1F97A, prEmojiPresentation}, // E11.0 [1] (🥺) pleading face
|
||||
{0x1F97B, 0x1F97B, prEmojiPresentation}, // E12.0 [1] (🥻) sari
|
||||
{0x1F97C, 0x1F97F, prEmojiPresentation}, // E11.0 [4] (🥼..🥿) lab coat..flat shoe
|
||||
{0x1F980, 0x1F984, prEmojiPresentation}, // E1.0 [5] (🦀..🦄) crab..unicorn
|
||||
{0x1F985, 0x1F991, prEmojiPresentation}, // E3.0 [13] (🦅..🦑) eagle..squid
|
||||
{0x1F992, 0x1F997, prEmojiPresentation}, // E5.0 [6] (🦒..🦗) giraffe..cricket
|
||||
{0x1F998, 0x1F9A2, prEmojiPresentation}, // E11.0 [11] (🦘..🦢) kangaroo..swan
|
||||
{0x1F9A3, 0x1F9A4, prEmojiPresentation}, // E13.0 [2] (🦣..🦤) mammoth..dodo
|
||||
{0x1F9A5, 0x1F9AA, prEmojiPresentation}, // E12.0 [6] (🦥..🦪) sloth..oyster
|
||||
{0x1F9AB, 0x1F9AD, prEmojiPresentation}, // E13.0 [3] (🦫..🦭) beaver..seal
|
||||
{0x1F9AE, 0x1F9AF, prEmojiPresentation}, // E12.0 [2] (🦮..🦯) guide dog..white cane
|
||||
{0x1F9B0, 0x1F9B9, prEmojiPresentation}, // E11.0 [10] (🦰..🦹) red hair..supervillain
|
||||
{0x1F9BA, 0x1F9BF, prEmojiPresentation}, // E12.0 [6] (🦺..🦿) safety vest..mechanical leg
|
||||
{0x1F9C0, 0x1F9C0, prEmojiPresentation}, // E1.0 [1] (🧀) cheese wedge
|
||||
{0x1F9C1, 0x1F9C2, prEmojiPresentation}, // E11.0 [2] (🧁..🧂) cupcake..salt
|
||||
{0x1F9C3, 0x1F9CA, prEmojiPresentation}, // E12.0 [8] (🧃..🧊) beverage box..ice
|
||||
{0x1F9CB, 0x1F9CB, prEmojiPresentation}, // E13.0 [1] (🧋) bubble tea
|
||||
{0x1F9CC, 0x1F9CC, prEmojiPresentation}, // E14.0 [1] (🧌) troll
|
||||
{0x1F9CD, 0x1F9CF, prEmojiPresentation}, // E12.0 [3] (🧍..🧏) person standing..deaf person
|
||||
{0x1F9D0, 0x1F9E6, prEmojiPresentation}, // E5.0 [23] (🧐..🧦) face with monocle..socks
|
||||
{0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
|
||||
{0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
|
||||
{0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal
|
||||
{0x1FA75, 0x1FA77, prEmojiPresentation}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
|
||||
{0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
|
||||
{0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch
|
||||
{0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
|
||||
{0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
|
||||
{0x1FA87, 0x1FA88, prEmojiPresentation}, // E15.0 [2] (🪇..🪈) maracas..flute
|
||||
{0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
|
||||
{0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock
|
||||
{0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
|
||||
{0x1FAAD, 0x1FAAF, prEmojiPresentation}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
|
||||
{0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather
|
||||
{0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
|
||||
{0x1FABB, 0x1FABD, prEmojiPresentation}, // E15.0 [3] (🪻..🪽) hyacinth..wing
|
||||
{0x1FABF, 0x1FABF, prEmojiPresentation}, // E15.0 [1] (🪿) goose
|
||||
{0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
|
||||
{0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
|
||||
{0x1FACE, 0x1FACF, prEmojiPresentation}, // E15.0 [2] (🫎..🫏) moose..donkey
|
||||
{0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot
|
||||
{0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
|
||||
{0x1FADA, 0x1FADB, prEmojiPresentation}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
|
||||
{0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles
|
||||
{0x1FAE8, 0x1FAE8, prEmojiPresentation}, // E15.0 [1] (🫨) shaking face
|
||||
{0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
|
||||
{0x1FAF7, 0x1FAF8, prEmojiPresentation}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
|
||||
}
|
||||
215 vendor/github.com/rivo/uniseg/gen_breaktest.go (generated, vendored)
@@ -1,215 +0,0 @@
|
||||
//go:build generate
|
||||
|
||||
// This program generates a Go file containing a slice of test cases based on the
|
||||
// Unicode Character Database auxiliary data files. The command line arguments
|
||||
// are as follows:
|
||||
//
|
||||
// 1. The name of the Unicode data file (just the filename, without extension).
|
||||
// 2. The name of the locally generated Go file.
|
||||
// 3. The name of the slice containing the test cases.
|
||||
// 4. The name of the generator, for logging purposes.
|
||||
//
|
||||
//go:generate go run gen_breaktest.go GraphemeBreakTest graphemebreak_test.go graphemeBreakTestCases graphemes
|
||||
//go:generate go run gen_breaktest.go WordBreakTest wordbreak_test.go wordBreakTestCases words
|
||||
//go:generate go run gen_breaktest.go SentenceBreakTest sentencebreak_test.go sentenceBreakTestCases sentences
|
||||
//go:generate go run gen_breaktest.go LineBreakTest linebreak_test.go lineBreakTestCases lines
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"time"
|
||||
)
|
||||
|
||||
// We want to test against a specific version rather than the latest. When the
|
||||
// package is upgraded to a new version, change these to generate new tests.
|
||||
const (
|
||||
testCaseURL = `https://www.unicode.org/Public/15.0.0/ucd/auxiliary/%s.txt`
|
||||
)
|
||||
|
||||
func main() {
|
||||
if len(os.Args) < 5 {
|
||||
fmt.Println("Not enough arguments, see code for details")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
log.SetPrefix("gen_breaktest (" + os.Args[4] + "): ")
|
||||
log.SetFlags(0)
|
||||
|
||||
// Read text of testcases and parse into Go source code.
|
||||
src, err := parse(fmt.Sprintf(testCaseURL, os.Args[1]))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Format the Go code.
|
||||
formatted, err := format.Source(src)
|
||||
if err != nil {
|
||||
log.Fatalln("gofmt:", err)
|
||||
}
|
||||
|
||||
// Write it out.
|
||||
log.Print("Writing to ", os.Args[2])
|
||||
if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
// parse reads a break text file, either from a local file or from a URL. It
|
||||
// parses the file data into Go source code representing the test cases.
|
||||
func parse(url string) ([]byte, error) {
|
||||
log.Printf("Parsing %s", url)
|
||||
res, err := http.Get(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
body := res.Body
|
||||
defer body.Close()
|
||||
|
||||
buf := new(bytes.Buffer)
|
||||
buf.Grow(120 << 10)
|
||||
buf.WriteString(`// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
|
||||
|
||||
package uniseg
|
||||
|
||||
// ` + os.Args[3] + ` are Grapheme testcases taken from
|
||||
// ` + url + `
|
||||
// on ` + time.Now().Format("January 2, 2006") + `. See
|
||||
// https://www.unicode.org/license.html for the Unicode license agreement.
|
||||
var ` + os.Args[3] + ` = []testCase {
|
||||
`)
|
||||
|
||||
sc := bufio.NewScanner(body)
|
||||
num := 1
|
||||
var line []byte
|
||||
original := make([]byte, 0, 64)
|
||||
expected := make([]byte, 0, 64)
|
||||
for sc.Scan() {
|
||||
num++
|
||||
line = sc.Bytes()
|
||||
if len(line) == 0 || line[0] == '#' {
|
||||
continue
|
||||
}
|
||||
var comment []byte
|
||||
if i := bytes.IndexByte(line, '#'); i >= 0 {
|
||||
comment = bytes.TrimSpace(line[i+1:])
|
||||
line = bytes.TrimSpace(line[:i])
|
||||
}
|
||||
original, expected, err := parseRuneSequence(line, original[:0], expected[:0])
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf(`line %d: %v: %q`, num, err, line)
|
||||
}
|
||||
fmt.Fprintf(buf, "\t{original: \"%s\", expected: %s}, // %s\n", original, expected, comment)
|
||||
}
|
||||
if err := sc.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check for final "# EOF", useful check if we're streaming via HTTP
|
||||
if !bytes.Equal(line, []byte("# EOF")) {
|
||||
return nil, fmt.Errorf(`line %d: expected "# EOF" as final line, got %q`, num, line)
|
||||
}
|
||||
buf.WriteString("}\n")
|
||||
return buf.Bytes(), nil
|
||||
}
|
||||
|
||||
// Used by parseRuneSequence to match input via bytes.HasPrefix.
|
||||
var (
|
||||
prefixBreak = []byte("÷ ")
|
||||
prefixDontBreak = []byte("× ")
|
||||
breakOk = []byte("÷")
|
||||
breakNo = []byte("×")
|
||||
)
|
||||
|
||||
// parseRuneSequence parses a rune + breaking opportunity sequence from b
|
||||
// and appends the Go code for testcase.original to orig
|
||||
// and appends the Go code for testcase.expected to exp.
|
||||
// It returns the new orig and exp slices.
|
||||
//
|
||||
// E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷"
|
||||
// it will append
|
||||
//
|
||||
// "\u0020\u0308\U0001F1E6"
|
||||
//
|
||||
// and "[][]rune{{0x0020,0x0308},{0x1F1E6},}"
|
||||
// to orig and exp respectively.
|
||||
//
|
||||
// The formatting of exp is expected to be cleaned up by gofmt or format.Source.
|
||||
// Note we explicitly require the sequence to start with ÷ and we implicitly
|
||||
// require it to end with ÷.
|
||||
func parseRuneSequence(b, orig, exp []byte) ([]byte, []byte, error) {
|
||||
// Check for and remove first ÷ or ×.
|
||||
if !bytes.HasPrefix(b, prefixBreak) && !bytes.HasPrefix(b, prefixDontBreak) {
|
||||
return nil, nil, errors.New("expected ÷ or × as first character")
|
||||
}
|
||||
if bytes.HasPrefix(b, prefixBreak) {
|
||||
b = b[len(prefixBreak):]
|
||||
} else {
|
||||
b = b[len(prefixDontBreak):]
|
||||
}
|
||||
|
||||
boundary := true
|
||||
exp = append(exp, "[][]rune{"...)
|
||||
for len(b) > 0 {
|
||||
if boundary {
|
||||
exp = append(exp, '{')
|
||||
}
|
||||
exp = append(exp, "0x"...)
|
||||
// Find end of hex digits.
|
||||
var i int
|
||||
for i = 0; i < len(b) && b[i] != ' '; i++ {
|
||||
if d := b[i]; ('0' <= d || d <= '9') ||
|
||||
('A' <= d || d <= 'F') ||
|
||||
('a' <= d || d <= 'f') {
|
||||
continue
|
||||
}
|
||||
return nil, nil, errors.New("bad hex digit")
|
||||
}
|
||||
switch i {
|
||||
case 4:
|
||||
orig = append(orig, "\\u"...)
|
||||
case 5:
|
||||
orig = append(orig, "\\U000"...)
|
||||
default:
|
||||
return nil, nil, errors.New("unsupport code point hex length")
|
||||
}
|
||||
orig = append(orig, b[:i]...)
|
||||
exp = append(exp, b[:i]...)
|
||||
b = b[i:]
|
||||
|
||||
// Check for space between hex and ÷ or ×.
|
||||
if len(b) < 1 || b[0] != ' ' {
|
||||
return nil, nil, errors.New("bad input")
|
||||
}
|
||||
b = b[1:]
|
||||
|
||||
// Check for next boundary.
|
||||
switch {
|
||||
case bytes.HasPrefix(b, breakOk):
|
||||
boundary = true
|
||||
b = b[len(breakOk):]
|
||||
case bytes.HasPrefix(b, breakNo):
|
||||
boundary = false
|
||||
b = b[len(breakNo):]
|
||||
default:
|
||||
return nil, nil, errors.New("missing ÷ or ×")
|
||||
}
|
||||
if boundary {
|
||||
exp = append(exp, '}')
|
||||
}
|
||||
exp = append(exp, ',')
|
||||
if len(b) > 0 && b[0] == ' ' {
|
||||
b = b[1:]
|
||||
}
|
||||
}
|
||||
exp = append(exp, '}')
|
||||
return orig, exp, nil
|
||||
}
|
||||
261 vendor/github.com/rivo/uniseg/gen_properties.go (generated, vendored)
@@ -1,261 +0,0 @@
|
||||
//go:build generate
|
||||
|
||||
// This program generates a property file in Go from Unicode Character
// Database auxiliary data files. The command line arguments are as follows:
|
||||
//
|
||||
// 1. The name of the Unicode data file (just the filename, without extension).
|
||||
// Can be "-" (to skip) if the emoji flag is included.
|
||||
// 2. The name of the locally generated Go file.
|
||||
// 3. The name of the slice mapping code points to properties.
|
||||
// 4. The name of the generator, for logging purposes.
|
||||
// 5. (Optional) Flags, comma-separated. The following flags are available:
|
||||
// - "emojis=<property>": include the specified emoji properties (e.g.
|
||||
// "Extended_Pictographic").
|
||||
// - "gencat": include general category properties.
|
||||
//
|
||||
//go:generate go run gen_properties.go auxiliary/GraphemeBreakProperty graphemeproperties.go graphemeCodePoints graphemes emojis=Extended_Pictographic
|
||||
//go:generate go run gen_properties.go auxiliary/WordBreakProperty wordproperties.go workBreakCodePoints words emojis=Extended_Pictographic
|
||||
//go:generate go run gen_properties.go auxiliary/SentenceBreakProperty sentenceproperties.go sentenceBreakCodePoints sentences
|
||||
//go:generate go run gen_properties.go LineBreak lineproperties.go lineBreakCodePoints lines gencat
|
||||
//go:generate go run gen_properties.go EastAsianWidth eastasianwidth.go eastAsianWidth eastasianwidth
|
||||
//go:generate go run gen_properties.go - emojipresentation.go emojiPresentation emojipresentation emojis=Emoji_Presentation
|
||||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// We want to test against a specific version rather than the latest. When the
|
||||
// package is upgraded to a new version, change these to generate new tests.
|
||||
const (
|
||||
propertyURL = `https://www.unicode.org/Public/15.0.0/ucd/%s.txt`
|
||||
emojiURL = `https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt`
|
||||
)
|
||||
|
||||
// The regular expression for a line containing a code point range property.
|
||||
var propertyPattern = regexp.MustCompile(`^([0-9A-F]{4,6})(\.\.([0-9A-F]{4,6}))?\s*;\s*([A-Za-z0-9_]+)\s*#\s(.+)$`)
|
||||
|
||||
func main() {
|
||||
if len(os.Args) < 5 {
|
||||
fmt.Println("Not enough arguments, see code for details")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
log.SetPrefix("gen_properties (" + os.Args[4] + "): ")
|
||||
log.SetFlags(0)
|
||||
|
||||
// Parse flags.
|
||||
flags := make(map[string]string)
|
||||
if len(os.Args) >= 6 {
|
||||
for _, flag := range strings.Split(os.Args[5], ",") {
|
||||
flagFields := strings.Split(flag, "=")
|
||||
if len(flagFields) == 1 {
|
||||
flags[flagFields[0]] = "yes"
|
||||
} else {
|
||||
flags[flagFields[0]] = flagFields[1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse the text file and generate Go source code from it.
|
||||
_, includeGeneralCategory := flags["gencat"]
|
||||
var mainURL string
|
||||
if os.Args[1] != "-" {
|
||||
mainURL = fmt.Sprintf(propertyURL, os.Args[1])
|
||||
}
|
||||
src, err := parse(mainURL, flags["emojis"], includeGeneralCategory)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Format the Go code.
|
||||
formatted, err := format.Source([]byte(src))
|
||||
if err != nil {
|
||||
log.Fatal("gofmt:", err)
|
||||
}
|
||||
|
||||
// Save it to the (local) target file.
|
||||
log.Print("Writing to ", os.Args[2])
|
||||
if err := ioutil.WriteFile(os.Args[2], formatted, 0644); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
// parse parses the Unicode Properties text files located at the given URLs and
|
||||
// returns their equivalent Go source code to be used in the uniseg package. If
|
||||
// "emojiProperty" is not an empty string, emoji code points for that emoji
|
||||
// property (e.g. "Extended_Pictographic") will be included. In those cases, you
|
||||
// may pass an empty "propertyURL" to skip parsing the main properties file. If
|
||||
// "includeGeneralCategory" is true, the Unicode General Category property will
|
||||
// be extracted from the comments and included in the output.
|
||||
func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (string, error) {
|
||||
if propertyURL == "" && emojiProperty == "" {
|
||||
return "", errors.New("no properties to parse")
|
||||
}
|
||||
|
||||
// Temporary buffer to hold properties.
|
||||
var properties [][4]string
|
||||
|
||||
// Open the first URL.
|
||||
if propertyURL != "" {
|
||||
log.Printf("Parsing %s", propertyURL)
|
||||
res, err := http.Get(propertyURL)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
in1 := res.Body
|
||||
defer in1.Close()
|
||||
|
||||
// Parse it.
|
||||
scanner := bufio.NewScanner(in1)
|
||||
num := 0
|
||||
for scanner.Scan() {
|
||||
num++
|
||||
line := strings.TrimSpace(scanner.Text())
|
||||
|
||||
// Skip comments and empty lines.
|
||||
if strings.HasPrefix(line, "#") || line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
// Everything else must be a code point range, a property and a comment.
|
||||
from, to, property, comment, err := parseProperty(line)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("%s line %d: %v", os.Args[4], num, err)
|
||||
}
|
||||
properties = append(properties, [4]string{from, to, property, comment})
|
||||
}
|
||||
if err := scanner.Err(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
// Open the second URL.
|
||||
if emojiProperty != "" {
|
||||
log.Printf("Parsing %s", emojiURL)
|
||||
res, err := http.Get(emojiURL)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
in2 := res.Body
|
||||
defer in2.Close()
|
||||
|
||||
// Parse it.
|
||||
scanner := bufio.NewScanner(in2)
|
||||
num := 0
|
||||
for scanner.Scan() {
|
||||
num++
|
||||
line := scanner.Text()
|
||||
|
||||
// Skip comments, empty lines, and everything not containing the
// requested emoji property (e.g. "Extended_Pictographic").
|
||||
if strings.HasPrefix(line, "#") || line == "" || !strings.Contains(line, emojiProperty) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Everything else must be a code point range, a property and a comment.
|
||||
from, to, property, comment, err := parseProperty(line)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("emojis line %d: %v", num, err)
|
||||
}
|
||||
properties = append(properties, [4]string{from, to, property, comment})
|
||||
}
|
||||
if err := scanner.Err(); err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
// Avoid overflow during binary search.
|
||||
if len(properties) >= 1<<31 {
|
||||
return "", errors.New("too many properties")
|
||||
}
|
||||
|
||||
// Sort properties.
|
||||
sort.Slice(properties, func(i, j int) bool {
|
||||
left, _ := strconv.ParseUint(properties[i][0], 16, 64)
|
||||
right, _ := strconv.ParseUint(properties[j][0], 16, 64)
|
||||
return left < right
|
||||
})
|
||||
|
||||
// Header.
|
||||
var (
|
||||
buf bytes.Buffer
|
||||
emojiComment string
|
||||
)
|
||||
columns := 3
|
||||
if includeGeneralCategory {
|
||||
columns = 4
|
||||
}
|
||||
if emojiURL != "" {
|
||||
emojiComment = `
|
||||
// and
|
||||
// ` + emojiURL + `
|
||||
// ("Extended_Pictographic" only)`
|
||||
}
|
||||
buf.WriteString(`// Code generated via go generate from gen_properties.go. DO NOT EDIT.
|
||||
|
||||
package uniseg
|
||||
|
||||
// ` + os.Args[3] + ` are taken from
|
||||
// ` + propertyURL + emojiComment + `
|
||||
// on ` + time.Now().Format("January 2, 2006") + `. See https://www.unicode.org/license.html for the Unicode
|
||||
// license agreement.
|
||||
var ` + os.Args[3] + ` = [][` + strconv.Itoa(columns) + `]int{
|
||||
`)
|
||||
|
||||
// Properties.
|
||||
for _, prop := range properties {
|
||||
if includeGeneralCategory {
|
||||
generalCategory := "gc" + prop[3][:2]
|
||||
if generalCategory == "gcL&" {
|
||||
generalCategory = "gcLC"
|
||||
}
|
||||
prop[3] = prop[3][3:]
|
||||
fmt.Fprintf(&buf, "{0x%s,0x%s,%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), generalCategory, prop[3])
|
||||
} else {
|
||||
fmt.Fprintf(&buf, "{0x%s,0x%s,%s}, // %s\n", prop[0], prop[1], translateProperty("pr", prop[2]), prop[3])
|
||||
}
|
||||
}
|
||||
|
||||
// Tail.
|
||||
buf.WriteString("}")
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
// parseProperty parses a line of the Unicode properties text file containing a
// property for a code point range and returns it along with its comment.
func parseProperty(line string) (from, to, property, comment string, err error) {
fields := propertyPattern.FindStringSubmatch(line)
if fields == nil {
err = errors.New("no property found")
return
}
from = fields[1]
to = fields[3]
if to == "" {
to = from
}
property = fields[4]
comment = fields[5]
return
}
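// For reference, a hedged sketch (not part of the removed file) of the kind of
// line the regular expression above targets and what parseProperty extracts
// from it. The sample line follows the UCD format rather than quoting a
// specific data file version; demoParseProperty is a hypothetical helper that
// would live inside this generator's package.
func demoParseProperty() {
	line := "1F1E6..1F1FF  ; Regional_Indicator # So  [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z"
	from, to, property, comment, err := parseProperty(line)
	fmt.Println(from, to, property, err) // 1F1E6 1F1FF Regional_Indicator <nil>
	fmt.Println(comment)                 // So  [26] REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z
}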
|
||||
|
||||
// translateProperty translates a property name as used in the Unicode data file
|
||||
// to a variable used in the Go code.
|
||||
func translateProperty(prefix, property string) string {
|
||||
return prefix + strings.ReplaceAll(property, "_", "")
|
||||
}
|
||||
331 vendor/github.com/rivo/uniseg/grapheme.go (generated, vendored)
@@ -1,331 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// Graphemes implements an iterator over Unicode grapheme clusters, or
|
||||
// user-perceived characters. While iterating, it also provides information
|
||||
// about word boundaries, sentence boundaries, line breaks, and monospace
|
||||
// character widths.
|
||||
//
|
||||
// After constructing the class via [NewGraphemes] for a given string "str",
|
||||
// [Graphemes.Next] is called for every grapheme cluster in a loop until it
|
||||
// returns false. Inside the loop, information about the grapheme cluster as
|
||||
// well as boundary information and character width is available via the various
|
||||
// methods (see examples below).
|
||||
//
|
||||
// This class basically wraps the [StepString] parser and provides a convenient
|
||||
// interface to it. If you are only interested in some parts of this package's
|
||||
// functionality, using the specialized functions starting with "First" is
|
||||
// almost always faster.
|
||||
type Graphemes struct {
|
||||
// The original string.
|
||||
original string
|
||||
|
||||
// The remaining string to be parsed.
|
||||
remaining string
|
||||
|
||||
// The current grapheme cluster.
|
||||
cluster string
|
||||
|
||||
// The byte offset of the current grapheme cluster relative to the original
|
||||
// string.
|
||||
offset int
|
||||
|
||||
// The current boundary information of the [Step] parser.
|
||||
boundaries int
|
||||
|
||||
// The current state of the [Step] parser.
|
||||
state int
|
||||
}
|
||||
|
||||
// NewGraphemes returns a new grapheme cluster iterator.
func NewGraphemes(str string) *Graphemes {
return &Graphemes{
original: str,
remaining: str,
state: -1,
}
}
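// A minimal usage sketch, not part of the removed file: driving the iterator
// from client code using only the NewGraphemes, Next, Str, Positions and Width
// API defined in this file.
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	g := uniseg.NewGraphemes("Käse🧀")
	for g.Next() {
		from, to := g.Positions()
		fmt.Printf("%q bytes [%d:%d] width %d\n", g.Str(), from, to, g.Width())
	}
}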
|
||||
|
||||
// Next advances the iterator by one grapheme cluster and returns false if no
|
||||
// clusters are left. This function must be called before the first cluster is
|
||||
// accessed.
|
||||
func (g *Graphemes) Next() bool {
|
||||
if len(g.remaining) == 0 {
|
||||
// We're already past the end.
|
||||
g.state = -2
|
||||
g.cluster = ""
|
||||
return false
|
||||
}
|
||||
g.offset += len(g.cluster)
|
||||
g.cluster, g.remaining, g.boundaries, g.state = StepString(g.remaining, g.state)
|
||||
return true
|
||||
}
|
||||
|
||||
// Runes returns a slice of runes (code points) which corresponds to the current
|
||||
// grapheme cluster. If the iterator is already past the end or [Graphemes.Next]
|
||||
// has not yet been called, nil is returned.
|
||||
func (g *Graphemes) Runes() []rune {
|
||||
if g.state < 0 {
|
||||
return nil
|
||||
}
|
||||
return []rune(g.cluster)
|
||||
}
|
||||
|
||||
// Str returns a substring of the original string which corresponds to the
|
||||
// current grapheme cluster. If the iterator is already past the end or
|
||||
// [Graphemes.Next] has not yet been called, an empty string is returned.
|
||||
func (g *Graphemes) Str() string {
|
||||
return g.cluster
|
||||
}
|
||||
|
||||
// Bytes returns a byte slice which corresponds to the current grapheme cluster.
|
||||
// If the iterator is already past the end or [Graphemes.Next] has not yet been
|
||||
// called, nil is returned.
|
||||
func (g *Graphemes) Bytes() []byte {
|
||||
if g.state < 0 {
|
||||
return nil
|
||||
}
|
||||
return []byte(g.cluster)
|
||||
}
|
||||
|
||||
// Positions returns the interval of the current grapheme cluster as byte
|
||||
// positions into the original string. The first returned value "from" indexes
|
||||
// the first byte and the second returned value "to" indexes the first byte that
|
||||
// is not included anymore, i.e. str[from:to] is the current grapheme cluster of
|
||||
// the original string "str". If [Graphemes.Next] has not yet been called, both
|
||||
// values are 0. If the iterator is already past the end, both values are 1.
|
||||
func (g *Graphemes) Positions() (int, int) {
|
||||
if g.state == -1 {
|
||||
return 0, 0
|
||||
} else if g.state == -2 {
|
||||
return 1, 1
|
||||
}
|
||||
return g.offset, g.offset + len(g.cluster)
|
||||
}
|
||||
|
||||
// IsWordBoundary returns true if a word ends after the current grapheme
|
||||
// cluster.
|
||||
func (g *Graphemes) IsWordBoundary() bool {
|
||||
if g.state < 0 {
|
||||
return true
|
||||
}
|
||||
return g.boundaries&MaskWord != 0
|
||||
}
|
||||
|
||||
// IsSentenceBoundary returns true if a sentence ends after the current
|
||||
// grapheme cluster.
|
||||
func (g *Graphemes) IsSentenceBoundary() bool {
|
||||
if g.state < 0 {
|
||||
return true
|
||||
}
|
||||
return g.boundaries&MaskSentence != 0
|
||||
}
|
||||
|
||||
// LineBreak returns whether the line can be broken after the current grapheme
|
||||
// cluster. A value of [LineDontBreak] means the line may not be broken, a value
|
||||
// of [LineMustBreak] means the line must be broken, and a value of
|
||||
// [LineCanBreak] means the line may or may not be broken.
|
||||
func (g *Graphemes) LineBreak() int {
|
||||
if g.state == -1 {
|
||||
return LineDontBreak
|
||||
}
|
||||
if g.state == -2 {
|
||||
return LineMustBreak
|
||||
}
|
||||
return g.boundaries & MaskLine
|
||||
}
|
||||
|
||||
// Width returns the monospace width of the current grapheme cluster.
|
||||
func (g *Graphemes) Width() int {
|
||||
if g.state < 0 {
|
||||
return 0
|
||||
}
|
||||
return g.boundaries >> ShiftWidth
|
||||
}
|
||||
|
||||
// Reset puts the iterator into its initial state such that the next call to
|
||||
// [Graphemes.Next] sets it to the first grapheme cluster again.
|
||||
func (g *Graphemes) Reset() {
|
||||
g.state = -1
|
||||
g.offset = 0
|
||||
g.cluster = ""
|
||||
g.remaining = g.original
|
||||
}
|
||||
|
||||
// GraphemeClusterCount returns the number of user-perceived characters
// (grapheme clusters) for the given string.
func GraphemeClusterCount(s string) (n int) {
state := -1
for len(s) > 0 {
_, s, _, state = FirstGraphemeClusterInString(s, state)
n++
}
return
}
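// A short illustration, not part of the removed file, of why this helper
// differs from len and utf8.RuneCountInString.
package main

import (
	"fmt"
	"unicode/utf8"

	"github.com/rivo/uniseg"
)

func main() {
	s := "e\u0301🇩🇪" // "é" as e + combining acute, then a flag made of two regional indicators
	fmt.Println(len(s))                         // 11 bytes
	fmt.Println(utf8.RuneCountInString(s))      // 4 code points
	fmt.Println(uniseg.GraphemeClusterCount(s)) // 2 user-perceived characters
}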
|
||||
|
||||
// ReverseString reverses the given string while observing grapheme cluster
|
||||
// boundaries.
|
||||
func ReverseString(s string) string {
|
||||
str := []byte(s)
|
||||
reversed := make([]byte, len(str))
|
||||
state := -1
|
||||
index := len(str)
|
||||
for len(str) > 0 {
|
||||
var cluster []byte
|
||||
cluster, str, _, state = FirstGraphemeCluster(str, state)
|
||||
index -= len(cluster)
|
||||
copy(reversed[index:], cluster)
|
||||
if index <= len(str)/2 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return string(reversed)
|
||||
}
|
||||
|
||||
// The number of bits the grapheme property must be shifted to make place for
|
||||
// grapheme states.
|
||||
const shiftGraphemePropState = 4
|
||||
|
||||
// FirstGraphemeCluster returns the first grapheme cluster found in the given
|
||||
// byte slice according to the rules of [Unicode Standard Annex #29, Grapheme
|
||||
// Cluster Boundaries]. This function can be called continuously to extract all
|
||||
// grapheme clusters from a byte slice, as illustrated in the example below.
|
||||
//
|
||||
// If you don't know the current state, for example when calling the function
|
||||
// for the first time, you must pass -1. For consecutive calls, pass the state
|
||||
// and rest slice returned by the previous call.
|
||||
//
|
||||
// The "rest" slice is the sub-slice of the original byte slice "b" starting
|
||||
// after the last byte of the identified grapheme cluster. If the length of the
|
||||
// "rest" slice is 0, the entire byte slice "b" has been processed. The
|
||||
// "cluster" byte slice is the sub-slice of the input slice containing the
|
||||
// identified grapheme cluster.
|
||||
//
|
||||
// The returned width is the width of the grapheme cluster for most monospace
|
||||
// fonts where a value of 1 represents one character cell.
|
||||
//
|
||||
// Given an empty byte slice "b", the function returns nil values.
|
||||
//
|
||||
// While slightly less convenient than using the Graphemes class, this function
|
||||
// has much better performance and makes no allocations. It lends itself well to
|
||||
// large byte slices.
|
||||
//
|
||||
// [Unicode Standard Annex #29, Grapheme Cluster Boundaries]: http://unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
|
||||
func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, newState int) {
|
||||
// An empty byte slice returns nothing.
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRune(b)
|
||||
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
var prop int
|
||||
if state < 0 {
|
||||
prop = propertyGraphemes(r)
|
||||
} else {
|
||||
prop = state >> shiftGraphemePropState
|
||||
}
|
||||
return b, nil, runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
var firstProp int
|
||||
if state < 0 {
|
||||
state, firstProp, _ = transitionGraphemeState(state, r)
|
||||
} else {
|
||||
firstProp = state >> shiftGraphemePropState
|
||||
}
|
||||
width += runeWidth(r, firstProp)
|
||||
|
||||
// Transition until we find a boundary.
|
||||
for {
|
||||
var (
|
||||
prop int
|
||||
boundary bool
|
||||
)
|
||||
|
||||
r, l := utf8.DecodeRune(b[length:])
|
||||
state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
|
||||
|
||||
if boundary {
|
||||
return b[:length], b[length:], width, state | (prop << shiftGraphemePropState)
|
||||
}
|
||||
|
||||
if firstProp == prExtendedPictographic {
|
||||
if r == vs15 {
|
||||
width = 1
|
||||
} else if r == vs16 {
|
||||
width = 2
|
||||
}
|
||||
} else if firstProp != prRegionalIndicator && firstProp != prL {
|
||||
width += runeWidth(r, prop)
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(b) <= length {
|
||||
return b, nil, width, grAny | (prop << shiftGraphemePropState)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FirstGraphemeClusterInString is like [FirstGraphemeCluster] but its input and
|
||||
// outputs are strings.
|
||||
func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, width, newState int) {
|
||||
// An empty string returns nothing.
|
||||
if len(str) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRuneInString(str)
|
||||
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
var prop int
|
||||
if state < 0 {
|
||||
prop = propertyGraphemes(r)
|
||||
} else {
|
||||
prop = state >> shiftGraphemePropState
|
||||
}
|
||||
return str, "", runeWidth(r, prop), grAny | (prop << shiftGraphemePropState)
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
var firstProp int
|
||||
if state < 0 {
|
||||
state, firstProp, _ = transitionGraphemeState(state, r)
|
||||
} else {
|
||||
firstProp = state >> shiftGraphemePropState
|
||||
}
|
||||
width += runeWidth(r, firstProp)
|
||||
|
||||
// Transition until we find a boundary.
|
||||
for {
|
||||
var (
|
||||
prop int
|
||||
boundary bool
|
||||
)
|
||||
|
||||
r, l := utf8.DecodeRuneInString(str[length:])
|
||||
state, prop, boundary = transitionGraphemeState(state&maskGraphemeState, r)
|
||||
|
||||
if boundary {
|
||||
return str[:length], str[length:], width, state | (prop << shiftGraphemePropState)
|
||||
}
|
||||
|
||||
if firstProp == prExtendedPictographic {
|
||||
if r == vs15 {
|
||||
width = 1
|
||||
} else if r == vs16 {
|
||||
width = 2
|
||||
}
|
||||
} else if firstProp != prRegionalIndicator && firstProp != prL {
|
||||
width += runeWidth(r, prop)
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(str) <= length {
|
||||
return str, "", width, grAny | (prop << shiftGraphemePropState)
|
||||
}
|
||||
}
|
||||
}
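// The call protocol described above, sketched from client code (not part of
// the removed file): pass -1 on the first call, then feed the returned state
// and rest back in until the rest is empty.
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	str := "🏳️‍🌈 flag"
	state := -1
	for len(str) > 0 {
		var cluster string
		var width int
		cluster, str, width, state = uniseg.FirstGraphemeClusterInString(str, state)
		fmt.Printf("%q width %d\n", cluster, width)
	}
}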
|
||||
1915 vendor/github.com/rivo/uniseg/graphemeproperties.go (generated, vendored): diff suppressed because it is too large
176 vendor/github.com/rivo/uniseg/graphemerules.go (generated, vendored)
@@ -1,176 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
// The states of the grapheme cluster parser.
|
||||
const (
|
||||
grAny = iota
|
||||
grCR
|
||||
grControlLF
|
||||
grL
|
||||
grLVV
|
||||
grLVTT
|
||||
grPrepend
|
||||
grExtendedPictographic
|
||||
grExtendedPictographicZWJ
|
||||
grRIOdd
|
||||
grRIEven
|
||||
)
|
||||
|
||||
// The grapheme cluster parser's breaking instructions.
|
||||
const (
|
||||
grNoBoundary = iota
|
||||
grBoundary
|
||||
)
|
||||
|
||||
// grTransitions implements the grapheme cluster parser's state transitions.
|
||||
// Maps state and property to a new state, a breaking instruction, and rule
|
||||
// number. The breaking instruction always refers to the boundary between the
|
||||
// last and next code point. Returns negative values if no transition is found.
|
||||
//
|
||||
// This function is used as follows:
|
||||
//
|
||||
// 1. Find specific state + specific property. Stop if found.
|
||||
// 2. Find specific state + any property.
|
||||
// 3. Find any state + specific property.
|
||||
// 4. If only (2) or (3) (but not both) was found, stop.
|
||||
// 5. If both (2) and (3) were found, use state from (3) and breaking instruction
|
||||
// from the transition with the lower rule number, prefer (3) if rule numbers
|
||||
// are equal. Stop.
|
||||
// 6. Assume grAny and grBoundary.
|
||||
//
|
||||
// Unicode version 15.0.0.
|
||||
func grTransitions(state, prop int) (newState int, newProp int, boundary int) {
|
||||
// It turns out that using a big switch statement is much faster than using
|
||||
// a map.
|
||||
|
||||
switch uint64(state) | uint64(prop)<<32 {
|
||||
// GB5
|
||||
case grAny | prCR<<32:
|
||||
return grCR, grBoundary, 50
|
||||
case grAny | prLF<<32:
|
||||
return grControlLF, grBoundary, 50
|
||||
case grAny | prControl<<32:
|
||||
return grControlLF, grBoundary, 50
|
||||
|
||||
// GB4
|
||||
case grCR | prAny<<32:
|
||||
return grAny, grBoundary, 40
|
||||
case grControlLF | prAny<<32:
|
||||
return grAny, grBoundary, 40
|
||||
|
||||
// GB3
|
||||
case grCR | prLF<<32:
|
||||
return grControlLF, grNoBoundary, 30
|
||||
|
||||
// GB6
|
||||
case grAny | prL<<32:
|
||||
return grL, grBoundary, 9990
|
||||
case grL | prL<<32:
|
||||
return grL, grNoBoundary, 60
|
||||
case grL | prV<<32:
|
||||
return grLVV, grNoBoundary, 60
|
||||
case grL | prLV<<32:
|
||||
return grLVV, grNoBoundary, 60
|
||||
case grL | prLVT<<32:
|
||||
return grLVTT, grNoBoundary, 60
|
||||
|
||||
// GB7
|
||||
case grAny | prLV<<32:
|
||||
return grLVV, grBoundary, 9990
|
||||
case grAny | prV<<32:
|
||||
return grLVV, grBoundary, 9990
|
||||
case grLVV | prV<<32:
|
||||
return grLVV, grNoBoundary, 70
|
||||
case grLVV | prT<<32:
|
||||
return grLVTT, grNoBoundary, 70
|
||||
|
||||
// GB8
|
||||
case grAny | prLVT<<32:
|
||||
return grLVTT, grBoundary, 9990
|
||||
case grAny | prT<<32:
|
||||
return grLVTT, grBoundary, 9990
|
||||
case grLVTT | prT<<32:
|
||||
return grLVTT, grNoBoundary, 80
|
||||
|
||||
// GB9
|
||||
case grAny | prExtend<<32:
|
||||
return grAny, grNoBoundary, 90
|
||||
case grAny | prZWJ<<32:
|
||||
return grAny, grNoBoundary, 90
|
||||
|
||||
// GB9a
|
||||
case grAny | prSpacingMark<<32:
|
||||
return grAny, grNoBoundary, 91
|
||||
|
||||
// GB9b
|
||||
case grAny | prPrepend<<32:
|
||||
return grPrepend, grBoundary, 9990
|
||||
case grPrepend | prAny<<32:
|
||||
return grAny, grNoBoundary, 92
|
||||
|
||||
// GB11
|
||||
case grAny | prExtendedPictographic<<32:
|
||||
return grExtendedPictographic, grBoundary, 9990
|
||||
case grExtendedPictographic | prExtend<<32:
|
||||
return grExtendedPictographic, grNoBoundary, 110
|
||||
case grExtendedPictographic | prZWJ<<32:
|
||||
return grExtendedPictographicZWJ, grNoBoundary, 110
|
||||
case grExtendedPictographicZWJ | prExtendedPictographic<<32:
|
||||
return grExtendedPictographic, grNoBoundary, 110
|
||||
|
||||
// GB12 / GB13
|
||||
case grAny | prRegionalIndicator<<32:
|
||||
return grRIOdd, grBoundary, 9990
|
||||
case grRIOdd | prRegionalIndicator<<32:
|
||||
return grRIEven, grNoBoundary, 120
|
||||
case grRIEven | prRegionalIndicator<<32:
|
||||
return grRIOdd, grBoundary, 120
|
||||
default:
|
||||
return -1, -1, -1
|
||||
}
|
||||
}
|
||||
|
||||
// transitionGraphemeState determines the new state of the grapheme cluster
|
||||
// parser given the current state and the next code point. It also returns the
|
||||
// code point's grapheme property (the value mapped by the [graphemeCodePoints]
|
||||
// table) and whether a cluster boundary was detected.
|
||||
func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) {
|
||||
// Determine the property of the next character.
|
||||
prop = propertyGraphemes(r)
|
||||
|
||||
// Find the applicable transition.
|
||||
nextState, nextProp, _ := grTransitions(state, prop)
|
||||
if nextState >= 0 {
|
||||
// We have a specific transition. We'll use it.
|
||||
return nextState, prop, nextProp == grBoundary
|
||||
}
|
||||
|
||||
// No specific transition found. Try the less specific ones.
|
||||
anyPropState, anyPropProp, anyPropRule := grTransitions(state, prAny)
|
||||
anyStateState, anyStateProp, anyStateRule := grTransitions(grAny, prop)
|
||||
if anyPropState >= 0 && anyStateState >= 0 {
|
||||
// Both apply. We'll use a mix (see comments for grTransitions).
|
||||
newState = anyStateState
|
||||
boundary = anyStateProp == grBoundary
|
||||
if anyPropRule < anyStateRule {
|
||||
boundary = anyPropProp == grBoundary
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if anyPropState >= 0 {
|
||||
// We only have a specific state.
|
||||
return anyPropState, prop, anyPropProp == grBoundary
|
||||
// This branch will probably never be reached because anyStateState will
// always be non-negative given the current transition map. But we keep it
// here for future modifications to the transition map where this may not
// be true anymore.
|
||||
}
|
||||
|
||||
if anyStateState >= 0 {
|
||||
// We only have a specific property.
|
||||
return anyStateState, prop, anyStateProp == grBoundary
|
||||
}
|
||||
|
||||
// No known transition. GB999: Any ÷ Any.
|
||||
return grAny, prop, true
|
||||
}
|
||||
134 vendor/github.com/rivo/uniseg/line.go (generated, vendored)
@@ -1,134 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// FirstLineSegment returns the prefix of the given byte slice after which a
|
||||
// decision to break the string over to the next line can or must be made,
|
||||
// according to the rules of [Unicode Standard Annex #14]. This is used to
|
||||
// implement line breaking.
|
||||
//
|
||||
// Line breaking, also known as word wrapping, is the process of breaking a
|
||||
// section of text into lines such that it will fit in the available width of a
|
||||
// page, window or other display area.
|
||||
//
|
||||
// The returned "segment" may not be broken into smaller parts, unless no other
|
||||
// breaking opportunities present themselves, in which case you may break by
|
||||
// grapheme clusters (using the [FirstGraphemeCluster] function to determine the
|
||||
// grapheme clusters).
|
||||
//
|
||||
// The "mustBreak" flag indicates whether you MUST break the line after the
|
||||
// given segment (true), for example after newline characters, or you MAY break
|
||||
// the line after the given segment (false).
|
||||
//
|
||||
// This function can be called continuously to extract all non-breaking sub-sets
|
||||
// from a byte slice, as illustrated in the example below.
|
||||
//
|
||||
// If you don't know the current state, for example when calling the function
|
||||
// for the first time, you must pass -1. For consecutive calls, pass the state
|
||||
// and rest slice returned by the previous call.
|
||||
//
|
||||
// The "rest" slice is the sub-slice of the original byte slice "b" starting
|
||||
// after the last byte of the identified line segment. If the length of the
|
||||
// "rest" slice is 0, the entire byte slice "b" has been processed. The
|
||||
// "segment" byte slice is the sub-slice of the input slice containing the
|
||||
// identified line segment.
|
||||
//
|
||||
// Given an empty byte slice "b", the function returns nil values.
|
||||
//
|
||||
// Note that in accordance with [UAX #14 LB3], the final segment will end with
|
||||
// "mustBreak" set to true. You can choose to ignore this by checking if the
|
||||
// length of the "rest" slice is 0 and calling [HasTrailingLineBreak] or
|
||||
// [HasTrailingLineBreakInString] on the last rune.
|
||||
//
|
||||
// Note also that this algorithm may break within grapheme clusters. This is
|
||||
// addressed in Section 8.2 Example 6 of UAX #14. To avoid this, you can use
|
||||
// the [Step] function instead.
|
||||
//
|
||||
// [Unicode Standard Annex #14]: https://www.unicode.org/reports/tr14/
|
||||
// [UAX #14 LB3]: https://www.unicode.org/reports/tr14/#Algorithm
|
||||
func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool, newState int) {
|
||||
// An empty byte slice returns nothing.
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRune(b)
|
||||
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return b, nil, true, lbAny // LB3.
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionLineBreakState(state, r, b[length:], "")
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary int
|
||||
for {
|
||||
r, l := utf8.DecodeRune(b[length:])
|
||||
state, boundary = transitionLineBreakState(state, r, b[length+l:], "")
|
||||
|
||||
if boundary != LineDontBreak {
|
||||
return b[:length], b[length:], boundary == LineMustBreak, state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(b) <= length {
|
||||
return b, nil, true, lbAny // LB3
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FirstLineSegmentInString is like [FirstLineSegment] but its input and outputs
|
||||
// are strings.
|
||||
func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) {
|
||||
// An empty string returns nothing.
|
||||
if len(str) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRuneInString(str)
|
||||
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return str, "", true, lbAny // LB3.
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionLineBreakState(state, r, nil, str[length:])
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary int
|
||||
for {
|
||||
r, l := utf8.DecodeRuneInString(str[length:])
|
||||
state, boundary = transitionLineBreakState(state, r, nil, str[length+l:])
|
||||
|
||||
if boundary != LineDontBreak {
|
||||
return str[:length], str[length:], boundary == LineMustBreak, state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(str) <= length {
|
||||
return str, "", true, lbAny // LB3.
|
||||
}
|
||||
}
|
||||
}
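// A rough sketch of the word-wrapping use case described above, not part of
// the removed file. It additionally assumes the package's StringWidth helper
// (defined in width.go, which is not shown in this diff). Segments wider than
// the limit are not split further in this sketch.
package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

// wrap greedily fills lines with unbreakable segments and starts a new line
// when the next segment would exceed the limit or a hard break is required.
func wrap(text string, limit int) (lines []string) {
	var current string
	state := -1
	for len(text) > 0 {
		var segment string
		var mustBreak bool
		segment, text, mustBreak, state = uniseg.FirstLineSegmentInString(text, state)
		if current != "" && uniseg.StringWidth(current)+uniseg.StringWidth(segment) > limit {
			lines = append(lines, current)
			current = ""
		}
		current += segment
		if mustBreak { // Hard break (and, per LB3, the end of the text).
			lines = append(lines, current)
			current = ""
		}
	}
	if current != "" {
		lines = append(lines, current)
	}
	return lines
}

func main() {
	for _, line := range wrap("The quick brown fox jumps over the lazy dog.", 16) {
		fmt.Printf("%q\n", line)
	}
}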
|
||||
|
||||
// HasTrailingLineBreak returns true if the last rune in the given byte slice is
|
||||
// one of the hard line break code points defined in LB4 and LB5 of [UAX #14].
|
||||
//
|
||||
// [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm
|
||||
func HasTrailingLineBreak(b []byte) bool {
|
||||
r, _ := utf8.DecodeLastRune(b)
|
||||
property, _ := propertyLineBreak(r)
|
||||
return property == prBK || property == prCR || property == prLF || property == prNL
|
||||
}
|
||||
|
||||
// HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string.
|
||||
func HasTrailingLineBreakInString(str string) bool {
|
||||
r, _ := utf8.DecodeLastRuneInString(str)
|
||||
property, _ := propertyLineBreak(r)
|
||||
return property == prBK || property == prCR || property == prLF || property == prNL
|
||||
}
|
||||
3554 vendor/github.com/rivo/uniseg/lineproperties.go (generated, vendored): diff suppressed because it is too large
626 vendor/github.com/rivo/uniseg/linerules.go (generated, vendored)
@@ -1,626 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// The states of the line break parser.
|
||||
const (
|
||||
lbAny = iota
|
||||
lbBK
|
||||
lbCR
|
||||
lbLF
|
||||
lbNL
|
||||
lbSP
|
||||
lbZW
|
||||
lbWJ
|
||||
lbGL
|
||||
lbBA
|
||||
lbHY
|
||||
lbCL
|
||||
lbCP
|
||||
lbEX
|
||||
lbIS
|
||||
lbSY
|
||||
lbOP
|
||||
lbQU
|
||||
lbQUSP
|
||||
lbNS
|
||||
lbCLCPSP
|
||||
lbB2
|
||||
lbB2SP
|
||||
lbCB
|
||||
lbBB
|
||||
lbLB21a
|
||||
lbHL
|
||||
lbAL
|
||||
lbNU
|
||||
lbPR
|
||||
lbEB
|
||||
lbIDEM
|
||||
lbNUNU
|
||||
lbNUSY
|
||||
lbNUIS
|
||||
lbNUCL
|
||||
lbNUCP
|
||||
lbPO
|
||||
lbJL
|
||||
lbJV
|
||||
lbJT
|
||||
lbH2
|
||||
lbH3
|
||||
lbOddRI
|
||||
lbEvenRI
|
||||
lbExtPicCn
|
||||
lbZWJBit = 64
|
||||
lbCPeaFWHBit = 128
|
||||
)
|
||||
|
||||
// These constants define whether a given text may be broken into the next line.
|
||||
// If the break is optional (LineCanBreak), you may choose to break or not based
|
||||
// on your own criteria, for example, if the text has reached the available
|
||||
// width.
|
||||
const (
|
||||
LineDontBreak = iota // You may not break the line here.
|
||||
LineCanBreak // You may or may not break the line here.
|
||||
LineMustBreak // You must break the line here.
|
||||
)
|
||||
|
||||
// lbTransitions implements the line break parser's state transitions. It's
|
||||
// analogous to [grTransitions], see comments there for details.
|
||||
//
|
||||
// Unicode version 15.0.0.
|
||||
func lbTransitions(state, prop int) (newState, lineBreak, rule int) {
|
||||
switch uint64(state) | uint64(prop)<<32 {
|
||||
// LB4.
|
||||
case lbBK | prAny<<32:
|
||||
return lbAny, LineMustBreak, 40
|
||||
|
||||
// LB5.
|
||||
case lbCR | prLF<<32:
|
||||
return lbLF, LineDontBreak, 50
|
||||
case lbCR | prAny<<32:
|
||||
return lbAny, LineMustBreak, 50
|
||||
case lbLF | prAny<<32:
|
||||
return lbAny, LineMustBreak, 50
|
||||
case lbNL | prAny<<32:
|
||||
return lbAny, LineMustBreak, 50
|
||||
|
||||
// LB6.
|
||||
case lbAny | prBK<<32:
|
||||
return lbBK, LineDontBreak, 60
|
||||
case lbAny | prCR<<32:
|
||||
return lbCR, LineDontBreak, 60
|
||||
case lbAny | prLF<<32:
|
||||
return lbLF, LineDontBreak, 60
|
||||
case lbAny | prNL<<32:
|
||||
return lbNL, LineDontBreak, 60
|
||||
|
||||
// LB7.
|
||||
case lbAny | prSP<<32:
|
||||
return lbSP, LineDontBreak, 70
|
||||
case lbAny | prZW<<32:
|
||||
return lbZW, LineDontBreak, 70
|
||||
|
||||
// LB8.
|
||||
case lbZW | prSP<<32:
|
||||
return lbZW, LineDontBreak, 70
|
||||
case lbZW | prAny<<32:
|
||||
return lbAny, LineCanBreak, 80
|
||||
|
||||
// LB11.
|
||||
case lbAny | prWJ<<32:
|
||||
return lbWJ, LineDontBreak, 110
|
||||
case lbWJ | prAny<<32:
|
||||
return lbAny, LineDontBreak, 110
|
||||
|
||||
// LB12.
|
||||
case lbAny | prGL<<32:
|
||||
return lbGL, LineCanBreak, 310
|
||||
case lbGL | prAny<<32:
|
||||
return lbAny, LineDontBreak, 120
|
||||
|
||||
// LB13 (simple transitions).
|
||||
case lbAny | prCL<<32:
|
||||
return lbCL, LineCanBreak, 310
|
||||
case lbAny | prCP<<32:
|
||||
return lbCP, LineCanBreak, 310
|
||||
case lbAny | prEX<<32:
|
||||
return lbEX, LineDontBreak, 130
|
||||
case lbAny | prIS<<32:
|
||||
return lbIS, LineCanBreak, 310
|
||||
case lbAny | prSY<<32:
|
||||
return lbSY, LineCanBreak, 310
|
||||
|
||||
// LB14.
|
||||
case lbAny | prOP<<32:
|
||||
return lbOP, LineCanBreak, 310
|
||||
case lbOP | prSP<<32:
|
||||
return lbOP, LineDontBreak, 70
|
||||
case lbOP | prAny<<32:
|
||||
return lbAny, LineDontBreak, 140
|
||||
|
||||
// LB15.
|
||||
case lbQU | prSP<<32:
|
||||
return lbQUSP, LineDontBreak, 70
|
||||
case lbQU | prOP<<32:
|
||||
return lbOP, LineDontBreak, 150
|
||||
case lbQUSP | prOP<<32:
|
||||
return lbOP, LineDontBreak, 150
|
||||
|
||||
// LB16.
|
||||
case lbCL | prSP<<32:
|
||||
return lbCLCPSP, LineDontBreak, 70
|
||||
case lbNUCL | prSP<<32:
|
||||
return lbCLCPSP, LineDontBreak, 70
|
||||
case lbCP | prSP<<32:
|
||||
return lbCLCPSP, LineDontBreak, 70
|
||||
case lbNUCP | prSP<<32:
|
||||
return lbCLCPSP, LineDontBreak, 70
|
||||
case lbCL | prNS<<32:
|
||||
return lbNS, LineDontBreak, 160
|
||||
case lbNUCL | prNS<<32:
|
||||
return lbNS, LineDontBreak, 160
|
||||
case lbCP | prNS<<32:
|
||||
return lbNS, LineDontBreak, 160
|
||||
case lbNUCP | prNS<<32:
|
||||
return lbNS, LineDontBreak, 160
|
||||
case lbCLCPSP | prNS<<32:
|
||||
return lbNS, LineDontBreak, 160
|
||||
|
||||
// LB17.
|
||||
case lbAny | prB2<<32:
|
||||
return lbB2, LineCanBreak, 310
|
||||
case lbB2 | prSP<<32:
|
||||
return lbB2SP, LineDontBreak, 70
|
||||
case lbB2 | prB2<<32:
|
||||
return lbB2, LineDontBreak, 170
|
||||
case lbB2SP | prB2<<32:
|
||||
return lbB2, LineDontBreak, 170
|
||||
|
||||
// LB18.
|
||||
case lbSP | prAny<<32:
|
||||
return lbAny, LineCanBreak, 180
|
||||
case lbQUSP | prAny<<32:
|
||||
return lbAny, LineCanBreak, 180
|
||||
case lbCLCPSP | prAny<<32:
|
||||
return lbAny, LineCanBreak, 180
|
||||
case lbB2SP | prAny<<32:
|
||||
return lbAny, LineCanBreak, 180
|
||||
|
||||
// LB19.
|
||||
case lbAny | prQU<<32:
|
||||
return lbQU, LineDontBreak, 190
|
||||
case lbQU | prAny<<32:
|
||||
return lbAny, LineDontBreak, 190
|
||||
|
||||
// LB20.
|
||||
case lbAny | prCB<<32:
|
||||
return lbCB, LineCanBreak, 200
|
||||
case lbCB | prAny<<32:
|
||||
return lbAny, LineCanBreak, 200
|
||||
|
||||
// LB21.
|
||||
case lbAny | prBA<<32:
|
||||
return lbBA, LineDontBreak, 210
|
||||
case lbAny | prHY<<32:
|
||||
return lbHY, LineDontBreak, 210
|
||||
case lbAny | prNS<<32:
|
||||
return lbNS, LineDontBreak, 210
|
||||
case lbAny | prBB<<32:
|
||||
return lbBB, LineCanBreak, 310
|
||||
case lbBB | prAny<<32:
|
||||
return lbAny, LineDontBreak, 210
|
||||
|
||||
// LB21a.
|
||||
case lbAny | prHL<<32:
|
||||
return lbHL, LineCanBreak, 310
|
||||
case lbHL | prHY<<32:
|
||||
return lbLB21a, LineDontBreak, 210
|
||||
case lbHL | prBA<<32:
|
||||
return lbLB21a, LineDontBreak, 210
|
||||
case lbLB21a | prAny<<32:
|
||||
return lbAny, LineDontBreak, 211
|
||||
|
||||
// LB21b.
|
||||
case lbSY | prHL<<32:
|
||||
return lbHL, LineDontBreak, 212
|
||||
case lbNUSY | prHL<<32:
|
||||
return lbHL, LineDontBreak, 212
|
||||
|
||||
// LB22.
|
||||
case lbAny | prIN<<32:
|
||||
return lbAny, LineDontBreak, 220
|
||||
|
||||
// LB23.
|
||||
case lbAny | prAL<<32:
|
||||
return lbAL, LineCanBreak, 310
|
||||
case lbAny | prNU<<32:
|
||||
return lbNU, LineCanBreak, 310
|
||||
case lbAL | prNU<<32:
|
||||
return lbNU, LineDontBreak, 230
|
||||
case lbHL | prNU<<32:
|
||||
return lbNU, LineDontBreak, 230
|
||||
case lbNU | prAL<<32:
|
||||
return lbAL, LineDontBreak, 230
|
||||
case lbNU | prHL<<32:
|
||||
return lbHL, LineDontBreak, 230
|
||||
case lbNUNU | prAL<<32:
|
||||
return lbAL, LineDontBreak, 230
|
||||
case lbNUNU | prHL<<32:
|
||||
return lbHL, LineDontBreak, 230
|
||||
|
||||
// LB23a.
|
||||
case lbAny | prPR<<32:
|
||||
return lbPR, LineCanBreak, 310
|
||||
case lbAny | prID<<32:
|
||||
return lbIDEM, LineCanBreak, 310
|
||||
case lbAny | prEB<<32:
|
||||
return lbEB, LineCanBreak, 310
|
||||
case lbAny | prEM<<32:
|
||||
return lbIDEM, LineCanBreak, 310
|
||||
case lbPR | prID<<32:
|
||||
return lbIDEM, LineDontBreak, 231
|
||||
case lbPR | prEB<<32:
|
||||
return lbEB, LineDontBreak, 231
|
||||
case lbPR | prEM<<32:
|
||||
return lbIDEM, LineDontBreak, 231
|
||||
case lbIDEM | prPO<<32:
|
||||
return lbPO, LineDontBreak, 231
|
||||
case lbEB | prPO<<32:
|
||||
return lbPO, LineDontBreak, 231
|
||||
|
||||
// LB24.
|
||||
case lbAny | prPO<<32:
|
||||
return lbPO, LineCanBreak, 310
|
||||
case lbPR | prAL<<32:
|
||||
return lbAL, LineDontBreak, 240
|
||||
case lbPR | prHL<<32:
|
||||
return lbHL, LineDontBreak, 240
|
||||
case lbPO | prAL<<32:
|
||||
return lbAL, LineDontBreak, 240
|
||||
case lbPO | prHL<<32:
|
||||
return lbHL, LineDontBreak, 240
|
||||
case lbAL | prPR<<32:
|
||||
return lbPR, LineDontBreak, 240
|
||||
case lbAL | prPO<<32:
|
||||
return lbPO, LineDontBreak, 240
|
||||
case lbHL | prPR<<32:
|
||||
return lbPR, LineDontBreak, 240
|
||||
case lbHL | prPO<<32:
|
||||
return lbPO, LineDontBreak, 240
|
||||
|
||||
// LB25 (simple transitions).
|
||||
case lbPR | prNU<<32:
|
||||
return lbNU, LineDontBreak, 250
|
||||
case lbPO | prNU<<32:
|
||||
return lbNU, LineDontBreak, 250
|
||||
case lbOP | prNU<<32:
|
||||
return lbNU, LineDontBreak, 250
|
||||
case lbHY | prNU<<32:
|
||||
return lbNU, LineDontBreak, 250
|
||||
case lbNU | prNU<<32:
|
||||
return lbNUNU, LineDontBreak, 250
|
||||
case lbNU | prSY<<32:
|
||||
return lbNUSY, LineDontBreak, 250
|
||||
case lbNU | prIS<<32:
|
||||
return lbNUIS, LineDontBreak, 250
|
||||
case lbNUNU | prNU<<32:
|
||||
return lbNUNU, LineDontBreak, 250
|
||||
case lbNUNU | prSY<<32:
|
||||
return lbNUSY, LineDontBreak, 250
|
||||
case lbNUNU | prIS<<32:
|
||||
return lbNUIS, LineDontBreak, 250
|
||||
case lbNUSY | prNU<<32:
|
||||
return lbNUNU, LineDontBreak, 250
|
||||
case lbNUSY | prSY<<32:
|
||||
return lbNUSY, LineDontBreak, 250
|
||||
case lbNUSY | prIS<<32:
|
||||
return lbNUIS, LineDontBreak, 250
|
||||
case lbNUIS | prNU<<32:
|
||||
return lbNUNU, LineDontBreak, 250
|
||||
case lbNUIS | prSY<<32:
|
||||
return lbNUSY, LineDontBreak, 250
|
||||
case lbNUIS | prIS<<32:
|
||||
return lbNUIS, LineDontBreak, 250
|
||||
case lbNU | prCL<<32:
|
||||
return lbNUCL, LineDontBreak, 250
|
||||
case lbNU | prCP<<32:
|
||||
return lbNUCP, LineDontBreak, 250
|
||||
case lbNUNU | prCL<<32:
|
||||
return lbNUCL, LineDontBreak, 250
|
||||
case lbNUNU | prCP<<32:
|
||||
return lbNUCP, LineDontBreak, 250
|
||||
case lbNUSY | prCL<<32:
|
||||
return lbNUCL, LineDontBreak, 250
|
||||
case lbNUSY | prCP<<32:
|
||||
return lbNUCP, LineDontBreak, 250
|
||||
case lbNUIS | prCL<<32:
|
||||
return lbNUCL, LineDontBreak, 250
|
||||
case lbNUIS | prCP<<32:
|
||||
return lbNUCP, LineDontBreak, 250
|
||||
case lbNU | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNUNU | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNUSY | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNUIS | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNUCL | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNUCP | prPO<<32:
|
||||
return lbPO, LineDontBreak, 250
|
||||
case lbNU | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
case lbNUNU | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
case lbNUSY | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
case lbNUIS | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
case lbNUCL | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
case lbNUCP | prPR<<32:
|
||||
return lbPR, LineDontBreak, 250
|
||||
|
||||
// LB26.
|
||||
case lbAny | prJL<<32:
|
||||
return lbJL, LineCanBreak, 310
|
||||
case lbAny | prJV<<32:
|
||||
return lbJV, LineCanBreak, 310
|
||||
case lbAny | prJT<<32:
|
||||
return lbJT, LineCanBreak, 310
|
||||
case lbAny | prH2<<32:
|
||||
return lbH2, LineCanBreak, 310
|
||||
case lbAny | prH3<<32:
|
||||
return lbH3, LineCanBreak, 310
|
||||
case lbJL | prJL<<32:
|
||||
return lbJL, LineDontBreak, 260
|
||||
case lbJL | prJV<<32:
|
||||
return lbJV, LineDontBreak, 260
|
||||
case lbJL | prH2<<32:
|
||||
return lbH2, LineDontBreak, 260
|
||||
case lbJL | prH3<<32:
|
||||
return lbH3, LineDontBreak, 260
|
||||
case lbJV | prJV<<32:
|
||||
return lbJV, LineDontBreak, 260
|
||||
case lbJV | prJT<<32:
|
||||
return lbJT, LineDontBreak, 260
|
||||
case lbH2 | prJV<<32:
|
||||
return lbJV, LineDontBreak, 260
|
||||
case lbH2 | prJT<<32:
|
||||
return lbJT, LineDontBreak, 260
|
||||
case lbJT | prJT<<32:
|
||||
return lbJT, LineDontBreak, 260
|
||||
case lbH3 | prJT<<32:
|
||||
return lbJT, LineDontBreak, 260
|
||||
|
||||
// LB27.
|
||||
case lbJL | prPO<<32:
|
||||
return lbPO, LineDontBreak, 270
|
||||
case lbJV | prPO<<32:
|
||||
return lbPO, LineDontBreak, 270
|
||||
case lbJT | prPO<<32:
|
||||
return lbPO, LineDontBreak, 270
|
||||
case lbH2 | prPO<<32:
|
||||
return lbPO, LineDontBreak, 270
|
||||
case lbH3 | prPO<<32:
|
||||
return lbPO, LineDontBreak, 270
|
||||
case lbPR | prJL<<32:
|
||||
return lbJL, LineDontBreak, 270
|
||||
case lbPR | prJV<<32:
|
||||
return lbJV, LineDontBreak, 270
|
||||
case lbPR | prJT<<32:
|
||||
return lbJT, LineDontBreak, 270
|
||||
case lbPR | prH2<<32:
|
||||
return lbH2, LineDontBreak, 270
|
||||
case lbPR | prH3<<32:
|
||||
return lbH3, LineDontBreak, 270
|
||||
|
||||
// LB28.
|
||||
case lbAL | prAL<<32:
|
||||
return lbAL, LineDontBreak, 280
|
||||
case lbAL | prHL<<32:
|
||||
return lbHL, LineDontBreak, 280
|
||||
case lbHL | prAL<<32:
|
||||
return lbAL, LineDontBreak, 280
|
||||
case lbHL | prHL<<32:
|
||||
return lbHL, LineDontBreak, 280
|
||||
|
||||
// LB29.
|
||||
case lbIS | prAL<<32:
|
||||
return lbAL, LineDontBreak, 290
|
||||
case lbIS | prHL<<32:
|
||||
return lbHL, LineDontBreak, 290
|
||||
case lbNUIS | prAL<<32:
|
||||
return lbAL, LineDontBreak, 290
|
||||
case lbNUIS | prHL<<32:
|
||||
return lbHL, LineDontBreak, 290
|
||||
|
||||
default:
|
||||
return -1, -1, -1
|
||||
}
|
||||
}
|
||||
|
||||
// transitionLineBreakState determines the new state of the line break parser
|
||||
// given the current state and the next code point. It also returns the type of
|
||||
// line break: LineDontBreak, LineCanBreak, or LineMustBreak. If more than one
|
||||
// code point is needed to determine the new state, the byte slice or the string
|
||||
// starting after rune "r" can be used (whichever is not nil or empty) for
|
||||
// further lookups.
|
||||
func transitionLineBreakState(state int, r rune, b []byte, str string) (newState int, lineBreak int) {
|
||||
// Determine the property of the next character.
|
||||
nextProperty, generalCategory := propertyLineBreak(r)
|
||||
|
||||
// Prepare.
|
||||
var forceNoBreak, isCPeaFWH bool
|
||||
if state >= 0 && state&lbCPeaFWHBit != 0 {
|
||||
isCPeaFWH = true // LB30: CP but ea is not F, W, or H.
|
||||
state = state &^ lbCPeaFWHBit
|
||||
}
|
||||
if state >= 0 && state&lbZWJBit != 0 {
|
||||
state = state &^ lbZWJBit // Extract zero-width joiner bit.
|
||||
forceNoBreak = true // LB8a.
|
||||
}
|
||||
|
||||
defer func() {
|
||||
// Transition into LB30.
|
||||
if newState == lbCP || newState == lbNUCP {
|
||||
ea := propertyEastAsianWidth(r)
|
||||
if ea != prF && ea != prW && ea != prH {
|
||||
newState |= lbCPeaFWHBit
|
||||
}
|
||||
}
|
||||
|
||||
// Override break.
|
||||
if forceNoBreak {
|
||||
lineBreak = LineDontBreak
|
||||
}
|
||||
}()
|
||||
|
||||
// LB1.
|
||||
if nextProperty == prAI || nextProperty == prSG || nextProperty == prXX {
|
||||
nextProperty = prAL
|
||||
} else if nextProperty == prSA {
|
||||
if generalCategory == gcMn || generalCategory == gcMc {
|
||||
nextProperty = prCM
|
||||
} else {
|
||||
nextProperty = prAL
|
||||
}
|
||||
} else if nextProperty == prCJ {
|
||||
nextProperty = prNS
|
||||
}
|
||||
|
||||
// Combining marks.
|
||||
if nextProperty == prZWJ || nextProperty == prCM {
|
||||
var bit int
|
||||
if nextProperty == prZWJ {
|
||||
bit = lbZWJBit
|
||||
}
|
||||
mustBreakState := state < 0 || state == lbBK || state == lbCR || state == lbLF || state == lbNL
|
||||
if !mustBreakState && state != lbSP && state != lbZW && state != lbQUSP && state != lbCLCPSP && state != lbB2SP {
|
||||
// LB9.
|
||||
return state | bit, LineDontBreak
|
||||
} else {
|
||||
// LB10.
|
||||
if mustBreakState {
|
||||
return lbAL | bit, LineMustBreak
|
||||
}
|
||||
return lbAL | bit, LineCanBreak
|
||||
}
|
||||
}
|
||||
|
||||
// Find the applicable transition in the table.
|
||||
var rule int
|
||||
newState, lineBreak, rule = lbTransitions(state, nextProperty)
|
||||
if newState < 0 {
|
||||
// No specific transition found. Try the less specific ones.
|
||||
anyPropProp, anyPropLineBreak, anyPropRule := lbTransitions(state, prAny)
|
||||
anyStateProp, anyStateLineBreak, anyStateRule := lbTransitions(lbAny, nextProperty)
|
||||
if anyPropProp >= 0 && anyStateProp >= 0 {
|
||||
// Both apply. We'll use a mix (see comments for grTransitions).
|
||||
newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule
|
||||
if anyPropRule < anyStateRule {
|
||||
lineBreak, rule = anyPropLineBreak, anyPropRule
|
||||
}
|
||||
} else if anyPropProp >= 0 {
|
||||
// We only have a specific state.
|
||||
newState, lineBreak, rule = anyPropProp, anyPropLineBreak, anyPropRule
|
||||
// This branch will probably never be reached because anyStateProp will
// always be non-negative given the current transition map. But we keep it
// here for future modifications to the transition map where this may not
// be true anymore.
|
||||
} else if anyStateProp >= 0 {
|
||||
// We only have a specific property.
|
||||
newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule
|
||||
} else {
|
||||
// No known transition. LB31: ALL ÷ ALL.
|
||||
newState, lineBreak, rule = lbAny, LineCanBreak, 310
|
||||
}
|
||||
}
|
||||
|
||||
// LB12a.
|
||||
if rule > 121 &&
|
||||
nextProperty == prGL &&
|
||||
(state != lbSP && state != lbBA && state != lbHY && state != lbLB21a && state != lbQUSP && state != lbCLCPSP && state != lbB2SP) {
|
||||
return lbGL, LineDontBreak
|
||||
}
|
||||
|
||||
// LB13.
|
||||
if rule > 130 && state != lbNU && state != lbNUNU {
|
||||
switch nextProperty {
|
||||
case prCL:
|
||||
return lbCL, LineDontBreak
|
||||
case prCP:
|
||||
return lbCP, LineDontBreak
|
||||
case prIS:
|
||||
return lbIS, LineDontBreak
|
||||
case prSY:
|
||||
return lbSY, LineDontBreak
|
||||
}
|
||||
}
|
||||
|
||||
// LB25 (look ahead).
|
||||
if rule > 250 &&
(state == lbPR || state == lbPO) &&
(nextProperty == prOP || nextProperty == prHY) {
|
||||
var r rune
|
||||
if b != nil { // Byte slice version.
|
||||
r, _ = utf8.DecodeRune(b)
|
||||
} else { // String version.
|
||||
r, _ = utf8.DecodeRuneInString(str)
|
||||
}
|
||||
if r != utf8.RuneError {
|
||||
pr, _ := propertyLineBreak(r)
|
||||
if pr == prNU {
|
||||
return lbNU, LineDontBreak
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// LB30 (part one).
|
||||
if rule > 300 {
|
||||
if (state == lbAL || state == lbHL || state == lbNU || state == lbNUNU) && nextProperty == prOP {
|
||||
ea := propertyEastAsianWidth(r)
|
||||
if ea != prF && ea != prW && ea != prH {
|
||||
return lbOP, LineDontBreak
|
||||
}
|
||||
} else if isCPeaFWH {
|
||||
switch nextProperty {
|
||||
case prAL:
|
||||
return lbAL, LineDontBreak
|
||||
case prHL:
|
||||
return lbHL, LineDontBreak
|
||||
case prNU:
|
||||
return lbNU, LineDontBreak
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// LB30a.
|
||||
if newState == lbAny && nextProperty == prRI {
|
||||
if state != lbOddRI && state != lbEvenRI { // Includes state == -1.
|
||||
// Transition into the first RI.
|
||||
return lbOddRI, lineBreak
|
||||
}
|
||||
if state == lbOddRI {
|
||||
// Don't break pairs of Regional Indicators.
|
||||
return lbEvenRI, LineDontBreak
|
||||
}
|
||||
return lbOddRI, lineBreak
|
||||
}
|
||||
|
||||
// LB30b.
|
||||
if rule > 302 {
|
||||
if nextProperty == prEM {
|
||||
if state == lbEB || state == lbExtPicCn {
|
||||
return prAny, LineDontBreak
|
||||
}
|
||||
}
|
||||
graphemeProperty := propertyGraphemes(r)
|
||||
if graphemeProperty == prExtendedPictographic && generalCategory == gcCn {
|
||||
return lbExtPicCn, LineCanBreak
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
208 vendor/github.com/rivo/uniseg/properties.go (generated, vendored)
@@ -1,208 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
// The Unicode properties as used in the various parsers. Only the ones needed
|
||||
// in the context of this package are included.
|
||||
const (
|
||||
prXX = 0 // Same as prAny.
|
||||
prAny = iota // prAny must be 0.
|
||||
prPrepend // Grapheme properties must come first, to reduce the number of bits stored in the state vector.
|
||||
prCR
|
||||
prLF
|
||||
prControl
|
||||
prExtend
|
||||
prRegionalIndicator
|
||||
prSpacingMark
|
||||
prL
|
||||
prV
|
||||
prT
|
||||
prLV
|
||||
prLVT
|
||||
prZWJ
|
||||
prExtendedPictographic
|
||||
prNewline
|
||||
prWSegSpace
|
||||
prDoubleQuote
|
||||
prSingleQuote
|
||||
prMidNumLet
|
||||
prNumeric
|
||||
prMidLetter
|
||||
prMidNum
|
||||
prExtendNumLet
|
||||
prALetter
|
||||
prFormat
|
||||
prHebrewLetter
|
||||
prKatakana
|
||||
prSp
|
||||
prSTerm
|
||||
prClose
|
||||
prSContinue
|
||||
prATerm
|
||||
prUpper
|
||||
prLower
|
||||
prSep
|
||||
prOLetter
|
||||
prCM
|
||||
prBA
|
||||
prBK
|
||||
prSP
|
||||
prEX
|
||||
prQU
|
||||
prAL
|
||||
prPR
|
||||
prPO
|
||||
prOP
|
||||
prCP
|
||||
prIS
|
||||
prHY
|
||||
prSY
|
||||
prNU
|
||||
prCL
|
||||
prNL
|
||||
prGL
|
||||
prAI
|
||||
prBB
|
||||
prHL
|
||||
prSA
|
||||
prJL
|
||||
prJV
|
||||
prJT
|
||||
prNS
|
||||
prZW
|
||||
prB2
|
||||
prIN
|
||||
prWJ
|
||||
prID
|
||||
prEB
|
||||
prCJ
|
||||
prH2
|
||||
prH3
|
||||
prSG
|
||||
prCB
|
||||
prRI
|
||||
prEM
|
||||
prN
|
||||
prNa
|
||||
prA
|
||||
prW
|
||||
prH
|
||||
prF
|
||||
prEmojiPresentation
|
||||
)
|
||||
|
||||
// Unicode General Categories. Only the ones needed in the context of this
|
||||
// package are included.
|
||||
const (
|
||||
gcNone = iota // gcNone must be 0.
|
||||
gcCc
|
||||
gcZs
|
||||
gcPo
|
||||
gcSc
|
||||
gcPs
|
||||
gcPe
|
||||
gcSm
|
||||
gcPd
|
||||
gcNd
|
||||
gcLu
|
||||
gcSk
|
||||
gcPc
|
||||
gcLl
|
||||
gcSo
|
||||
gcLo
|
||||
gcPi
|
||||
gcCf
|
||||
gcNo
|
||||
gcPf
|
||||
gcLC
|
||||
gcLm
|
||||
gcMn
|
||||
gcMe
|
||||
gcMc
|
||||
gcNl
|
||||
gcZl
|
||||
gcZp
|
||||
gcCn
|
||||
gcCs
|
||||
gcCo
|
||||
)
|
||||
|
||||
// Special code points.
|
||||
const (
|
||||
vs15 = 0xfe0e // Variation Selector-15 (text presentation)
|
||||
vs16 = 0xfe0f // Variation Selector-16 (emoji presentation)
|
||||
)
|
||||
|
||||
// propertySearch performs a binary search on a property slice and returns the
|
||||
// entry whose range (start = first array element, end = second array element)
|
||||
// includes r, or an array of 0's if no such entry was found.
|
||||
func propertySearch[E interface{ [3]int | [4]int }](dictionary []E, r rune) (result E) {
|
||||
// Run a binary search.
|
||||
from := 0
|
||||
to := len(dictionary)
|
||||
for to > from {
|
||||
middle := (from + to) / 2
|
||||
cpRange := dictionary[middle]
|
||||
if int(r) < cpRange[0] {
|
||||
to = middle
|
||||
continue
|
||||
}
|
||||
if int(r) > cpRange[1] {
|
||||
from = middle + 1
|
||||
continue
|
||||
}
|
||||
return cpRange
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// property returns the Unicode property value (see constants above) of the
|
||||
// given code point.
|
||||
func property(dictionary [][3]int, r rune) int {
|
||||
return propertySearch(dictionary, r)[2]
|
||||
}
|
||||
|
||||
// propertyLineBreak returns the Unicode property value and General Category
|
||||
// (see constants above) of the given code point, as listed in the line break
|
||||
// code points table, while fast tracking ASCII digits and letters.
|
||||
func propertyLineBreak(r rune) (property, generalCategory int) {
|
||||
if r >= 'a' && r <= 'z' {
|
||||
return prAL, gcLl
|
||||
}
|
||||
if r >= 'A' && r <= 'Z' {
|
||||
return prAL, gcLu
|
||||
}
|
||||
if r >= '0' && r <= '9' {
|
||||
return prNU, gcNd
|
||||
}
|
||||
entry := propertySearch(lineBreakCodePoints, r)
|
||||
return entry[2], entry[3]
|
||||
}
|
||||
|
||||
// propertyGraphemes returns the Unicode grapheme cluster property value of the
|
||||
// given code point while fast tracking ASCII characters.
|
||||
func propertyGraphemes(r rune) int {
|
||||
if r >= 0x20 && r <= 0x7e {
|
||||
return prAny
|
||||
}
|
||||
if r == 0x0a {
|
||||
return prLF
|
||||
}
|
||||
if r == 0x0d {
|
||||
return prCR
|
||||
}
|
||||
if r >= 0 && r <= 0x1f || r == 0x7f {
|
||||
return prControl
|
||||
}
|
||||
return property(graphemeCodePoints, r)
|
||||
}
|
||||
|
||||
// propertyEastAsianWidth returns the Unicode East Asian Width property value of
|
||||
// the given code point while fast tracking ASCII characters.
|
||||
func propertyEastAsianWidth(r rune) int {
|
||||
if r >= 0x20 && r <= 0x7e {
|
||||
return prNa
|
||||
}
|
||||
if r >= 0 && r <= 0x1f || r == 0x7f {
|
||||
return prN
|
||||
}
|
||||
return property(eastAsianWidth, r)
|
||||
}
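The propertySearch helper above is an ordinary binary search over sorted, non-overlapping code point ranges. A small self-contained sketch of the same technique follows; the table entries and property values are made up for illustration and are not the package's real data.

package main

import "fmt"

// Each entry is {first code point, last code point, property value}.
// The table must be sorted by the first column and must not overlap.
var demoTable = [][3]int{
	{0x0030, 0x0039, 1}, // ASCII digits -> property 1 (made up)
	{0x0041, 0x005a, 2}, // ASCII uppercase -> property 2 (made up)
	{0x4e00, 0x9fff, 3}, // CJK Unified Ideographs -> property 3 (made up)
}

// lookup mirrors propertySearch: binary-search for the range containing r
// and return its property value, or 0 if r is not covered by the table.
func lookup(table [][3]int, r rune) int {
	from, to := 0, len(table)
	for to > from {
		middle := (from + to) / 2
		entry := table[middle]
		if int(r) < entry[0] {
			to = middle
			continue
		}
		if int(r) > entry[1] {
			from = middle + 1
			continue
		}
		return entry[2]
	}
	return 0
}

func main() {
	fmt.Println(lookup(demoTable, '7'))  // 1
	fmt.Println(lookup(demoTable, '世')) // 3
	fmt.Println(lookup(demoTable, '!'))  // 0 (not in any range)
}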
|
||||
90
vendor/github.com/rivo/uniseg/sentence.go
generated
vendored
@@ -1,90 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// FirstSentence returns the first sentence found in the given byte slice
|
||||
// according to the rules of [Unicode Standard Annex #29, Sentence Boundaries].
|
||||
// This function can be called continuously to extract all sentences from a byte
|
||||
// slice, as illustrated in the example below.
|
||||
//
|
||||
// If you don't know the current state, for example when calling the function
|
||||
// for the first time, you must pass -1. For consecutive calls, pass the state
|
||||
// and rest slice returned by the previous call.
|
||||
//
|
||||
// The "rest" slice is the sub-slice of the original byte slice "b" starting
|
||||
// after the last byte of the identified sentence. If the length of the "rest"
|
||||
// slice is 0, the entire byte slice "b" has been processed. The "sentence" byte
|
||||
// slice is the sub-slice of the input slice containing the identified sentence.
|
||||
//
|
||||
// Given an empty byte slice "b", the function returns nil values.
|
||||
//
|
||||
// [Unicode Standard Annex #29, Sentence Boundaries]: http://unicode.org/reports/tr29/#Sentence_Boundaries
|
||||
func FirstSentence(b []byte, state int) (sentence, rest []byte, newState int) {
|
||||
// An empty byte slice returns nothing.
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRune(b)
|
||||
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return b, nil, sbAny
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionSentenceBreakState(state, r, b[length:], "")
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary bool
|
||||
for {
|
||||
r, l := utf8.DecodeRune(b[length:])
|
||||
state, boundary = transitionSentenceBreakState(state, r, b[length+l:], "")
|
||||
|
||||
if boundary {
|
||||
return b[:length], b[length:], state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(b) <= length {
|
||||
return b, nil, sbAny
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FirstSentenceInString is like [FirstSentence] but its input and outputs are
|
||||
// strings.
|
||||
func FirstSentenceInString(str string, state int) (sentence, rest string, newState int) {
|
||||
// An empty string returns nothing.
|
||||
if len(str) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRuneInString(str)
|
||||
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return str, "", sbAny
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionSentenceBreakState(state, r, nil, str[length:])
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary bool
|
||||
for {
|
||||
r, l := utf8.DecodeRuneInString(str[length:])
|
||||
state, boundary = transitionSentenceBreakState(state, r, nil, str[length+l:])
|
||||
|
||||
if boundary {
|
||||
return str[:length], str[length:], state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(str) <= length {
|
||||
return str, "", sbAny
|
||||
}
|
||||
}
|
||||
}
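A minimal usage sketch for the [FirstSentenceInString] API documented above; the sample text is arbitrary and the snippet is illustrative, not part of the vendored source.

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	text := "This is sentence one. And this is sentence two."
	state := -1
	var sentence string
	for len(text) > 0 {
		// Pass -1 on the first call, then the state returned by the previous call.
		sentence, text, state = uniseg.FirstSentenceInString(text, state)
		fmt.Printf("%q\n", sentence)
	}
}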
|
||||
2845
vendor/github.com/rivo/uniseg/sentenceproperties.go
generated
vendored
File diff suppressed because it is too large
276
vendor/github.com/rivo/uniseg/sentencerules.go
generated
vendored
@@ -1,276 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// The states of the sentence break parser.
|
||||
const (
|
||||
sbAny = iota
|
||||
sbCR
|
||||
sbParaSep
|
||||
sbATerm
|
||||
sbUpper
|
||||
sbLower
|
||||
sbSB7
|
||||
sbSB8Close
|
||||
sbSB8Sp
|
||||
sbSTerm
|
||||
sbSB8aClose
|
||||
sbSB8aSp
|
||||
)
|
||||
|
||||
// sbTransitions implements the sentence break parser's state transitions. It's
|
||||
// analogous to [grTransitions], see comments there for details.
|
||||
//
|
||||
// Unicode version 15.0.0.
|
||||
func sbTransitions(state, prop int) (newState int, sentenceBreak bool, rule int) {
|
||||
switch uint64(state) | uint64(prop)<<32 {
|
||||
// SB3.
|
||||
case sbAny | prCR<<32:
|
||||
return sbCR, false, 9990
|
||||
case sbCR | prLF<<32:
|
||||
return sbParaSep, false, 30
|
||||
|
||||
// SB4.
|
||||
case sbAny | prSep<<32:
|
||||
return sbParaSep, false, 9990
|
||||
case sbAny | prLF<<32:
|
||||
return sbParaSep, false, 9990
|
||||
case sbParaSep | prAny<<32:
|
||||
return sbAny, true, 40
|
||||
case sbCR | prAny<<32:
|
||||
return sbAny, true, 40
|
||||
|
||||
// SB6.
|
||||
case sbAny | prATerm<<32:
|
||||
return sbATerm, false, 9990
|
||||
case sbATerm | prNumeric<<32:
|
||||
return sbAny, false, 60
|
||||
case sbSB7 | prNumeric<<32:
|
||||
return sbAny, false, 60 // Because ATerm also appears in SB7.
|
||||
|
||||
// SB7.
|
||||
case sbAny | prUpper<<32:
|
||||
return sbUpper, false, 9990
|
||||
case sbAny | prLower<<32:
|
||||
return sbLower, false, 9990
|
||||
case sbUpper | prATerm<<32:
|
||||
return sbSB7, false, 70
|
||||
case sbLower | prATerm<<32:
|
||||
return sbSB7, false, 70
|
||||
case sbSB7 | prUpper<<32:
|
||||
return sbUpper, false, 70
|
||||
|
||||
// SB8a.
|
||||
case sbAny | prSTerm<<32:
|
||||
return sbSTerm, false, 9990
|
||||
case sbATerm | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbATerm | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbATerm | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSB7 | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSB7 | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSB7 | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSB8Close | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSB8Close | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSB8Close | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSB8Sp | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSB8Sp | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSB8Sp | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSTerm | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSTerm | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSTerm | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSB8aClose | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSB8aClose | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSB8aClose | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
case sbSB8aSp | prSContinue<<32:
|
||||
return sbAny, false, 81
|
||||
case sbSB8aSp | prATerm<<32:
|
||||
return sbATerm, false, 81
|
||||
case sbSB8aSp | prSTerm<<32:
|
||||
return sbSTerm, false, 81
|
||||
|
||||
// SB9.
|
||||
case sbATerm | prClose<<32:
|
||||
return sbSB8Close, false, 90
|
||||
case sbSB7 | prClose<<32:
|
||||
return sbSB8Close, false, 90
|
||||
case sbSB8Close | prClose<<32:
|
||||
return sbSB8Close, false, 90
|
||||
case sbATerm | prSp<<32:
|
||||
return sbSB8Sp, false, 90
|
||||
case sbSB7 | prSp<<32:
|
||||
return sbSB8Sp, false, 90
|
||||
case sbSB8Close | prSp<<32:
|
||||
return sbSB8Sp, false, 90
|
||||
case sbSTerm | prClose<<32:
|
||||
return sbSB8aClose, false, 90
|
||||
case sbSB8aClose | prClose<<32:
|
||||
return sbSB8aClose, false, 90
|
||||
case sbSTerm | prSp<<32:
|
||||
return sbSB8aSp, false, 90
|
||||
case sbSB8aClose | prSp<<32:
|
||||
return sbSB8aSp, false, 90
|
||||
case sbATerm | prSep<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbATerm | prCR<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbATerm | prLF<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB7 | prSep<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB7 | prCR<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB7 | prLF<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8Close | prSep<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8Close | prCR<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8Close | prLF<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSTerm | prSep<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSTerm | prCR<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSTerm | prLF<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8aClose | prSep<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8aClose | prCR<<32:
|
||||
return sbParaSep, false, 90
|
||||
case sbSB8aClose | prLF<<32:
|
||||
return sbParaSep, false, 90
|
||||
|
||||
// SB10.
|
||||
case sbSB8Sp | prSp<<32:
|
||||
return sbSB8Sp, false, 100
|
||||
case sbSB8aSp | prSp<<32:
|
||||
return sbSB8aSp, false, 100
|
||||
case sbSB8Sp | prSep<<32:
|
||||
return sbParaSep, false, 100
|
||||
case sbSB8Sp | prCR<<32:
|
||||
return sbParaSep, false, 100
|
||||
case sbSB8Sp | prLF<<32:
|
||||
return sbParaSep, false, 100
|
||||
|
||||
// SB11.
|
||||
case sbATerm | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSB7 | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSB8Close | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSB8Sp | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSTerm | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSB8aClose | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
case sbSB8aSp | prAny<<32:
|
||||
return sbAny, true, 110
|
||||
// We'll always break after ParaSep due to SB4.
|
||||
|
||||
default:
|
||||
return -1, false, -1
|
||||
}
|
||||
}
|
||||
|
||||
// transitionSentenceBreakState determines the new state of the sentence break
|
||||
// parser given the current state and the next code point. It also returns
|
||||
// whether a sentence boundary was detected. If more than one code point is
|
||||
// needed to determine the new state, the byte slice or the string starting
|
||||
// after rune "r" can be used (whichever is not nil or empty) for further
|
||||
// lookups.
|
||||
func transitionSentenceBreakState(state int, r rune, b []byte, str string) (newState int, sentenceBreak bool) {
|
||||
// Determine the property of the next character.
|
||||
nextProperty := property(sentenceBreakCodePoints, r)
|
||||
|
||||
// SB5 (Replacing Ignore Rules).
|
||||
if nextProperty == prExtend || nextProperty == prFormat {
|
||||
if state == sbParaSep || state == sbCR {
|
||||
return sbAny, true // Make sure we don't apply SB5 to SB3 or SB4.
|
||||
}
|
||||
if state < 0 {
|
||||
return sbAny, true // SB1.
|
||||
}
|
||||
return state, false
|
||||
}
|
||||
|
||||
// Find the applicable transition in the table.
|
||||
var rule int
|
||||
newState, sentenceBreak, rule = sbTransitions(state, nextProperty)
|
||||
if newState < 0 {
|
||||
// No specific transition found. Try the less specific ones.
|
||||
anyPropState, anyPropProp, anyPropRule := sbTransitions(state, prAny)
|
||||
anyStateState, anyStateProp, anyStateRule := sbTransitions(sbAny, nextProperty)
|
||||
if anyPropState >= 0 && anyStateState >= 0 {
|
||||
// Both apply. We'll use a mix (see comments for grTransitions).
|
||||
newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule
|
||||
if anyPropRule < anyStateRule {
|
||||
sentenceBreak, rule = anyPropProp, anyPropRule
|
||||
}
|
||||
} else if anyPropState >= 0 {
|
||||
// We only have a specific state.
|
||||
newState, sentenceBreak, rule = anyPropState, anyPropProp, anyPropRule
|
||||
// This branch will probably never be reached because okAnyState will
|
||||
// always be true given the current transition map. But we keep it here
|
||||
// for future modifications to the transition map where this may not be
|
||||
// true anymore.
|
||||
} else if anyStateState >= 0 {
|
||||
// We only have a specific property.
|
||||
newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule
|
||||
} else {
|
||||
// No known transition. SB999: Any × Any.
|
||||
newState, sentenceBreak, rule = sbAny, false, 9990
|
||||
}
|
||||
}
|
||||
|
||||
// SB8.
|
||||
if rule > 80 && (state == sbATerm || state == sbSB8Close || state == sbSB8Sp || state == sbSB7) {
|
||||
// Check the right side of the rule.
|
||||
var length int
|
||||
for nextProperty != prOLetter &&
|
||||
nextProperty != prUpper &&
|
||||
nextProperty != prLower &&
|
||||
nextProperty != prSep &&
|
||||
nextProperty != prCR &&
|
||||
nextProperty != prLF &&
|
||||
nextProperty != prATerm &&
|
||||
nextProperty != prSTerm {
|
||||
// Move on to the next rune.
|
||||
if b != nil { // Byte slice version.
|
||||
r, length = utf8.DecodeRune(b)
|
||||
b = b[length:]
|
||||
} else { // String version.
|
||||
r, length = utf8.DecodeRuneInString(str)
|
||||
str = str[length:]
|
||||
}
|
||||
if r == utf8.RuneError {
|
||||
break
|
||||
}
|
||||
nextProperty = property(sentenceBreakCodePoints, r)
|
||||
}
|
||||
if nextProperty == prLower {
|
||||
return sbLower, false
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
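sbTransitions above keys its switch on a single uint64 that packs the current state into the low 32 bits and the next property into the high 32 bits, so one switch statement can stand in for a two-key lookup table. A tiny sketch of that packing and unpacking; the concrete values are placeholders, except that 3 corresponds to sbATerm in the state list above.

package main

import "fmt"

// pack combines a parser state and a property into one key, using the same
// scheme as the switch key above: state in the low 32 bits, property in the
// high 32 bits. Both values must be non-negative and fit in 32 bits.
func pack(state, prop int) uint64 {
	return uint64(state) | uint64(prop)<<32
}

// unpack recovers the two components from a packed key.
func unpack(key uint64) (state, prop int) {
	return int(key & 0xffffffff), int(key >> 32)
}

func main() {
	key := pack(3, 17) // state 3 (sbATerm above), property 17 (placeholder)
	s, p := unpack(key)
	fmt.Println(s, p) // 3 17
}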
|
||||
242
vendor/github.com/rivo/uniseg/step.go
generated
vendored
@@ -1,242 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// The bit masks used to extract boundary information returned by [Step].
|
||||
const (
|
||||
MaskLine = 3
|
||||
MaskWord = 4
|
||||
MaskSentence = 8
|
||||
)
|
||||
|
||||
// The number of bits to shift the boundary information returned by [Step] to
|
||||
// obtain the monospace width of the grapheme cluster.
|
||||
const ShiftWidth = 4
|
||||
|
||||
// The bit positions by which boundary flags are shifted by the [Step] function.
|
||||
// These must correspond to the Mask constants.
|
||||
const (
|
||||
shiftWord = 2
|
||||
shiftSentence = 3
|
||||
// shiftwWidth is ShiftWidth above. No mask as these are always the remaining bits.
|
||||
)
|
||||
|
||||
// The bit positions by which states are shifted by the [Step] function. These
|
||||
// values must ensure state values defined for each of the boundary algorithms
|
||||
// don't overlap (and that they all still fit in a single int). These must
|
||||
// correspond to the Mask constants.
|
||||
const (
|
||||
shiftWordState = 4
|
||||
shiftSentenceState = 9
|
||||
shiftLineState = 13
|
||||
shiftPropState = 21 // No mask as these are always the remaining bits.
|
||||
)
|
||||
|
||||
// The bit mask used to extract the state returned by the [Step] function, after
|
||||
// shifting. These values must correspond to the shift constants.
|
||||
const (
|
||||
maskGraphemeState = 0xf
|
||||
maskWordState = 0x1f
|
||||
maskSentenceState = 0xf
|
||||
maskLineState = 0xff
|
||||
)
|
||||
|
||||
// Step returns the first grapheme cluster (user-perceived character) found in
|
||||
// the given byte slice. It also returns information about the boundary between
|
||||
// that grapheme cluster and the one following it as well as the monospace width
|
||||
// of the grapheme cluster. There are three types of boundary information: word
|
||||
// boundaries, sentence boundaries, and line breaks. This function is therefore
|
||||
// a combination of [FirstGraphemeCluster], [FirstWord], [FirstSentence], and
|
||||
// [FirstLineSegment].
|
||||
//
|
||||
// The "boundaries" return value can be evaluated as follows:
|
||||
//
|
||||
// - boundaries&MaskWord != 0: The boundary is a word boundary.
|
||||
// - boundaries&MaskWord == 0: The boundary is not a word boundary.
|
||||
// - boundaries&MaskSentence != 0: The boundary is a sentence boundary.
|
||||
// - boundaries&MaskSentence == 0: The boundary is not a sentence boundary.
|
||||
// - boundaries&MaskLine == LineDontBreak: You must not break the line at the
|
||||
// boundary.
|
||||
// - boundaries&MaskLine == LineMustBreak: You must break the line at the
|
||||
// boundary.
|
||||
// - boundaries&MaskLine == LineCanBreak: You may or may not break the line at
|
||||
// the boundary.
|
||||
// - boundaries >> ShiftWidth: The width of the grapheme cluster for most
|
||||
// monospace fonts where a value of 1 represents one character cell.
|
||||
//
|
||||
// This function can be called continuously to extract all grapheme clusters
|
||||
// from a byte slice, as illustrated in the examples below.
|
||||
//
|
||||
// If you don't know which state to pass, for example when calling the function
|
||||
// for the first time, you must pass -1. For consecutive calls, pass the state
|
||||
// and rest slice returned by the previous call.
|
||||
//
|
||||
// The "rest" slice is the sub-slice of the original byte slice "b" starting
|
||||
// after the last byte of the identified grapheme cluster. If the length of the
|
||||
// "rest" slice is 0, the entire byte slice "b" has been processed. The
|
||||
// "cluster" byte slice is the sub-slice of the input slice containing the
|
||||
// first identified grapheme cluster.
|
||||
//
|
||||
// Given an empty byte slice "b", the function returns nil values.
|
||||
//
|
||||
// While slightly less convenient than using the Graphemes class, this function
|
||||
// has much better performance and makes no allocations. It lends itself well to
|
||||
// large byte slices.
|
||||
//
|
||||
// Note that in accordance with [UAX #14 LB3], the final segment will end with
|
||||
// a mandatory line break (boundaries&MaskLine == LineMustBreak). You can choose
|
||||
// to ignore this by checking if the length of the "rest" slice is 0 and calling
|
||||
// [HasTrailingLineBreak] or [HasTrailingLineBreakInString] on the last rune.
|
||||
//
|
||||
// [UAX #14 LB3]: https://www.unicode.org/reports/tr14/#Algorithm
|
||||
func Step(b []byte, state int) (cluster, rest []byte, boundaries int, newState int) {
|
||||
// An empty byte slice returns nothing.
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRune(b)
|
||||
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
var prop int
|
||||
if state < 0 {
|
||||
prop = propertyGraphemes(r)
|
||||
} else {
|
||||
prop = state >> shiftPropState
|
||||
}
|
||||
return b, nil, LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (runeWidth(r, prop) << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState) | (prop << shiftPropState)
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
var graphemeState, wordState, sentenceState, lineState, firstProp int
|
||||
remainder := b[length:]
|
||||
if state < 0 {
|
||||
graphemeState, firstProp, _ = transitionGraphemeState(state, r)
|
||||
wordState, _ = transitionWordBreakState(state, r, remainder, "")
|
||||
sentenceState, _ = transitionSentenceBreakState(state, r, remainder, "")
|
||||
lineState, _ = transitionLineBreakState(state, r, remainder, "")
|
||||
} else {
|
||||
graphemeState = state & maskGraphemeState
|
||||
wordState = (state >> shiftWordState) & maskWordState
|
||||
sentenceState = (state >> shiftSentenceState) & maskSentenceState
|
||||
lineState = (state >> shiftLineState) & maskLineState
|
||||
firstProp = state >> shiftPropState
|
||||
}
|
||||
|
||||
// Transition until we find a grapheme cluster boundary.
|
||||
width := runeWidth(r, firstProp)
|
||||
for {
|
||||
var (
|
||||
graphemeBoundary, wordBoundary, sentenceBoundary bool
|
||||
lineBreak, prop int
|
||||
)
|
||||
|
||||
r, l := utf8.DecodeRune(remainder)
|
||||
remainder = b[length+l:]
|
||||
|
||||
graphemeState, prop, graphemeBoundary = transitionGraphemeState(graphemeState, r)
|
||||
wordState, wordBoundary = transitionWordBreakState(wordState, r, remainder, "")
|
||||
sentenceState, sentenceBoundary = transitionSentenceBreakState(sentenceState, r, remainder, "")
|
||||
lineState, lineBreak = transitionLineBreakState(lineState, r, remainder, "")
|
||||
|
||||
if graphemeBoundary {
|
||||
boundary := lineBreak | (width << ShiftWidth)
|
||||
if wordBoundary {
|
||||
boundary |= 1 << shiftWord
|
||||
}
|
||||
if sentenceBoundary {
|
||||
boundary |= 1 << shiftSentence
|
||||
}
|
||||
return b[:length], b[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState)
|
||||
}
|
||||
|
||||
if firstProp == prExtendedPictographic {
|
||||
if r == vs15 {
|
||||
width = 1
|
||||
} else if r == vs16 {
|
||||
width = 2
|
||||
}
|
||||
} else if firstProp != prRegionalIndicator && firstProp != prL {
|
||||
width += runeWidth(r, prop)
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(b) <= length {
|
||||
return b, nil, LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (width << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState) | (prop << shiftPropState)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// StepString is like [Step] but its input and outputs are strings.
|
||||
func StepString(str string, state int) (cluster, rest string, boundaries int, newState int) {
|
||||
// An empty string returns nothing.
|
||||
if len(str) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRuneInString(str)
|
||||
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
prop := propertyGraphemes(r)
|
||||
return str, "", LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (runeWidth(r, prop) << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState)
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
var graphemeState, wordState, sentenceState, lineState, firstProp int
|
||||
remainder := str[length:]
|
||||
if state < 0 {
|
||||
graphemeState, firstProp, _ = transitionGraphemeState(state, r)
|
||||
wordState, _ = transitionWordBreakState(state, r, nil, remainder)
|
||||
sentenceState, _ = transitionSentenceBreakState(state, r, nil, remainder)
|
||||
lineState, _ = transitionLineBreakState(state, r, nil, remainder)
|
||||
} else {
|
||||
graphemeState = state & maskGraphemeState
|
||||
wordState = (state >> shiftWordState) & maskWordState
|
||||
sentenceState = (state >> shiftSentenceState) & maskSentenceState
|
||||
lineState = (state >> shiftLineState) & maskLineState
|
||||
firstProp = state >> shiftPropState
|
||||
}
|
||||
|
||||
// Transition until we find a grapheme cluster boundary.
|
||||
width := runeWidth(r, firstProp)
|
||||
for {
|
||||
var (
|
||||
graphemeBoundary, wordBoundary, sentenceBoundary bool
|
||||
lineBreak, prop int
|
||||
)
|
||||
|
||||
r, l := utf8.DecodeRuneInString(remainder)
|
||||
remainder = str[length+l:]
|
||||
|
||||
graphemeState, prop, graphemeBoundary = transitionGraphemeState(graphemeState, r)
|
||||
wordState, wordBoundary = transitionWordBreakState(wordState, r, nil, remainder)
|
||||
sentenceState, sentenceBoundary = transitionSentenceBreakState(sentenceState, r, nil, remainder)
|
||||
lineState, lineBreak = transitionLineBreakState(lineState, r, nil, remainder)
|
||||
|
||||
if graphemeBoundary {
|
||||
boundary := lineBreak | (width << ShiftWidth)
|
||||
if wordBoundary {
|
||||
boundary |= 1 << shiftWord
|
||||
}
|
||||
if sentenceBoundary {
|
||||
boundary |= 1 << shiftSentence
|
||||
}
|
||||
return str[:length], str[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState)
|
||||
}
|
||||
|
||||
if firstProp == prExtendedPictographic {
|
||||
if r == vs15 {
|
||||
width = 1
|
||||
} else if r == vs16 {
|
||||
width = 2
|
||||
}
|
||||
} else if firstProp != prRegionalIndicator && firstProp != prL {
|
||||
width += runeWidth(r, prop)
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(str) <= length {
|
||||
return str, "", LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (width << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState) | (prop << shiftPropState)
|
||||
}
|
||||
}
|
||||
}
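A minimal sketch of driving [StepString] and decoding the packed "boundaries" value with the Mask and Shift constants documented above; the sample text is arbitrary and the snippet is illustrative only.

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	text := "Hi! 世界"
	state := -1
	for len(text) > 0 {
		var cluster string
		var boundaries int
		cluster, text, boundaries, state = uniseg.StepString(text, state)

		width := boundaries >> uniseg.ShiftWidth          // monospace cells
		word := boundaries&uniseg.MaskWord != 0           // word boundary follows
		sentence := boundaries&uniseg.MaskSentence != 0   // sentence boundary follows
		line := boundaries & uniseg.MaskLine              // LineDontBreak, LineCanBreak or LineMustBreak

		fmt.Printf("%q width=%d word=%t sentence=%t line=%d\n",
			cluster, width, word, sentence, line)
	}
}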
|
||||
61
vendor/github.com/rivo/uniseg/width.go
generated
vendored
@@ -1,61 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
// EastAsianAmbiguousWidth specifies the monospace width for East Asian
|
||||
// characters classified as Ambiguous. The default is 1 but some rare fonts
|
||||
// render them with a width of 2.
|
||||
var EastAsianAmbiguousWidth = 1
|
||||
|
||||
// runeWidth returns the monospace width for the given rune. The provided
|
||||
// grapheme property is a value mapped by the [graphemeCodePoints] table.
|
||||
//
|
||||
// Every rune has a width of 1, except for runes with the following properties
|
||||
// (evaluated in this order):
|
||||
//
|
||||
// - Control, CR, LF, Extend, ZWJ: Width of 0
|
||||
// - \u2e3a, TWO-EM DASH: Width of 3
|
||||
// - \u2e3b, THREE-EM DASH: Width of 4
|
||||
// - East-Asian width Fullwidth and Wide: Width of 2 (Ambiguous and Neutral
|
||||
// have a width of 1)
|
||||
// - Regional Indicator: Width of 2
|
||||
// - Extended Pictographic: Width of 2, unless Emoji Presentation is "No".
|
||||
func runeWidth(r rune, graphemeProperty int) int {
|
||||
switch graphemeProperty {
|
||||
case prControl, prCR, prLF, prExtend, prZWJ:
|
||||
return 0
|
||||
case prRegionalIndicator:
|
||||
return 2
|
||||
case prExtendedPictographic:
|
||||
if property(emojiPresentation, r) == prEmojiPresentation {
|
||||
return 2
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
switch r {
|
||||
case 0x2e3a:
|
||||
return 3
|
||||
case 0x2e3b:
|
||||
return 4
|
||||
}
|
||||
|
||||
switch propertyEastAsianWidth(r) {
|
||||
case prW, prF:
|
||||
return 2
|
||||
case prA:
|
||||
return EastAsianAmbiguousWidth
|
||||
}
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
// StringWidth returns the monospace width for the given string, that is, the
|
||||
// number of same-size cells to be occupied by the string.
|
||||
func StringWidth(s string) (width int) {
|
||||
state := -1
|
||||
for len(s) > 0 {
|
||||
var w int
|
||||
_, s, w, state = FirstGraphemeClusterInString(s, state)
|
||||
width += w
|
||||
}
|
||||
return
|
||||
}
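A small usage sketch for [StringWidth]; the widths noted in the comments follow the runeWidth rules listed above (Wide/Fullwidth characters and Regional Indicator pairs count as two cells) and are expectations, not verified output.

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	fmt.Println(uniseg.StringWidth("hello")) // 5: plain ASCII, one cell each
	fmt.Println(uniseg.StringWidth("世界"))  // 4: East Asian Wide, two cells each
	fmt.Println(uniseg.StringWidth("🇩🇪"))   // 2: a single Regional Indicator pair
	fmt.Println(uniseg.StringWidth("café"))  // 4: the accent adds no extra cell
}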
|
||||
89
vendor/github.com/rivo/uniseg/word.go
generated
vendored
@@ -1,89 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// FirstWord returns the first word found in the given byte slice according to
|
||||
// the rules of [Unicode Standard Annex #29, Word Boundaries]. This function can
|
||||
// be called continuously to extract all words from a byte slice, as illustrated
|
||||
// in the example below.
|
||||
//
|
||||
// If you don't know the current state, for example when calling the function
|
||||
// for the first time, you must pass -1. For consecutive calls, pass the state
|
||||
// and rest slice returned by the previous call.
|
||||
//
|
||||
// The "rest" slice is the sub-slice of the original byte slice "b" starting
|
||||
// after the last byte of the identified word. If the length of the "rest" slice
|
||||
// is 0, the entire byte slice "b" has been processed. The "word" byte slice is
|
||||
// the sub-slice of the input slice containing the identified word.
|
||||
//
|
||||
// Given an empty byte slice "b", the function returns nil values.
|
||||
//
|
||||
// [Unicode Standard Annex #29, Word Boundaries]: http://unicode.org/reports/tr29/#Word_Boundaries
|
||||
func FirstWord(b []byte, state int) (word, rest []byte, newState int) {
|
||||
// An empty byte slice returns nothing.
|
||||
if len(b) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRune(b)
|
||||
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return b, nil, wbAny
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionWordBreakState(state, r, b[length:], "")
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary bool
|
||||
for {
|
||||
r, l := utf8.DecodeRune(b[length:])
|
||||
state, boundary = transitionWordBreakState(state, r, b[length+l:], "")
|
||||
|
||||
if boundary {
|
||||
return b[:length], b[length:], state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(b) <= length {
|
||||
return b, nil, wbAny
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FirstWordInString is like [FirstWord] but its input and outputs are strings.
|
||||
func FirstWordInString(str string, state int) (word, rest string, newState int) {
|
||||
// An empty string returns nothing.
|
||||
if len(str) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract the first rune.
|
||||
r, length := utf8.DecodeRuneInString(str)
|
||||
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
|
||||
return str, "", wbAny
|
||||
}
|
||||
|
||||
// If we don't know the state, determine it now.
|
||||
if state < 0 {
|
||||
state, _ = transitionWordBreakState(state, r, nil, str[length:])
|
||||
}
|
||||
|
||||
// Transition until we find a boundary.
|
||||
var boundary bool
|
||||
for {
|
||||
r, l := utf8.DecodeRuneInString(str[length:])
|
||||
state, boundary = transitionWordBreakState(state, r, nil, str[length+l:])
|
||||
|
||||
if boundary {
|
||||
return str[:length], str[length:], state
|
||||
}
|
||||
|
||||
length += l
|
||||
if len(str) <= length {
|
||||
return str, "", wbAny
|
||||
}
|
||||
}
|
||||
}
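A minimal usage sketch for the [FirstWordInString] API documented above; the sample text is arbitrary and the snippet is illustrative, not part of the vendored source.

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	text := "Hello, Wörld!"
	state := -1
	var word string
	for len(text) > 0 {
		// Pass -1 on the first call, then the state returned by the previous call.
		word, text, state = uniseg.FirstWordInString(text, state)
		fmt.Printf("%q\n", word)
	}
}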
|
||||
1883
vendor/github.com/rivo/uniseg/wordproperties.go
generated
vendored
File diff suppressed because it is too large
282
vendor/github.com/rivo/uniseg/wordrules.go
generated
vendored
@@ -1,282 +0,0 @@
|
||||
package uniseg
|
||||
|
||||
import "unicode/utf8"
|
||||
|
||||
// The states of the word break parser.
|
||||
const (
|
||||
wbAny = iota
|
||||
wbCR
|
||||
wbLF
|
||||
wbNewline
|
||||
wbWSegSpace
|
||||
wbHebrewLetter
|
||||
wbALetter
|
||||
wbWB7
|
||||
wbWB7c
|
||||
wbNumeric
|
||||
wbWB11
|
||||
wbKatakana
|
||||
wbExtendNumLet
|
||||
wbOddRI
|
||||
wbEvenRI
|
||||
wbZWJBit = 16 // This bit is set for any states followed by at least one zero-width joiner (see WB4 and WB3c).
|
||||
)
|
||||
|
||||
// wbTransitions implements the word break parser's state transitions. It's
|
||||
// analogous to [grTransitions], see comments there for details.
|
||||
//
|
||||
// Unicode version 15.0.0.
|
||||
func wbTransitions(state, prop int) (newState int, wordBreak bool, rule int) {
|
||||
switch uint64(state) | uint64(prop)<<32 {
|
||||
// WB3b.
|
||||
case wbAny | prNewline<<32:
|
||||
return wbNewline, true, 32
|
||||
case wbAny | prCR<<32:
|
||||
return wbCR, true, 32
|
||||
case wbAny | prLF<<32:
|
||||
return wbLF, true, 32
|
||||
|
||||
// WB3a.
|
||||
case wbNewline | prAny<<32:
|
||||
return wbAny, true, 31
|
||||
case wbCR | prAny<<32:
|
||||
return wbAny, true, 31
|
||||
case wbLF | prAny<<32:
|
||||
return wbAny, true, 31
|
||||
|
||||
// WB3.
|
||||
case wbCR | prLF<<32:
|
||||
return wbLF, false, 30
|
||||
|
||||
// WB3d.
|
||||
case wbAny | prWSegSpace<<32:
|
||||
return wbWSegSpace, true, 9990
|
||||
case wbWSegSpace | prWSegSpace<<32:
|
||||
return wbWSegSpace, false, 34
|
||||
|
||||
// WB5.
|
||||
case wbAny | prALetter<<32:
|
||||
return wbALetter, true, 9990
|
||||
case wbAny | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, true, 9990
|
||||
case wbALetter | prALetter<<32:
|
||||
return wbALetter, false, 50
|
||||
case wbALetter | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 50
|
||||
case wbHebrewLetter | prALetter<<32:
|
||||
return wbALetter, false, 50
|
||||
case wbHebrewLetter | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 50
|
||||
|
||||
// WB7. Transitions to wbWB7 handled by transitionWordBreakState().
|
||||
case wbWB7 | prALetter<<32:
|
||||
return wbALetter, false, 70
|
||||
case wbWB7 | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 70
|
||||
|
||||
// WB7a.
|
||||
case wbHebrewLetter | prSingleQuote<<32:
|
||||
return wbAny, false, 71
|
||||
|
||||
// WB7c. Transitions to wbWB7c handled by transitionWordBreakState().
|
||||
case wbWB7c | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 73
|
||||
|
||||
// WB8.
|
||||
case wbAny | prNumeric<<32:
|
||||
return wbNumeric, true, 9990
|
||||
case wbNumeric | prNumeric<<32:
|
||||
return wbNumeric, false, 80
|
||||
|
||||
// WB9.
|
||||
case wbALetter | prNumeric<<32:
|
||||
return wbNumeric, false, 90
|
||||
case wbHebrewLetter | prNumeric<<32:
|
||||
return wbNumeric, false, 90
|
||||
|
||||
// WB10.
|
||||
case wbNumeric | prALetter<<32:
|
||||
return wbALetter, false, 100
|
||||
case wbNumeric | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 100
|
||||
|
||||
// WB11. Transitions to wbWB11 handled by transitionWordBreakState().
|
||||
case wbWB11 | prNumeric<<32:
|
||||
return wbNumeric, false, 110
|
||||
|
||||
// WB13.
|
||||
case wbAny | prKatakana<<32:
|
||||
return wbKatakana, true, 9990
|
||||
case wbKatakana | prKatakana<<32:
|
||||
return wbKatakana, false, 130
|
||||
|
||||
// WB13a.
|
||||
case wbAny | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, true, 9990
|
||||
case wbALetter | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, false, 131
|
||||
case wbHebrewLetter | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, false, 131
|
||||
case wbNumeric | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, false, 131
|
||||
case wbKatakana | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, false, 131
|
||||
case wbExtendNumLet | prExtendNumLet<<32:
|
||||
return wbExtendNumLet, false, 131
|
||||
|
||||
// WB13b.
|
||||
case wbExtendNumLet | prALetter<<32:
|
||||
return wbALetter, false, 132
|
||||
case wbExtendNumLet | prHebrewLetter<<32:
|
||||
return wbHebrewLetter, false, 132
|
||||
case wbExtendNumLet | prNumeric<<32:
|
||||
return wbNumeric, false, 132
|
||||
case wbExtendNumLet | prKatakana<<32:
|
||||
return wbKatakana, false, 132
|
||||
|
||||
default:
|
||||
return -1, false, -1
|
||||
}
|
||||
}
|
||||
|
||||
// transitionWordBreakState determines the new state of the word break parser
|
||||
// given the current state and the next code point. It also returns whether a
|
||||
// word boundary was detected. If more than one code point is needed to
|
||||
// determine the new state, the byte slice or the string starting after rune "r"
|
||||
// can be used (whichever is not nil or empty) for further lookups.
|
||||
func transitionWordBreakState(state int, r rune, b []byte, str string) (newState int, wordBreak bool) {
|
||||
// Determine the property of the next character.
|
||||
nextProperty := property(workBreakCodePoints, r)
|
||||
|
||||
// "Replacing Ignore Rules".
|
||||
if nextProperty == prZWJ {
|
||||
// WB4 (for zero-width joiners).
|
||||
if state == wbNewline || state == wbCR || state == wbLF {
|
||||
return wbAny | wbZWJBit, true // Make sure we don't apply WB4 to WB3a.
|
||||
}
|
||||
if state < 0 {
|
||||
return wbAny | wbZWJBit, false
|
||||
}
|
||||
return state | wbZWJBit, false
|
||||
} else if nextProperty == prExtend || nextProperty == prFormat {
|
||||
// WB4 (for Extend and Format).
|
||||
if state == wbNewline || state == wbCR || state == wbLF {
|
||||
return wbAny, true // Make sure we don't apply WB4 to WB3a.
|
||||
}
|
||||
if state == wbWSegSpace || state == wbAny|wbZWJBit {
|
||||
return wbAny, false // We don't break but this is also not WB3d or WB3c.
|
||||
}
|
||||
if state < 0 {
|
||||
return wbAny, false
|
||||
}
|
||||
return state, false
|
||||
} else if nextProperty == prExtendedPictographic && state >= 0 && state&wbZWJBit != 0 {
|
||||
// WB3c.
|
||||
return wbAny, false
|
||||
}
|
||||
if state >= 0 {
|
||||
state = state &^ wbZWJBit
|
||||
}
|
||||
|
||||
// Find the applicable transition in the table.
|
||||
var rule int
|
||||
newState, wordBreak, rule = wbTransitions(state, nextProperty)
|
||||
if newState < 0 {
|
||||
// No specific transition found. Try the less specific ones.
|
||||
anyPropState, anyPropWordBreak, anyPropRule := wbTransitions(state, prAny)
|
||||
anyStateState, anyStateWordBreak, anyStateRule := wbTransitions(wbAny, nextProperty)
|
||||
if anyPropState >= 0 && anyStateState >= 0 {
|
||||
// Both apply. We'll use a mix (see comments for grTransitions).
|
||||
newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule
|
||||
if anyPropRule < anyStateRule {
|
||||
wordBreak, rule = anyPropWordBreak, anyPropRule
|
||||
}
|
||||
} else if anyPropState >= 0 {
|
||||
// We only have a specific state.
|
||||
newState, wordBreak, rule = anyPropState, anyPropWordBreak, anyPropRule
|
||||
// This branch will probably never be reached because okAnyState will
|
||||
// always be true given the current transition map. But we keep it here
|
||||
// for future modifications to the transition map where this may not be
|
||||
// true anymore.
|
||||
} else if anyStateState >= 0 {
|
||||
// We only have a specific property.
|
||||
newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule
|
||||
} else {
|
||||
// No known transition. WB999: Any ÷ Any.
|
||||
newState, wordBreak, rule = wbAny, true, 9990
|
||||
}
|
||||
}
|
||||
|
||||
// For those rules that need to look up runes further in the string, we
|
||||
// determine the property after nextProperty, skipping over Format, Extend,
|
||||
// and ZWJ (according to WB4). It's -1 if not needed, or if such a rune cannot
|
||||
// be determined (because the text ends or the rune is faulty).
|
||||
farProperty := -1
|
||||
if rule > 60 &&
|
||||
(state == wbALetter || state == wbHebrewLetter || state == wbNumeric) &&
|
||||
(nextProperty == prMidLetter || nextProperty == prMidNumLet || nextProperty == prSingleQuote || // WB6.
|
||||
nextProperty == prDoubleQuote || // WB7b.
|
||||
nextProperty == prMidNum) { // WB12.
|
||||
for {
|
||||
var (
|
||||
r rune
|
||||
length int
|
||||
)
|
||||
if b != nil { // Byte slice version.
|
||||
r, length = utf8.DecodeRune(b)
|
||||
b = b[length:]
|
||||
} else { // String version.
|
||||
r, length = utf8.DecodeRuneInString(str)
|
||||
str = str[length:]
|
||||
}
|
||||
if r == utf8.RuneError {
|
||||
break
|
||||
}
|
||||
prop := property(workBreakCodePoints, r)
|
||||
if prop == prExtend || prop == prFormat || prop == prZWJ {
|
||||
continue
|
||||
}
|
||||
farProperty = prop
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// WB6.
|
||||
if rule > 60 &&
|
||||
(state == wbALetter || state == wbHebrewLetter) &&
|
||||
(nextProperty == prMidLetter || nextProperty == prMidNumLet || nextProperty == prSingleQuote) &&
|
||||
(farProperty == prALetter || farProperty == prHebrewLetter) {
|
||||
return wbWB7, false
|
||||
}
|
||||
|
||||
// WB7b.
|
||||
if rule > 72 &&
|
||||
state == wbHebrewLetter &&
|
||||
nextProperty == prDoubleQuote &&
|
||||
farProperty == prHebrewLetter {
|
||||
return wbWB7c, false
|
||||
}
|
||||
|
||||
// WB12.
|
||||
if rule > 120 &&
|
||||
state == wbNumeric &&
|
||||
(nextProperty == prMidNum || nextProperty == prMidNumLet || nextProperty == prSingleQuote) &&
|
||||
farProperty == prNumeric {
|
||||
return wbWB11, false
|
||||
}
|
||||
|
||||
// WB15 and WB16.
|
||||
if newState == wbAny && nextProperty == prRegionalIndicator {
|
||||
if state != wbOddRI && state != wbEvenRI { // Includes state == -1.
|
||||
// Transition into the first RI.
|
||||
return wbOddRI, true
|
||||
}
|
||||
if state == wbOddRI {
|
||||
// Don't break pairs of Regional Indicators.
|
||||
return wbEvenRI, false
|
||||
}
|
||||
return wbOddRI, true // We can break after a pair.
|
||||
}
|
||||
|
||||
return
|
||||
}
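The WB6/WB7 and WB11/WB12 look-ahead handled above is what keeps an apostrophe or a decimal separator from splitting a word or a number. A small illustration via the exported API; the expected groupings noted in the comment follow UAX #29 but are assumptions, not verified output.

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	for _, text := range []string{"can't", "1,000.5"} {
		state := -1
		var word string
		fmt.Printf("%q ->", text)
		for len(text) > 0 {
			word, text, state = uniseg.FirstWordInString(text, state)
			fmt.Printf(" %q", word)
		}
		fmt.Println()
		// Expected: each input comes back as a single word, because WB6/WB7
		// and WB11/WB12 suppress the break around ' , and . between letters
		// and digits respectively.
	}
}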
|
||||
24
vendor/modules.txt
vendored
@@ -254,6 +254,16 @@ github.com/cespare/xxhash/v2
# github.com/cevaris/ordered_map v0.0.0-20190319150403-3adeae072e73
## explicit
github.com/cevaris/ordered_map
# github.com/clipperhouse/displaywidth v0.3.1
## explicit; go 1.18
github.com/clipperhouse/displaywidth
# github.com/clipperhouse/stringish v0.1.1
## explicit; go 1.18
github.com/clipperhouse/stringish
# github.com/clipperhouse/uax29/v2 v2.2.0
## explicit; go 1.18
github.com/clipperhouse/uax29/v2/graphemes
github.com/clipperhouse/uax29/v2/internal/iterators
# github.com/cloudflare/circl v1.6.1
## explicit; go 1.22.0
github.com/cloudflare/circl/dh/x25519
@@ -1021,8 +1031,8 @@ github.com/mattn/go-colorable
# github.com/mattn/go-isatty v0.0.20
## explicit; go 1.15
github.com/mattn/go-isatty
# github.com/mattn/go-runewidth v0.0.16
## explicit; go 1.9
# github.com/mattn/go-runewidth v0.0.19
## explicit; go 1.20
github.com/mattn/go-runewidth
# github.com/mattn/go-sqlite3 v1.14.32
## explicit; go 1.19
@@ -1167,15 +1177,18 @@ github.com/nxadm/tail/winfile
# github.com/oklog/run v1.2.0
## explicit; go 1.20
github.com/oklog/run
# github.com/olekukonko/cat v0.0.0-20250911104152-50322a0618f6
## explicit; go 1.21
github.com/olekukonko/cat
# github.com/olekukonko/errors v1.1.0
## explicit; go 1.21
github.com/olekukonko/errors
# github.com/olekukonko/ll v0.0.9
# github.com/olekukonko/ll v0.1.2
## explicit; go 1.21
github.com/olekukonko/ll
github.com/olekukonko/ll/lh
github.com/olekukonko/ll/lx
# github.com/olekukonko/tablewriter v1.1.0
# github.com/olekukonko/tablewriter v1.1.1
## explicit; go 1.21
github.com/olekukonko/tablewriter
github.com/olekukonko/tablewriter/pkg/twwarp
@@ -1846,9 +1859,6 @@ github.com/rcrowley/go-metrics
## explicit; go 1.22.0
github.com/riandyrn/otelchi
github.com/riandyrn/otelchi/version
# github.com/rivo/uniseg v0.4.7
## explicit; go 1.18
github.com/rivo/uniseg
# github.com/rogpeppe/go-internal v1.14.1
## explicit; go 1.23
github.com/rogpeppe/go-internal/internal/syscall/windows