automatically ignore known forwarded addresses, fixes #64

Author: Aine
Date: 2023-09-18 12:35:37 +03:00
Parent: e90925eceb
Commit: 60b4386dd8
187 changed files with 4070 additions and 2667 deletions

View File

@@ -66,6 +66,7 @@ env vars
* **POSTMOOGLE_DB_DSN** - database connection string
* **POSTMOOGLE_DB_DIALECT** - database dialect (postgres, sqlite3)
* **POSTMOOGLE_MAILBOXES_RESERVED** - space-separated list of reserved mailboxes, [docs/mailboxes.md](docs/mailboxes.md)
* **POSTMOOGLE_MAILBOXES_FORWARDED** - space-separated list of forwarded-from email addresses that should be ignored when sending replies (see the sketch below)
* **POSTMOOGLE_MAILBOXES_ACTIVATION** - activation flow for new mailboxes, [docs/mailboxes.md](docs/mailboxes.md)
* **POSTMOOGLE_MAXSIZE** - max email size (including attachments) in megabytes
* **POSTMOOGLE_ADMINS** - a space-separated list of admin users. See `POSTMOOGLE_USERS` for syntax examples
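
As a quick illustration of the new variable's format, here is a minimal Go sketch that splits a space-separated `POSTMOOGLE_MAILBOXES_FORWARDED` value into individual addresses. The addresses are made up, and `strings.Fields` is only a stand-in for whatever parsing Postmoogle's env helper actually performs.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Hypothetical addresses; the value is a plain space-separated list.
	os.Setenv("POSTMOOGLE_MAILBOXES_FORWARDED", "noreply@forwarder.example fwd@relay.example")

	// strings.Fields is illustrative, not necessarily how Postmoogle parses it.
	forwarded := strings.Fields(os.Getenv("POSTMOOGLE_MAILBOXES_FORWARDED"))
	fmt.Println(forwarded) // [noreply@forwarder.example fwd@relay.example]
}
```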

View File

@@ -21,6 +21,7 @@ import (
// Mailboxes config
type MBXConfig struct {
Reserved []string
Forwarded []string
Activation string
}

View File

@@ -280,7 +280,7 @@ func (e *parentEmail) fixtofrom(newSenderMailbox string, domains []string) strin
return previousSender
}
func (e *parentEmail) calculateRecipients(from string) {
func (e *parentEmail) calculateRecipients(from string, forwardedFrom []string) {
recipients := map[string]struct{}{}
recipients[e.From] = struct{}{}
@@ -290,6 +290,10 @@ func (e *parentEmail) calculateRecipients(from string) {
for _, addr := range email.AddressList(e.CC) {
recipients[addr] = struct{}{}
}
for _, addr := range forwardedFrom {
delete(recipients, addr)
}
delete(recipients, from)
rcpts := make([]string, 0, len(recipients))
@@ -351,7 +355,7 @@ func (b *Bot) getParentEmail(evt *event.Event, newFromMailbox string) *parentEma
parent.InReplyTo = utils.EventField[string](&parentEvt.Content, eventMessageIDkey)
parent.References = utils.EventField[string](&parentEvt.Content, eventReferencesKey)
senderEmail := parent.fixtofrom(newFromMailbox, b.domains)
parent.calculateRecipients(senderEmail)
parent.calculateRecipients(senderEmail, b.mbxc.Forwarded)
parent.MessageID = email.MessageID(parentEvt.ID, parent.FromDomain)
if parent.InReplyTo == "" {
parent.InReplyTo = parent.MessageID
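
The hunks above are the heart of this commit: `calculateRecipients` now also receives the configured forwarded-from addresses and drops them from the reply's recipient set. A self-contained sketch of that idea follows; the helper name and the plain-string handling are illustrative, since the real method also collects addresses from the To/CC headers via `email.AddressList`.

```go
package main

import "fmt"

// filterRecipients is an illustrative stand-in for calculateRecipients:
// gather candidate recipients, then remove known forwarded-from addresses
// and the sender itself before a reply goes out (the fix for #64).
func filterRecipients(candidates []string, sender string, forwardedFrom []string) []string {
	set := map[string]struct{}{}
	for _, addr := range candidates {
		set[addr] = struct{}{}
	}
	for _, addr := range forwardedFrom {
		delete(set, addr)
	}
	delete(set, sender)

	rcpts := make([]string, 0, len(set))
	for addr := range set {
		rcpts = append(rcpts, addr)
	}
	return rcpts
}

func main() {
	rcpts := filterRecipients(
		[]string{"alice@example.com", "forwarder@example.com", "mailbox@example.com"},
		"mailbox@example.com",
		[]string{"forwarder@example.com"},
	)
	fmt.Println(rcpts) // [alice@example.com]
}
```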

View File

@@ -27,6 +27,7 @@ func New() *Config {
Admins: env.Slice("admins"),
Mailboxes: Mailboxes{
Reserved: env.Slice("mailboxes.reserved"),
Forwarded: env.Slice("mailboxes.forwarded"),
Activation: env.String("mailboxes.activation", defaultConfig.Mailboxes.Activation),
},
TLS: TLS{

View File

@@ -72,6 +72,7 @@ type Monitoring struct {
// Mailboxes config
type Mailboxes struct {
Reserved []string
Forwarded []string
Activation string
}

21
go.mod
View File

@@ -17,7 +17,7 @@ require (
github.com/mattn/go-sqlite3 v1.14.17
github.com/mileusna/crontab v1.2.0
github.com/raja/argon2pw v1.0.2-0.20210910183755-a391af63bd39
github.com/rs/zerolog v1.29.1
github.com/rs/zerolog v1.30.0
gitlab.com/etke.cc/go/env v1.0.0
gitlab.com/etke.cc/go/fswatcher v1.0.0
gitlab.com/etke.cc/go/healthchecks v1.0.1
@@ -25,8 +25,8 @@ require (
gitlab.com/etke.cc/go/secgen v1.1.1
gitlab.com/etke.cc/go/trysmtp v1.1.3
gitlab.com/etke.cc/go/validator v1.0.6
gitlab.com/etke.cc/linkpearl v0.0.0-20230616132249-490d525152ec
maunium.net/go/mautrix v0.15.3
gitlab.com/etke.cc/linkpearl v0.0.0-20230916181909-246862c25568
maunium.net/go/mautrix v0.16.1
)
require (
@@ -45,15 +45,16 @@ require (
github.com/pkg/errors v0.9.1 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
github.com/tidwall/gjson v1.14.4 // indirect
github.com/tidwall/gjson v1.16.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
github.com/yuin/goldmark v1.5.4 // indirect
golang.org/x/crypto v0.10.0 // indirect
golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 // indirect
golang.org/x/net v0.11.0 // indirect
golang.org/x/sys v0.9.0 // indirect
golang.org/x/text v0.10.0 // indirect
github.com/yuin/goldmark v1.5.6 // indirect
go.mau.fi/util v0.1.0 // indirect
golang.org/x/crypto v0.13.0 // indirect
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
golang.org/x/net v0.15.0 // indirect
golang.org/x/sys v0.12.0 // indirect
golang.org/x/text v0.13.0 // indirect
maunium.net/go/maulogger/v2 v2.4.1 // indirect
)

46
go.sum
View File

@@ -74,9 +74,9 @@ github.com/raja/argon2pw v1.0.2-0.20210910183755-a391af63bd39/go.mod h1:idX/fPqw
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.29.1 h1:cO+d60CHkknCbvzEWxP0S9K6KqyTjrCNUy1LdQLCGPc=
github.com/rs/zerolog v1.29.1/go.mod h1:Le6ESbR7hc+DP6Lt1THiV8CQSdkkNrd3R0XbEgp3ZBU=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.30.0 h1:SymVODrcRsaRaSInD9yQtKbtWqwsfoPcRff/oRXLj4c=
github.com/rs/zerolog v1.30.0/go.mod h1:/tk+P47gFdPXq4QYjvCmT5/Gsug2nagsFWBWhAiSi1w=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -84,8 +84,8 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.14.4 h1:uo0p8EbA09J7RQaflQ1aBRffTR7xedD2bcIVSYxLnkM=
github.com/tidwall/gjson v1.14.4/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.16.0 h1:SyXa+dsSPpUlcwEDuKuEBJEz5vzTvOea+9rjyYodQFg=
github.com/tidwall/gjson v1.16.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
@@ -93,8 +93,8 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/yuin/goldmark v1.5.4 h1:2uY/xC0roWy8IBEGLgB1ywIoEJFGmRrX21YQcvGZzjU=
github.com/yuin/goldmark v1.5.4/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.5.6 h1:COmQAWTCcGetChm3Ig7G/t8AFAN00t+o8Mt4cf7JpwA=
github.com/yuin/goldmark v1.5.6/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
gitlab.com/etke.cc/go/env v1.0.0 h1:J98BwzOuELnjsVPFvz5wa79L7IoRV9CmrS41xLYXtSw=
gitlab.com/etke.cc/go/env v1.0.0/go.mod h1:e1l4RM5MA1sc0R1w/RBDAESWRwgo5cOG9gx8BKUn2C4=
gitlab.com/etke.cc/go/fswatcher v1.0.0 h1:uyiVn+1NVCjOLZrXSZouIDBDZBMwVipS4oYuvAFpPzo=
@@ -109,18 +109,20 @@ gitlab.com/etke.cc/go/trysmtp v1.1.3 h1:e2EHond77onMaecqCg6mWumffTSEf+ycgj88nbee
gitlab.com/etke.cc/go/trysmtp v1.1.3/go.mod h1:lOO7tTdAE0a3ETV3wN3GJ7I1Tqewu7YTpPWaOmTteV0=
gitlab.com/etke.cc/go/validator v1.0.6 h1:w0Muxf9Pqw7xvF7NaaswE6d7r9U3nB2t2l5PnFMrecQ=
gitlab.com/etke.cc/go/validator v1.0.6/go.mod h1:Id0SxRj0J3IPhiKlj0w1plxVLZfHlkwipn7HfRZsDts=
gitlab.com/etke.cc/linkpearl v0.0.0-20230616132249-490d525152ec h1:qwLPc/7LiI+eDXn0iZFcZJbl/5luHquSsxlBDQhMBN4=
gitlab.com/etke.cc/linkpearl v0.0.0-20230616132249-490d525152ec/go.mod h1:TLrEM42/9w5R0tS/PEgN0t5JIcBJHQhHi8OuVvPyeRw=
gitlab.com/etke.cc/linkpearl v0.0.0-20230916181909-246862c25568 h1:4DqBpBNYZt6MGtDzxZoTwO40996Ug3XVbAkpMTLhowU=
gitlab.com/etke.cc/linkpearl v0.0.0-20230916181909-246862c25568/go.mod h1:IZ0TE+ZnIdJLb538owDMxhtpWH7blfW+oR7e5XRXxNY=
go.mau.fi/util v0.1.0 h1:BwIFWIOEeO7lsiI2eWKFkWTfc5yQmoe+0FYyOFVyaoE=
go.mau.fi/util v0.1.0/go.mod h1:AxuJUMCxpzgJ5eV9JbPWKRH8aAJJidxetNdUj7qcb84=
golang.org/x/crypto v0.0.0-20220518034528-6f7dac969898/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1 h1:k/i9J1pBpvlfR+9QsetwPyERsqu1GIbi967PQMq3Ivc=
golang.org/x/exp v0.0.0-20230522175609-2e198f4a06a1/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/net v0.0.0-20210501142056-aec3718b3fa0/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU=
golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ=
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -130,16 +132,16 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -147,5 +149,5 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
maunium.net/go/maulogger/v2 v2.4.1 h1:N7zSdd0mZkB2m2JtFUsiGTQQAdP0YeFWT7YMc80yAL8=
maunium.net/go/maulogger/v2 v2.4.1/go.mod h1:omPuYwYBILeVQobz8uO3XC8DIRuEb5rXYlQSuqrbCho=
maunium.net/go/mautrix v0.15.3 h1:C9BHSUM0gYbuZmAtopuLjIcH5XHLb/ZjTEz7nN+0jN0=
maunium.net/go/mautrix v0.15.3/go.mod h1:zLrQqdxJlLkurRCozTc9CL6FySkgZlO/kpCYxBILSLE=
maunium.net/go/mautrix v0.16.1 h1:Wb3CvOCe8A/NLsFeZYxKrgXKiqeZUQEBD1zqm7n/kWk=
maunium.net/go/mautrix v0.16.1/go.mod h1:2Jf15tulVtr6LxoiRL4smRXwpkGWUNfBFhwh/aXDBuk=

View File

@@ -24,7 +24,7 @@ Find out [who uses zerolog](https://github.com/rs/zerolog/wiki/Who-uses-zerolog)
* [Sampling](#log-sampling)
* [Hooks](#hooks)
* [Contextual fields](#contextual-logging)
* `context.Context` integration
* [`context.Context` integration](#contextcontext-integration)
* [Integration with `net/http`](#integration-with-nethttp)
* [JSON and CBOR encoding formats](#binary-encoding)
* [Pretty logging for development](#pretty-logging)
@@ -499,7 +499,7 @@ log.Ctx(ctx).Info().Msg("hello world")
### Set as standard logger output
```go
log := zerolog.New(os.Stdout).With().
stdlog := zerolog.New(os.Stdout).With().
Str("foo", "bar").
Logger()
@@ -511,6 +511,58 @@ stdlog.Print("hello world")
// Output: {"foo":"bar","message":"hello world"}
```
### context.Context integration
Go contexts are commonly passed throughout Go code, and this can help you pass
your Logger into places it might otherwise be hard to inject. The `Logger`
instance may be attached to Go context (`context.Context`) using
`Logger.WithContext(ctx)` and extracted from it using `zerolog.Ctx(ctx)`.
For example:
```go
func f() {
logger := zerolog.New(os.Stdout)
ctx := context.Background()
// Attach the Logger to the context.Context
ctx = logger.WithContext(ctx)
someFunc(ctx)
}
func someFunc(ctx context.Context) {
// Get Logger from the go Context. if it's nil, then
// `zerolog.DefaultContextLogger` is returned, if
// `DefaultContextLogger` is nil, then a disabled logger is returned.
logger := zerolog.Ctx(ctx)
logger.Info().Msg("Hello")
}
```
A second form of `context.Context` integration allows you to pass the current
context.Context into the logged event, and retrieve it from hooks. This can be
useful to log trace and span IDs or other information stored in the go context,
and facilitates the unification of logging and tracing in some systems:
```go
type TracingHook struct{}
func (h TracingHook) Run(e *zerolog.Event, level zerolog.Level, msg string) {
ctx := e.GetCtx()
spanId := getSpanIdFromContext(ctx) // as per your tracing framework
e.Str("span-id", spanId)
}
func f() {
// Setup the logger
logger := zerolog.New(os.Stdout)
logger = logger.Hook(TracingHook{})
ctx := context.Background()
// Use the Ctx function to make the context available to the hook
logger.Info().Ctx(ctx).Msg("Hello")
}
```
### Integration with `net/http`
The `github.com/rs/zerolog/hlog` package provides some helpers to integrate zerolog with `http.Handler`.
@@ -703,6 +755,8 @@ Log a static string, without any context or `printf`-style templating:
## Caveats
### Field duplication
Note that zerolog does no de-duplication of fields. Using the same key multiple times creates multiple keys in final JSON:
```go
@@ -714,3 +768,19 @@ logger.Info().
```
In this case, many consumers will take the last value, but this is not guaranteed; check yours if in doubt.
### Concurrency safety
Be careful when calling UpdateContext. It is not concurrency safe. Use the With method to create a child logger:
```go
func handler(w http.ResponseWriter, r *http.Request) {
// Create a child logger for concurrency safety
logger := log.Logger.With().Logger()
// Add context fields, for example User-Agent from HTTP headers
logger.UpdateContext(func(c zerolog.Context) zerolog.Context {
...
})
}
```

View File

@@ -1,6 +1,7 @@
package zerolog
import (
"context"
"fmt"
"io/ioutil"
"math"
@@ -165,6 +166,15 @@ func (c Context) Err(err error) Context {
return c.AnErr(ErrorFieldName, err)
}
// Ctx adds the context.Context to the logger context. The context.Context is
// not rendered in the error message, but is made available for hooks to use.
// A typical use case is to extract tracing information from the
// context.Context.
func (c Context) Ctx(ctx context.Context) Context {
c.l.ctx = ctx
return c
}
// Bool adds the field key with val as a bool to the logger context.
func (c Context) Bool(key string, b bool) Context {
c.l.context = enc.AppendBool(enc.AppendKey(c.l.context, key), b)
@@ -329,8 +339,9 @@ func (ts timestampHook) Run(e *Event, level Level, msg string) {
var th = timestampHook{}
// Timestamp adds the current local time as UNIX timestamp to the logger context with the "time" key.
// Timestamp adds the current local time to the logger context with the "time" key, formatted using zerolog.TimeFieldFormat.
// To customize the key name, change zerolog.TimestampFieldName.
// To customize the time format, change zerolog.TimeFieldFormat.
//
// NOTE: It won't dedupe the "time" key if the *Context has one already.
func (c Context) Timestamp() Context {

View File

@@ -24,6 +24,9 @@ func init() {
func appendJSON(dst []byte, j []byte) []byte {
return cbor.AppendEmbeddedJSON(dst, j)
}
func appendCBOR(dst []byte, c []byte) []byte {
return cbor.AppendEmbeddedCBOR(dst, c)
}
// decodeIfBinaryToString - converts a binary formatted log msg to a
// JSON formatted String Log message.

View File

@@ -6,6 +6,7 @@ package zerolog
// JSON encoded byte stream.
import (
"encoding/base64"
"github.com/rs/zerolog/internal/json"
)
@@ -25,6 +26,17 @@ func init() {
func appendJSON(dst []byte, j []byte) []byte {
return append(dst, j...)
}
func appendCBOR(dst []byte, cbor []byte) []byte {
dst = append(dst, []byte("\"data:application/cbor;base64,")...)
l := len(dst)
enc := base64.StdEncoding
n := enc.EncodedLen(len(cbor))
for i := 0; i < n; i++ {
dst = append(dst, '.')
}
enc.Encode(dst[l:], cbor)
return append(dst, '"')
}
func decodeIfBinaryToString(in []byte) string {
return string(in)

View File

@@ -1,6 +1,7 @@
package zerolog
import (
"context"
"fmt"
"net"
"os"
@@ -24,9 +25,10 @@ type Event struct {
w LevelWriter
level Level
done func(msg string)
stack bool // enable error stack trace
ch []Hook // hooks from context
skipFrame int // The number of additional frames to skip when printing the caller.
stack bool // enable error stack trace
ch []Hook // hooks from context
skipFrame int // The number of additional frames to skip when printing the caller.
ctx context.Context // Optional Go context for event
}
func putEvent(e *Event) {
@@ -318,6 +320,18 @@ func (e *Event) RawJSON(key string, b []byte) *Event {
return e
}
// RawCBOR adds already encoded CBOR to the log line under key.
//
// No sanity check is performed on b
// Note: The full featureset of CBOR is supported as data will not be mapped to json but stored as data-url
func (e *Event) RawCBOR(key string, b []byte) *Event {
if e == nil {
return e
}
e.buf = appendCBOR(enc.AppendKey(e.buf, key), b)
return e
}
// AnErr adds the field key with serialized err to the *Event context.
// If err is nil, no field is added.
func (e *Event) AnErr(key string, err error) *Event {
@@ -405,6 +419,28 @@ func (e *Event) Stack() *Event {
return e
}
// Ctx adds the Go Context to the *Event context. The context is not rendered
// in the output message, but is available to hooks and to Func() calls via the
// GetCtx() accessor. A typical use case is to extract tracing information from
// the Go Ctx.
func (e *Event) Ctx(ctx context.Context) *Event {
if e != nil {
e.ctx = ctx
}
return e
}
// GetCtx retrieves the Go context.Context which is optionally stored in the
// Event. This allows Hooks and functions passed to Func() to retrieve values
// which are stored in the context.Context. This can be useful in tracing,
// where span information is commonly propagated in the context.Context.
func (e *Event) GetCtx() context.Context {
if e == nil || e.ctx == nil {
return context.Background()
}
return e.ctx
}
// Bool adds the field key with val as a bool to the *Event context.
func (e *Event) Bool(key string, b bool) *Event {
if e == nil {

View File

@@ -26,7 +26,8 @@ const (
additionalTypeBreak byte = 31
// Tag Sub-types.
additionalTypeTimestamp byte = 01
additionalTypeTimestamp byte = 01
additionalTypeEmbeddedCBOR byte = 63
// Extended Tags - from https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml
additionalTypeTagNetworkAddr uint16 = 260

View File

@@ -5,6 +5,7 @@ package cbor
import (
"bufio"
"bytes"
"encoding/base64"
"fmt"
"io"
"math"
@@ -213,6 +214,31 @@ func decodeString(src *bufio.Reader, noQuotes bool) []byte {
}
return append(result, '"')
}
func decodeStringToDataUrl(src *bufio.Reader, mimeType string) []byte {
pb := readByte(src)
major := pb & maskOutAdditionalType
minor := pb & maskOutMajorType
if major != majorTypeByteString {
panic(fmt.Errorf("Major type is: %d in decodeString", major))
}
length := decodeIntAdditionalType(src, minor)
l := int(length)
enc := base64.StdEncoding
lEnc := enc.EncodedLen(l)
result := make([]byte, len("\"data:;base64,\"")+len(mimeType)+lEnc)
dest := result
u := copy(dest, "\"data:")
dest = dest[u:]
u = copy(dest, mimeType)
dest = dest[u:]
u = copy(dest, ";base64,")
dest = dest[u:]
pbs := readNBytes(src, l)
enc.Encode(dest, pbs)
dest = dest[lEnc:]
dest[0] = '"'
return result
}
func decodeUTF8String(src *bufio.Reader) []byte {
pb := readByte(src)
@@ -349,6 +375,20 @@ func decodeTagData(src *bufio.Reader) []byte {
switch minor {
case additionalTypeTimestamp:
return decodeTimeStamp(src)
case additionalTypeIntUint8:
val := decodeIntAdditionalType(src, minor)
switch byte(val) {
case additionalTypeEmbeddedCBOR:
pb := readByte(src)
dataMajor := pb & maskOutAdditionalType
if dataMajor != majorTypeByteString {
panic(fmt.Errorf("Unsupported embedded Type: %d in decodeEmbeddedCBOR", dataMajor))
}
src.UnreadByte()
return decodeStringToDataUrl(src, "application/cbor")
default:
panic(fmt.Errorf("Unsupported Additional Tag Type: %d in decodeTagData", val))
}
// Tag value is larger than 256 (so uint16).
case additionalTypeIntUint16:

View File

@@ -93,3 +93,25 @@ func AppendEmbeddedJSON(dst, s []byte) []byte {
}
return append(dst, s...)
}
// AppendEmbeddedCBOR adds a tag and embeds input CBOR as such.
func AppendEmbeddedCBOR(dst, s []byte) []byte {
major := majorTypeTags
minor := additionalTypeEmbeddedCBOR
// Append the TAG to indicate this is Embedded CBOR.
dst = append(dst, major|additionalTypeIntUint8)
dst = append(dst, minor)
// Append the CBOR Object as Byte String.
major = majorTypeByteString
l := len(s)
if l <= additionalMax {
lb := byte(l)
dst = append(dst, major|lb)
} else {
dst = appendCborTypePrefix(dst, major, uint64(l))
}
return append(dst, s...)
}

27
vendor/github.com/rs/zerolog/log.go generated vendored
View File

@@ -82,8 +82,9 @@
// log.Warn().Msg("")
// // Output: {"level":"warn","severity":"warn"}
//
// # Caveats
//
// Caveats
// Field duplication:
//
// There is no fields deduplication out-of-the-box.
// Using the same key multiple times creates new key in final JSON each time.
@@ -96,9 +97,24 @@
//
// In this case, many consumers will take the last value,
// but this is not guaranteed; check yours if in doubt.
//
// Concurrency safety:
//
// Be careful when calling UpdateContext. It is not concurrency safe. Use the With method to create a child logger:
//
// func handler(w http.ResponseWriter, r *http.Request) {
// // Create a child logger for concurrency safety
// logger := log.Logger.With().Logger()
//
// // Add context fields, for example User-Agent from HTTP headers
// logger.UpdateContext(func(c zerolog.Context) zerolog.Context {
// ...
// })
// }
package zerolog
import (
"context"
"errors"
"fmt"
"io"
@@ -218,6 +234,7 @@ type Logger struct {
context []byte
hooks []Hook
stack bool
ctx context.Context
}
// New creates a root logger with given output writer. If the output writer implements
@@ -275,7 +292,8 @@ func (l Logger) With() Context {
// UpdateContext updates the internal logger's context.
//
// Use this method with caution. If unsure, prefer the With method.
// Caution: This method is not concurrency safe.
// Use the With method to create a child logger before modifying the context from concurrent goroutines.
func (l *Logger) UpdateContext(update func(c Context) Context) {
if l == disabledLogger {
return
@@ -309,7 +327,9 @@ func (l Logger) Sample(s Sampler) Logger {
// Hook returns a logger with the h Hook.
func (l Logger) Hook(h Hook) Logger {
l.hooks = append(l.hooks, h)
newHooks := make([]Hook, len(l.hooks), len(l.hooks)+1)
copy(newHooks, l.hooks)
l.hooks = append(newHooks, h)
return l
}
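
A toy illustration, not zerolog code, of why the `Hook` change above copies the slice before appending: `Hook` has a value receiver, so two child loggers derived from the same parent can otherwise share the hooks backing array, and the second call silently overwrites the first.

```go
package main

import "fmt"

type logger struct{ hooks []string }

// hook mimics the old behaviour: append may reuse the parent's backing array.
func (l logger) hook(h string) logger {
	l.hooks = append(l.hooks, h)
	return l
}

// hookCopy mimics the new behaviour: copy into a fresh slice first.
func (l logger) hookCopy(h string) logger {
	hooks := make([]string, len(l.hooks), len(l.hooks)+1)
	copy(hooks, l.hooks)
	l.hooks = append(hooks, h)
	return l
}

func main() {
	base := logger{hooks: make([]string, 0, 2)} // spare capacity triggers the aliasing
	a, b := base.hook("a"), base.hook("b")
	fmt.Println(a.hooks, b.hooks) // [b] [b] -- "b" overwrote "a" in the shared array

	base = logger{hooks: make([]string, 0, 2)}
	a, b = base.hookCopy("a"), base.hookCopy("b")
	fmt.Println(a.hooks, b.hooks) // [a] [b]
}
```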
@@ -453,6 +473,7 @@ func (l *Logger) newEvent(level Level, done func(string)) *Event {
e := newEvent(l.w, level)
e.done = done
e.ch = l.hooks
e.ctx = l.ctx
if level != NoLevel && LevelFieldName != "" {
e.Str(LevelFieldName, LevelFieldMarshalFunc(level))
}

View File

@@ -211,6 +211,7 @@ There are currently the following built-in modifiers:
- `@tostr`: Converts json to a string. Wraps a json string.
- `@fromstr`: Converts a string from json. Unwraps a json string.
- `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db).
- `@dig`: Search for a value without providing its entire path. See [e8e87f2](https://github.com/tidwall/gjson/commit/e8e87f2a00dc41f3aba5631094e21f59a8cf8cbf).
### Modifier arguments

View File

@@ -137,12 +137,21 @@ next major release.*
The `~` (tilde) operator will convert a value to a boolean before comparison.
Supported tilde comparison type are:
```
~true Converts true-ish values to true
~false Converts false-ish and non-existent values to true
~null Converts null and non-existent values to true
~* Converts any existing value to true
```
For example, using the following JSON:
```json
{
"vals": [
{ "a": 1, "b": true },
{ "a": 1, "b": "data" },
{ "a": 2, "b": true },
{ "a": 3, "b": false },
{ "a": 4, "b": "0" },
@@ -157,15 +166,23 @@ For example, using the following JSON:
}
```
You can now query for all true(ish) or false(ish) values:
To query for all true-ish or false-ish values:
```
vals.#(b==~true)#.a >> [1,2,6,7,8]
vals.#(b==~true)#.a >> [2,6,7,8]
vals.#(b==~false)#.a >> [3,4,5,9,10,11]
```
The last value which was non-existent is treated as `false`
To query for null and explicit value existence:
```
vals.#(b==~null)#.a >> [10,11]
vals.#(b==~*)#.a >> [1,2,3,4,5,6,7,8,9,10]
vals.#(b!=~*)#.a >> [11]
```
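
For completeness, a small Go sketch that runs two of the tilde queries above through the `gjson.Get` API; the JSON document is abridged from the example, so the expected outputs differ from the full listing.

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Abridged version of the example document above.
	const doc = `{"vals":[{"a":1,"b":true},{"a":3,"b":false},{"a":10,"b":null},{"a":11}]}`

	// "a" of every element whose "b" is true-ish.
	fmt.Println(gjson.Get(doc, "vals.#(b==~true)#.a").Raw) // expected: [1]
	// "a" of every element whose "b" is null or missing.
	fmt.Println(gjson.Get(doc, "vals.#(b==~null)#.a").Raw) // expected: [10,11]
}
```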
### Dot vs Pipe
The `.` is standard separator, but it's also possible to use a `|`.
@@ -241,6 +258,7 @@ There are currently the following built-in modifiers:
- `@tostr`: Converts json to a string. Wraps a json string.
- `@fromstr`: Converts a string from json. Unwraps a json string.
- `@group`: Groups arrays of objects. See [e4fc67c](https://github.com/tidwall/gjson/commit/e4fc67c92aeebf2089fabc7872f010e340d105db).
- `@dig`: Search for a value without providing its entire path. See [e8e87f2](https://github.com/tidwall/gjson/commit/e8e87f2a00dc41f3aba5631094e21f59a8cf8cbf).
#### Modifier arguments

View File

@@ -645,9 +645,9 @@ func tostr(json string) (raw string, str string) {
// Exists returns true if value exists.
//
// if gjson.Get(json, "name.last").Exists(){
// println("value exists")
// }
// if gjson.Get(json, "name.last").Exists(){
// println("value exists")
// }
func (t Result) Exists() bool {
return t.Type != Null || len(t.Raw) != 0
}
@@ -661,7 +661,6 @@ func (t Result) Exists() bool {
// nil, for JSON null
// map[string]interface{}, for JSON objects
// []interface{}, for JSON arrays
//
func (t Result) Value() interface{} {
if t.Type == String {
return t.Str
@@ -826,19 +825,28 @@ func parseArrayPath(path string) (r arrayPathResult) {
}
// splitQuery takes a query and splits it into three parts:
// path, op, middle, and right.
//
// path, op, middle, and right.
//
// So for this query:
// #(first_name=="Murphy").last
//
// #(first_name=="Murphy").last
//
// Becomes
// first_name # path
// =="Murphy" # middle
// .last # right
//
// first_name # path
// =="Murphy" # middle
// .last # right
//
// Or,
// #(service_roles.#(=="one")).cap
//
// #(service_roles.#(=="one")).cap
//
// Becomes
// service_roles.#(=="one") # path
// # middle
// .cap # right
//
// service_roles.#(=="one") # path
// # middle
// .cap # right
func parseQuery(query string) (
path, op, value, remain string, i int, vesc, ok bool,
) {
@@ -1251,15 +1259,74 @@ func matchLimit(str, pattern string) bool {
return matched
}
func falseish(t Result) bool {
switch t.Type {
case Null:
return true
case False:
return true
case String:
b, err := strconv.ParseBool(strings.ToLower(t.Str))
if err != nil {
return false
}
return !b
case Number:
return t.Num == 0
default:
return false
}
}
func trueish(t Result) bool {
switch t.Type {
case True:
return true
case String:
b, err := strconv.ParseBool(strings.ToLower(t.Str))
if err != nil {
return false
}
return b
case Number:
return t.Num != 0
default:
return false
}
}
func nullish(t Result) bool {
return t.Type == Null
}
func queryMatches(rp *arrayPathResult, value Result) bool {
rpv := rp.query.value
if len(rpv) > 0 && rpv[0] == '~' {
// convert to bool
rpv = rpv[1:]
if value.Bool() {
value = Result{Type: True}
} else {
value = Result{Type: False}
if len(rpv) > 0 {
if rpv[0] == '~' {
// convert to bool
rpv = rpv[1:]
var ish, ok bool
switch rpv {
case "*":
ish, ok = value.Exists(), true
case "null":
ish, ok = nullish(value), true
case "true":
ish, ok = trueish(value), true
case "false":
ish, ok = falseish(value), true
}
if ok {
rpv = "true"
if ish {
value = Result{Type: True}
} else {
value = Result{Type: False}
}
} else {
rpv = ""
value = Result{}
}
}
}
if !value.Exists() {
@@ -1918,23 +1985,23 @@ type parseContext struct {
// the '#' character.
// The dot and wildcard character can be escaped with '\'.
//
// {
// "name": {"first": "Tom", "last": "Anderson"},
// "age":37,
// "children": ["Sara","Alex","Jack"],
// "friends": [
// {"first": "James", "last": "Murphy"},
// {"first": "Roger", "last": "Craig"}
// ]
// }
// "name.last" >> "Anderson"
// "age" >> 37
// "children" >> ["Sara","Alex","Jack"]
// "children.#" >> 3
// "children.1" >> "Alex"
// "child*.2" >> "Jack"
// "c?ildren.0" >> "Sara"
// "friends.#.first" >> ["James","Roger"]
// {
// "name": {"first": "Tom", "last": "Anderson"},
// "age":37,
// "children": ["Sara","Alex","Jack"],
// "friends": [
// {"first": "James", "last": "Murphy"},
// {"first": "Roger", "last": "Craig"}
// ]
// }
// "name.last" >> "Anderson"
// "age" >> 37
// "children" >> ["Sara","Alex","Jack"]
// "children.#" >> 3
// "children.1" >> "Alex"
// "child*.2" >> "Jack"
// "c?ildren.0" >> "Sara"
// "friends.#.first" >> ["James","Roger"]
//
// This function expects that the json is well-formed, and does not validate.
// Invalid json will not panic, but it may return back unexpected results.
@@ -2126,8 +2193,7 @@ func unescape(json string) string {
// The caseSensitive paramater is used when the tokens are Strings.
// The order when comparing two different type is:
//
// Null < False < Number < String < True < JSON
//
// Null < False < Number < String < True < JSON
func (t Result) Less(token Result, caseSensitive bool) bool {
if t.Type < token.Type {
return true
@@ -2556,11 +2622,10 @@ func validnull(data []byte, i int) (outi int, ok bool) {
// Valid returns true if the input is valid json.
//
// if !gjson.Valid(json) {
// return errors.New("invalid json")
// }
// value := gjson.Get(json, "name.last")
//
// if !gjson.Valid(json) {
// return errors.New("invalid json")
// }
// value := gjson.Get(json, "name.last")
func Valid(json string) bool {
_, ok := validpayload(stringBytes(json), 0)
return ok
@@ -2568,13 +2633,12 @@ func Valid(json string) bool {
// ValidBytes returns true if the input is valid json.
//
// if !gjson.Valid(json) {
// return errors.New("invalid json")
// }
// value := gjson.Get(json, "name.last")
// if !gjson.Valid(json) {
// return errors.New("invalid json")
// }
// value := gjson.Get(json, "name.last")
//
// If working with bytes, this method preferred over ValidBytes(string(data))
//
func ValidBytes(json []byte) bool {
_, ok := validpayload(json, 0)
return ok
@@ -2690,6 +2754,7 @@ func execModifier(json, path string) (pathOut, res string, ok bool) {
var parsedArgs bool
switch pathOut[0] {
case '{', '[', '"':
// json arg
res := Parse(pathOut)
if res.Exists() {
args = squash(pathOut)
@@ -2698,14 +2763,20 @@ func execModifier(json, path string) (pathOut, res string, ok bool) {
}
}
if !parsedArgs {
idx := strings.IndexByte(pathOut, '|')
if idx == -1 {
args = pathOut
pathOut = ""
} else {
args = pathOut[:idx]
pathOut = pathOut[idx:]
// simple arg
i := 0
for ; i < len(pathOut); i++ {
if pathOut[i] == '|' {
break
}
switch pathOut[i] {
case '{', '[', '"', '(':
s := squash(pathOut[i:])
i += len(s) - 1
}
}
args = pathOut[:i]
pathOut = pathOut[i:]
}
}
return pathOut, fn(json, args), true
@@ -2725,19 +2796,24 @@ func unwrap(json string) string {
// DisableModifiers will disable the modifier syntax
var DisableModifiers = false
var modifiers = map[string]func(json, arg string) string{
"pretty": modPretty,
"ugly": modUgly,
"reverse": modReverse,
"this": modThis,
"flatten": modFlatten,
"join": modJoin,
"valid": modValid,
"keys": modKeys,
"values": modValues,
"tostr": modToStr,
"fromstr": modFromStr,
"group": modGroup,
var modifiers map[string]func(json, arg string) string
func init() {
modifiers = map[string]func(json, arg string) string{
"pretty": modPretty,
"ugly": modUgly,
"reverse": modReverse,
"this": modThis,
"flatten": modFlatten,
"join": modJoin,
"valid": modValid,
"keys": modKeys,
"values": modValues,
"tostr": modToStr,
"fromstr": modFromStr,
"group": modGroup,
"dig": modDig,
}
}
// AddModifier binds a custom modifier command to the GJSON syntax.
@@ -2848,9 +2924,13 @@ func modReverse(json, arg string) string {
}
// @flatten an array with child arrays.
// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,[6,7]]
//
// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,[6,7]]
//
// The {"deep":true} arg can be provide for deep flattening.
// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,6,7]
//
// [1,[2],[3,4],[5,[6,7]]] -> [1,2,3,4,5,6,7]
//
// The original json is returned when the json is not an array.
func modFlatten(json, arg string) string {
res := Parse(json)
@@ -2895,7 +2975,8 @@ func modFlatten(json, arg string) string {
}
// @keys extracts the keys from an object.
// {"first":"Tom","last":"Smith"} -> ["first","last"]
//
// {"first":"Tom","last":"Smith"} -> ["first","last"]
func modKeys(json, arg string) string {
v := Parse(json)
if !v.Exists() {
@@ -2922,7 +3003,8 @@ func modKeys(json, arg string) string {
}
// @values extracts the values from an object.
// {"first":"Tom","last":"Smith"} -> ["Tom","Smith"]
//
// {"first":"Tom","last":"Smith"} -> ["Tom","Smith"]
func modValues(json, arg string) string {
v := Parse(json)
if !v.Exists() {
@@ -2947,11 +3029,17 @@ func modValues(json, arg string) string {
}
// @join multiple objects into a single object.
// [{"first":"Tom"},{"last":"Smith"}] -> {"first","Tom","last":"Smith"}
//
// [{"first":"Tom"},{"last":"Smith"}] -> {"first","Tom","last":"Smith"}
//
// The arg can be "true" to specify that duplicate keys should be preserved.
// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":37,"age":41}
//
// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":37,"age":41}
//
// Without preserved keys:
// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":41}
//
// [{"first":"Tom","age":37},{"age":41}] -> {"first","Tom","age":41}
//
// The original json is returned when the json is not an object.
func modJoin(json, arg string) string {
res := Parse(json)
@@ -3024,7 +3112,8 @@ func modValid(json, arg string) string {
}
// @fromstr converts a string to json
// "{\"id\":1023,\"name\":\"alert\"}" -> {"id":1023,"name":"alert"}
//
// "{\"id\":1023,\"name\":\"alert\"}" -> {"id":1023,"name":"alert"}
func modFromStr(json, arg string) string {
if !Valid(json) {
return ""
@@ -3033,7 +3122,8 @@ func modFromStr(json, arg string) string {
}
// @tostr converts a string to json
// {"id":1023,"name":"alert"} -> "{\"id\":1023,\"name\":\"alert\"}"
//
// {"id":1023,"name":"alert"} -> "{\"id\":1023,\"name\":\"alert\"}"
func modToStr(str, arg string) string {
return string(AppendJSONString(nil, str))
}
@@ -3210,11 +3300,11 @@ func revSquash(json string) string {
// Paths returns the original GJSON paths for a Result where the Result came
// from a simple query path that returns an array, like:
//
// gjson.Get(json, "friends.#.first")
// gjson.Get(json, "friends.#.first")
//
// The returned value will be in the form of a JSON array:
//
// ["friends.0.first","friends.1.first","friends.2.first"]
// ["friends.0.first","friends.1.first","friends.2.first"]
//
// The param 'json' must be the original JSON used when calling Get.
//
@@ -3239,11 +3329,11 @@ func (t Result) Paths(json string) []string {
// Path returns the original GJSON path for a Result where the Result came
// from a simple path that returns a single value, like:
//
// gjson.Get(json, "friends.#(last=Murphy)")
// gjson.Get(json, "friends.#(last=Murphy)")
//
// The returned value will be in the form of a JSON string:
//
// "friends.0"
// "friends.0"
//
// The param 'json' must be the original JSON used when calling Get.
//
@@ -3357,3 +3447,30 @@ func escapeComp(comp string) string {
}
return comp
}
func parseRecursiveDescent(all []Result, parent Result, path string) []Result {
if res := parent.Get(path); res.Exists() {
all = append(all, res)
}
if parent.IsArray() || parent.IsObject() {
parent.ForEach(func(_, val Result) bool {
all = parseRecursiveDescent(all, val, path)
return true
})
}
return all
}
func modDig(json, arg string) string {
all := parseRecursiveDescent(nil, Parse(json), arg)
var out []byte
out = append(out, '[')
for i, res := range all {
if i > 0 {
out = append(out, ',')
}
out = append(out, res.Raw...)
}
out = append(out, ']')
return string(out)
}
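
Based on the `modDig`/`parseRecursiveDescent` implementation above, here is a speculative usage sketch. The `@dig:<path>` form follows gjson's usual modifier-argument syntax, which is an assumption here rather than something this diff shows.

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const doc = `{"user":{"name":"Tom"},"friends":[{"name":"Jane"},{"name":"Sara"}]}`

	// Recursively collect every value found at the relative path "name",
	// without spelling out the full path to each occurrence.
	names := gjson.Get(doc, "@dig:name")
	fmt.Println(names.Raw) // expected: ["Tom","Jane","Sara"]
}
```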

105
vendor/github.com/yuin/goldmark/.golangci.yml generated vendored Normal file
View File

@@ -0,0 +1,105 @@
run:
deadline: 10m
issues:
exclude-use-default: false
exclude-rules:
- path: _test.go
linters:
- errcheck
- lll
exclude:
- "Package util"
linters:
disable-all: true
enable:
- errcheck
- gosimple
- govet
- ineffassign
- staticcheck
- typecheck
- unused
- gofmt
- godot
- makezero
- misspell
- revive
- wastedassign
- lll
linters-settings:
revive:
severity: "warning"
confidence: 0.8
rules:
- name: blank-imports
severity: warning
disabled: false
- name: context-as-argument
severity: warning
disabled: false
- name: context-keys-type
severity: warning
disabled: false
- name: dot-imports
severity: warning
disabled: false
- name: error-return
severity: warning
disabled: false
- name: error-strings
severity: warning
disabled: false
- name: error-naming
severity: warning
disabled: false
- name: exported
severity: warning
disabled: false
- name: increment-decrement
severity: warning
disabled: false
- name: var-naming
severity: warning
disabled: false
- name: var-declaration
severity: warning
disabled: false
- name: package-comments
severity: warning
disabled: false
- name: range
severity: warning
disabled: false
- name: receiver-naming
severity: warning
disabled: false
- name: time-naming
severity: warning
disabled: false
- name: unexported-return
severity: warning
disabled: false
- name: indent-error-flow
severity: warning
disabled: false
- name: errorf
severity: warning
disabled: false
- name: empty-block
severity: warning
disabled: true
- name: superfluous-else
severity: warning
disabled: false
- name: unused-parameter
severity: warning
disabled: true
- name: unreachable-code
severity: warning
disabled: false
- name: redefines-builtin-id
severity: warning
disabled: false

View File

@@ -1,4 +1,7 @@
.PHONY: test fuzz
.PHONY: test fuzz lint
lint:
golangci-lint run -c .golangci.yml ./...
test:
go test -coverprofile=profile.out -coverpkg=github.com/yuin/goldmark,github.com/yuin/goldmark/ast,github.com/yuin/goldmark/extension,github.com/yuin/goldmark/extension/ast,github.com/yuin/goldmark/parser,github.com/yuin/goldmark/renderer,github.com/yuin/goldmark/renderer/html,github.com/yuin/goldmark/text,github.com/yuin/goldmark/util ./...

View File

@@ -440,6 +440,9 @@ Extensions
- [goldmark-pdf](https://github.com/stephenafamo/goldmark-pdf): A PDF renderer that can be passed to `goldmark.WithRenderer()`.
- [goldmark-hashtag](https://github.com/abhinav/goldmark-hashtag): Adds support for `#hashtag`-based tagging to goldmark.
- [goldmark-wikilink](https://github.com/abhinav/goldmark-wikilink): Adds support for `[[wiki]]`-style links to goldmark.
- [goldmark-anchor](https://github.com/abhinav/goldmark-anchor): Adds anchors (permalinks) next to all headers in a document.
- [goldmark-figure](https://github.com/mangoumbrella/goldmark-figure): Adds support for rendering paragraphs starting with an image to `<figure>` elements.
- [goldmark-frontmatter](https://github.com/abhinav/goldmark-frontmatter): Adds support for YAML, TOML, and custom front matter to documents.
- [goldmark-toc](https://github.com/abhinav/goldmark-toc): Adds support for generating tables-of-contents for goldmark documents.
- [goldmark-mermaid](https://github.com/abhinav/goldmark-mermaid): Adds support for rendering [Mermaid](https://mermaid-js.github.io/mermaid/) diagrams in goldmark documents.
- [goldmark-pikchr](https://github.com/jchenry/goldmark-pikchr): Adds support for rendering [Pikchr](https://pikchr.org/home/doc/trunk/homepage.md) diagrams in goldmark documents.
@@ -448,6 +451,7 @@ Extensions
- [goldmark-fences](https://github.com/stefanfritsch/goldmark-fences): Support for pandoc-style [fenced divs](https://pandoc.org/MANUAL.html#divs-and-spans) in goldmark.
- [goldmark-d2](https://github.com/FurqanSoftware/goldmark-d2): Adds support for [D2](https://d2lang.com/) diagrams.
- [goldmark-katex](https://github.com/FurqanSoftware/goldmark-katex): Adds support for [KaTeX](https://katex.org/) math and equations.
- [goldmark-img64](https://github.com/tenkoh/goldmark-img64): Adds support for embedding images into the document as DataURL (base64 encoded).
goldmark internal(for extension developers)

View File

@@ -39,7 +39,7 @@ func NewNodeKind(name string) NodeKind {
return kindMax
}
// An Attribute is an attribute of the Node
// An Attribute is an attribute of the Node.
type Attribute struct {
Name []byte
Value interface{}
@@ -248,7 +248,7 @@ func (n *BaseNode) RemoveChildren(self Node) {
n.childCount = 0
}
// SortChildren implements Node.SortChildren
// SortChildren implements Node.SortChildren.
func (n *BaseNode) SortChildren(comparator func(n1, n2 Node) int) {
var sorted Node
current := n.firstChild
@@ -358,7 +358,7 @@ func (n *BaseNode) InsertBefore(self, v1, insertee Node) {
}
}
// OwnerDocument implements Node.OwnerDocument
// OwnerDocument implements Node.OwnerDocument.
func (n *BaseNode) OwnerDocument() *Document {
d := n.Parent()
for {
@@ -399,7 +399,7 @@ func (n *BaseNode) SetAttribute(name []byte, value interface{}) {
n.attributes = append(n.attributes, Attribute{name, value})
}
// SetAttributeString implements Node.SetAttributeString
// SetAttributeString implements Node.SetAttributeString.
func (n *BaseNode) SetAttributeString(name string, value interface{}) {
n.SetAttribute(util.StringToReadOnlyBytes(name), value)
}
@@ -422,12 +422,12 @@ func (n *BaseNode) AttributeString(s string) (interface{}, bool) {
return n.Attribute(util.StringToReadOnlyBytes(s))
}
// Attributes implements Node.Attributes
// Attributes implements Node.Attributes.
func (n *BaseNode) Attributes() []Attribute {
return n.attributes
}
// RemoveAttributes implements Node.RemoveAttributes
// RemoveAttributes implements Node.RemoveAttributes.
func (n *BaseNode) RemoveAttributes() {
n.attributes = nil
}

View File

@@ -14,12 +14,12 @@ type BaseBlock struct {
lines *textm.Segments
}
// Type implements Node.Type
// Type implements Node.Type.
func (b *BaseBlock) Type() NodeType {
return TypeBlock
}
// IsRaw implements Node.IsRaw
// IsRaw implements Node.IsRaw.
func (b *BaseBlock) IsRaw() bool {
return false
}
@@ -34,7 +34,7 @@ func (b *BaseBlock) SetBlankPreviousLines(v bool) {
b.blankPreviousLines = v
}
// Lines implements Node.Lines
// Lines implements Node.Lines.
func (b *BaseBlock) Lines() *textm.Segments {
if b.lines == nil {
b.lines = textm.NewSegments()
@@ -42,7 +42,7 @@ func (b *BaseBlock) Lines() *textm.Segments {
return b.lines
}
// SetLines implements Node.SetLines
// SetLines implements Node.SetLines.
func (b *BaseBlock) SetLines(v *textm.Segments) {
b.lines = v
}
@@ -72,7 +72,7 @@ func (n *Document) Kind() NodeKind {
return KindDocument
}
// OwnerDocument implements Node.OwnerDocument
// OwnerDocument implements Node.OwnerDocument.
func (n *Document) OwnerDocument() *Document {
return n
}
@@ -431,19 +431,19 @@ func NewListItem(offset int) *ListItem {
type HTMLBlockType int
const (
// HTMLBlockType1 represents type 1 html blocks
// HTMLBlockType1 represents type 1 html blocks.
HTMLBlockType1 HTMLBlockType = iota + 1
// HTMLBlockType2 represents type 2 html blocks
// HTMLBlockType2 represents type 2 html blocks.
HTMLBlockType2
// HTMLBlockType3 represents type 3 html blocks
// HTMLBlockType3 represents type 3 html blocks.
HTMLBlockType3
// HTMLBlockType4 represents type 4 html blocks
// HTMLBlockType4 represents type 4 html blocks.
HTMLBlockType4
// HTMLBlockType5 represents type 5 html blocks
// HTMLBlockType5 represents type 5 html blocks.
HTMLBlockType5
// HTMLBlockType6 represents type 6 html blocks
// HTMLBlockType6 represents type 6 html blocks.
HTMLBlockType6
// HTMLBlockType7 represents type 7 html blocks
// HTMLBlockType7 represents type 7 html blocks.
HTMLBlockType7
)

View File

@@ -13,12 +13,12 @@ type BaseInline struct {
BaseNode
}
// Type implements Node.Type
// Type implements Node.Type.
func (b *BaseInline) Type() NodeType {
return TypeInline
}
// IsRaw implements Node.IsRaw
// IsRaw implements Node.IsRaw.
func (b *BaseInline) IsRaw() bool {
return false
}
@@ -33,12 +33,12 @@ func (b *BaseInline) SetBlankPreviousLines(v bool) {
panic("can not call with inline nodes.")
}
// Lines implements Node.Lines
// Lines implements Node.Lines.
func (b *BaseInline) Lines() *textm.Segments {
panic("can not call with inline nodes.")
}
// SetLines implements Node.SetLines
// SetLines implements Node.SetLines.
func (b *BaseInline) SetLines(v *textm.Segments) {
panic("can not call with inline nodes.")
}
@@ -132,7 +132,8 @@ func (n *Text) Merge(node Node, source []byte) bool {
if !ok {
return false
}
if n.Segment.Stop != t.Segment.Start || t.Segment.Padding != 0 || source[n.Segment.Stop-1] == '\n' || t.IsRaw() != n.IsRaw() {
if n.Segment.Stop != t.Segment.Start || t.Segment.Padding != 0 ||
source[n.Segment.Stop-1] == '\n' || t.IsRaw() != n.IsRaw() {
return false
}
n.Segment.Stop = t.Segment.Stop
@@ -214,7 +215,7 @@ func MergeOrReplaceTextSegment(parent Node, n Node, s textm.Segment) {
}
}
// A String struct is a textual content that has a concrete value
// A String struct is a textual content that has a concrete value.
type String struct {
BaseInline
@@ -305,7 +306,7 @@ func (n *CodeSpan) IsBlank(source []byte) bool {
return true
}
// Dump implements Node.Dump
// Dump implements Node.Dump.
func (n *CodeSpan) Dump(source []byte, level int) {
DumpHelper(n, source, level, nil, nil)
}
@@ -467,7 +468,7 @@ type AutoLink struct {
// Inline implements Inline.Inline.
func (n *AutoLink) Inline() {}
// Dump implements Node.Dump
// Dump implements Node.Dump.
func (n *AutoLink) Dump(source []byte, level int) {
segment := n.value.Segment
m := map[string]string{

View File

@@ -88,7 +88,7 @@ type Footnote struct {
func (n *Footnote) Dump(source []byte, level int) {
m := map[string]string{}
m["Index"] = fmt.Sprintf("%v", n.Index)
m["Ref"] = fmt.Sprintf("%s", n.Ref)
m["Ref"] = string(n.Ref)
gast.DumpHelper(n, source, level, m, nil)
}

View File

@@ -2,8 +2,9 @@ package ast
import (
"fmt"
gast "github.com/yuin/goldmark/ast"
"strings"
gast "github.com/yuin/goldmark/ast"
)
// Alignment is a text alignment of table cells.
@@ -45,7 +46,7 @@ type Table struct {
Alignments []Alignment
}
// Dump implements Node.Dump
// Dump implements Node.Dump.
func (n *Table) Dump(source []byte, level int) {
gast.DumpHelper(n, source, level, nil, func(level int) {
indent := strings.Repeat(" ", level)

View File

@@ -29,6 +29,7 @@ type cjk struct {
EscapedSpace bool
}
// CJK is a goldmark extension that provides functionalities for CJK languages.
var CJK = NewCJK(WithEastAsianLineBreaks(), WithEscapedSpace())
// NewCJK returns a new extension with given options.

View File

@@ -113,7 +113,8 @@ func (b *definitionDescriptionParser) Trigger() []byte {
return []byte{':'}
}
func (b *definitionDescriptionParser) Open(parent gast.Node, reader text.Reader, pc parser.Context) (gast.Node, parser.State) {
func (b *definitionDescriptionParser) Open(
parent gast.Node, reader text.Reader, pc parser.Context) (gast.Node, parser.State) {
line, _ := reader.PeekLine()
pos := pc.BlockOffset()
indent := pc.BlockIndent()
@@ -199,7 +200,8 @@ func (r *DefinitionListHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFunc
// DefinitionListAttributeFilter defines attribute names which dl elements can have.
var DefinitionListAttributeFilter = html.GlobalAttributeFilter
func (r *DefinitionListHTMLRenderer) renderDefinitionList(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *DefinitionListHTMLRenderer) renderDefinitionList(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
if n.Attributes() != nil {
_, _ = w.WriteString("<dl")
@@ -217,7 +219,8 @@ func (r *DefinitionListHTMLRenderer) renderDefinitionList(w util.BufWriter, sour
// DefinitionTermAttributeFilter defines attribute names which dd elements can have.
var DefinitionTermAttributeFilter = html.GlobalAttributeFilter
func (r *DefinitionListHTMLRenderer) renderDefinitionTerm(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *DefinitionListHTMLRenderer) renderDefinitionTerm(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
if n.Attributes() != nil {
_, _ = w.WriteString("<dt")
@@ -235,7 +238,8 @@ func (r *DefinitionListHTMLRenderer) renderDefinitionTerm(w util.BufWriter, sour
// DefinitionDescriptionAttributeFilter defines attribute names which dd elements can have.
var DefinitionDescriptionAttributeFilter = html.GlobalAttributeFilter
func (r *DefinitionListHTMLRenderer) renderDefinitionDescription(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *DefinitionListHTMLRenderer) renderDefinitionDescription(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
n := node.(*ast.DefinitionDescription)
_, _ = w.WriteString("<dd")

View File

@@ -44,8 +44,8 @@ func (b *footnoteBlockParser) Open(parent gast.Node, reader text.Reader, pc pars
return nil, parser.NoChildren
}
open := pos + 1
closes := 0
closure := util.FindClosure(line[pos+1:], '[', ']', false, false)
var closes int
closure := util.FindClosure(line[pos+1:], '[', ']', false, false) //nolint:staticcheck
closes = pos + 1 + closure
next := closes + 1
if closure > -1 {
@@ -136,7 +136,7 @@ func (s *footnoteParser) Parse(parent gast.Node, block text.Reader, pc parser.Co
return nil
}
open := pos
closure := util.FindClosure(line[pos:], '[', ']', false, false)
closure := util.FindClosure(line[pos:], '[', ']', false, false) //nolint:staticcheck
if closure < 0 {
return nil
}
@@ -156,7 +156,7 @@ func (s *footnoteParser) Parse(parent gast.Node, block text.Reader, pc parser.Co
d := def.(*ast.Footnote)
if bytes.Equal(d.Ref, value) {
if d.Index < 0 {
list.Count += 1
list.Count++
d.Index = list.Count
}
index = d.Index
@@ -272,9 +272,9 @@ func (a *footnoteASTTransformer) Transform(node *gast.Document, reader text.Read
// FootnoteConfig holds configuration values for the footnote extension.
//
// Link* and Backlink* configurations have some variables:
// Occurrances of “^^” in the string will be replaced by the
// Occurrences of “^^” in the string will be replaced by the
// corresponding footnote number in the HTML output.
// Occurrances of “%%” will be replaced by a number for the
// Occurrences of “%%” will be replaced by a number for the
// reference (footnotes can have multiple references).
type FootnoteConfig struct {
html.Config
@@ -525,7 +525,8 @@ func (r *FootnoteHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRegist
reg.Register(ast.KindFootnoteList, r.renderFootnoteList)
}
func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *FootnoteHTMLRenderer) renderFootnoteLink(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
n := node.(*ast.FootnoteLink)
is := strconv.Itoa(n.Index)
@@ -556,7 +557,8 @@ func (r *FootnoteHTMLRenderer) renderFootnoteLink(w util.BufWriter, source []byt
return gast.WalkContinue, nil
}
func (r *FootnoteHTMLRenderer) renderFootnoteBacklink(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *FootnoteHTMLRenderer) renderFootnoteBacklink(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
n := node.(*ast.FootnoteBacklink)
is := strconv.Itoa(n.Index)
@@ -581,7 +583,8 @@ func (r *FootnoteHTMLRenderer) renderFootnoteBacklink(w util.BufWriter, source [
return gast.WalkContinue, nil
}
func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *FootnoteHTMLRenderer) renderFootnote(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
n := node.(*ast.Footnote)
is := strconv.Itoa(n.Index)
if entering {
@@ -600,7 +603,8 @@ func (r *FootnoteHTMLRenderer) renderFootnote(w util.BufWriter, source []byte, n
return gast.WalkContinue, nil
}
func (r *FootnoteHTMLRenderer) renderFootnoteList(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *FootnoteHTMLRenderer) renderFootnoteList(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
_, _ = w.WriteString(`<div class="footnotes" role="doc-endnotes"`)
if node.Attributes() != nil {

View File

@@ -11,9 +11,9 @@ import (
"github.com/yuin/goldmark/util"
)
var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?:[/#?][-a-zA-Z0-9@:%_\+.~#!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`)
var wwwURLRegxp = regexp.MustCompile(`^www\.[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?:[/#?][-a-zA-Z0-9@:%_\+.~#!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`) //nolint:golint,lll
var urlRegexp = regexp.MustCompile(`^(?:http|https|ftp)://[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?::\d+)?(?:[/#?][-a-zA-Z0-9@:%_+.~#$!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`)
var urlRegexp = regexp.MustCompile(`^(?:http|https|ftp)://[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-z]+(?::\d+)?(?:[/#?][-a-zA-Z0-9@:%_+.~#$!?&/=\(\);,'">\^{}\[\]` + "`" + `]*)?`) //nolint:golint,lll
// An LinkifyConfig struct is a data structure that holds configuration of the
// Linkify extension.
@@ -92,9 +92,6 @@ func WithLinkifyURLRegexp(value *regexp.Regexp) LinkifyOption {
}
}
// WithLinkifyWWWRegexp is a functional option that specify
// a pattern of the URL without a protocol.
// This pattern must start with 'www.' .
type withLinkifyWWWRegexp struct {
value *regexp.Regexp
}
@@ -107,14 +104,15 @@ func (o *withLinkifyWWWRegexp) SetLinkifyOption(p *LinkifyConfig) {
p.WWWRegexp = o.value
}
// WithLinkifyWWWRegexp is a functional option that specify
// a pattern of the URL without a protocol.
// This pattern must start with 'www.' .
func WithLinkifyWWWRegexp(value *regexp.Regexp) LinkifyOption {
return &withLinkifyWWWRegexp{
value: value,
}
}
// WithLinkifyWWWRegexp is a functional otpion that specify
// a pattern of the email address.
type withLinkifyEmailRegexp struct {
value *regexp.Regexp
}
@@ -127,6 +125,8 @@ func (o *withLinkifyEmailRegexp) SetLinkifyOption(p *LinkifyConfig) {
p.EmailRegexp = o.value
}
// WithLinkifyEmailRegexp is a functional otpion that specify
// a pattern of the email address.
func WithLinkifyEmailRegexp(value *regexp.Regexp) LinkifyOption {
return &withLinkifyEmailRegexp{
value: value,
@@ -303,6 +303,8 @@ type linkify struct {
// Linkify is an extension that allow you to parse text that seems like a URL.
var Linkify = &linkify{}
// NewLinkify creates a new [goldmark.Extender] that
// allow you to parse text that seems like a URL.
func NewLinkify(opts ...LinkifyOption) goldmark.Extender {
return &linkify{
options: opts,

2
vendor/github.com/yuin/goldmark/extension/package.go generated vendored Normal file
View File

@@ -0,0 +1,2 @@
// Package extension is a collection of builtin extensions.
package extension

View File

@@ -85,7 +85,8 @@ func (r *StrikethroughHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncR
// StrikethroughAttributeFilter defines attribute names which dd elements can have.
var StrikethroughAttributeFilter = html.GlobalAttributeFilter
func (r *StrikethroughHTMLRenderer) renderStrikethrough(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *StrikethroughHTMLRenderer) renderStrikethrough(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
if n.Attributes() != nil {
_, _ = w.WriteString("<del")

View File

@@ -23,7 +23,7 @@ type escapedPipeCell struct {
Transformed bool
}
// TableCellAlignMethod indicates how are table cells aligned in HTML format.indicates how are table cells aligned in HTML format.
// TableCellAlignMethod indicates how are table cells aligned in HTML format.
type TableCellAlignMethod int
const (
@@ -181,7 +181,8 @@ func (b *tableParagraphTransformer) Transform(node *gast.Paragraph, reader text.
}
}
func (b *tableParagraphTransformer) parseRow(segment text.Segment, alignments []ast.Alignment, isHeader bool, reader text.Reader, pc parser.Context) *ast.TableRow {
func (b *tableParagraphTransformer) parseRow(segment text.Segment,
alignments []ast.Alignment, isHeader bool, reader text.Reader, pc parser.Context) *ast.TableRow {
source := reader.Source()
line := segment.Value(source)
pos := 0
@@ -369,7 +370,8 @@ var TableAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("width"), // [Deprecated]
)
func (r *TableHTMLRenderer) renderTable(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *TableHTMLRenderer) renderTable(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
_, _ = w.WriteString("<table")
if n.Attributes() != nil {
@@ -391,7 +393,8 @@ var TableHeaderAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("valign"), // [Deprecated since HTML4] [Obsolete since HTML5]
)
func (r *TableHTMLRenderer) renderTableHeader(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *TableHTMLRenderer) renderTableHeader(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
_, _ = w.WriteString("<thead")
if n.Attributes() != nil {
@@ -418,7 +421,8 @@ var TableRowAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("valign"), // [Obsolete since HTML5]
)
func (r *TableHTMLRenderer) renderTableRow(w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *TableHTMLRenderer) renderTableRow(
w util.BufWriter, source []byte, n gast.Node, entering bool) (gast.WalkStatus, error) {
if entering {
_, _ = w.WriteString("<tr")
if n.Attributes() != nil {
@@ -445,12 +449,14 @@ var TableThCellAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("charoff"), // [Obsolete since HTML5]
[]byte("colspan"), // [OK] Number of columns that the cell is to span
[]byte("headers"), // [OK] This attribute contains a list of space-separated strings, each corresponding to the id attribute of the <th> elements that apply to this element
[]byte("headers"), // [OK] This attribute contains a list of space-separated
// strings, each corresponding to the id attribute of the <th> elements that apply to this element
[]byte("height"), // [Deprecated since HTML4] [Obsolete since HTML5]
[]byte("rowspan"), // [OK] Number of rows that the cell is to span
[]byte("scope"), // [OK] This enumerated attribute defines the cells that the header (defined in the <th>) element relates to [NOT OK in <td>]
[]byte("scope"), // [OK] This enumerated attribute defines the cells that
// the header (defined in the <th>) element relates to [NOT OK in <td>]
[]byte("valign"), // [Obsolete since HTML5]
[]byte("width"), // [Deprecated since HTML4] [Obsolete since HTML5]
@@ -466,7 +472,8 @@ var TableTdCellAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("charoff"), // [Obsolete since HTML5]
[]byte("colspan"), // [OK] Number of columns that the cell is to span
[]byte("headers"), // [OK] This attribute contains a list of space-separated strings, each corresponding to the id attribute of the <th> elements that apply to this element
[]byte("headers"), // [OK] This attribute contains a list of space-separated
// strings, each corresponding to the id attribute of the <th> elements that apply to this element
[]byte("height"), // [Deprecated since HTML4] [Obsolete since HTML5]
@@ -477,7 +484,8 @@ var TableTdCellAttributeFilter = html.GlobalAttributeFilter.Extend(
[]byte("width"), // [Deprecated since HTML4] [Obsolete since HTML5]
)
func (r *TableHTMLRenderer) renderTableCell(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *TableHTMLRenderer) renderTableCell(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
n := node.(*ast.TableCell)
tag := "td"
if n.Parent().Kind() == ast.KindTableHeader {

View File

@@ -1,6 +1,8 @@
package extension
import (
"regexp"
"github.com/yuin/goldmark"
gast "github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/extension/ast"
@@ -9,7 +11,6 @@ import (
"github.com/yuin/goldmark/renderer/html"
"github.com/yuin/goldmark/text"
"github.com/yuin/goldmark/util"
"regexp"
)
var taskListRegexp = regexp.MustCompile(`^\[([\sxX])\]\s*`)
@@ -80,21 +81,22 @@ func (r *TaskCheckBoxHTMLRenderer) RegisterFuncs(reg renderer.NodeRendererFuncRe
reg.Register(ast.KindTaskCheckBox, r.renderTaskCheckBox)
}
func (r *TaskCheckBoxHTMLRenderer) renderTaskCheckBox(w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
func (r *TaskCheckBoxHTMLRenderer) renderTaskCheckBox(
w util.BufWriter, source []byte, node gast.Node, entering bool) (gast.WalkStatus, error) {
if !entering {
return gast.WalkContinue, nil
}
n := node.(*ast.TaskCheckBox)
if n.IsChecked {
w.WriteString(`<input checked="" disabled="" type="checkbox"`)
_, _ = w.WriteString(`<input checked="" disabled="" type="checkbox"`)
} else {
w.WriteString(`<input disabled="" type="checkbox"`)
_, _ = w.WriteString(`<input disabled="" type="checkbox"`)
}
if r.XHTML {
w.WriteString(" /> ")
_, _ = w.WriteString(" /> ")
} else {
w.WriteString("> ")
_, _ = w.WriteString("> ")
}
return gast.WalkContinue, nil
}

View File

@@ -36,25 +36,25 @@ func getUnclosedCounter(pc parser.Context) *unclosedCounter {
type TypographicPunctuation int
const (
// LeftSingleQuote is '
// LeftSingleQuote is ' .
LeftSingleQuote TypographicPunctuation = iota + 1
// RightSingleQuote is '
// RightSingleQuote is ' .
RightSingleQuote
// LeftDoubleQuote is "
// LeftDoubleQuote is " .
LeftDoubleQuote
// RightDoubleQuote is "
// RightDoubleQuote is " .
RightDoubleQuote
// EnDash is --
// EnDash is -- .
EnDash
// EmDash is ---
// EmDash is --- .
EmDash
// Ellipsis is ...
// Ellipsis is ... .
Ellipsis
// LeftAngleQuote is <<
// LeftAngleQuote is << .
LeftAngleQuote
// RightAngleQuote is >>
// RightAngleQuote is >> .
RightAngleQuote
// Apostrophe is '
// Apostrophe is ' .
Apostrophe
typographicPunctuationMax
@@ -218,7 +218,8 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
if c == '\'' {
if s.Substitutions[Apostrophe] != nil {
// Handle decade abbrevations such as '90s
if d.CanOpen && !d.CanClose && len(line) > 3 && util.IsNumeric(line[1]) && util.IsNumeric(line[2]) && line[3] == 's' {
if d.CanOpen && !d.CanClose && len(line) > 3 &&
util.IsNumeric(line[1]) && util.IsNumeric(line[2]) && line[3] == 's' {
after := rune(' ')
if len(line) > 4 {
after = util.ToRune(line, 4)
@@ -231,7 +232,8 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
}
}
// special cases: 'twas, 'em, 'net
if len(line) > 1 && (unicode.IsPunct(before) || unicode.IsSpace(before)) && (line[1] == 't' || line[1] == 'e' || line[1] == 'n' || line[1] == 'l') {
if len(line) > 1 && (unicode.IsPunct(before) || unicode.IsSpace(before)) &&
(line[1] == 't' || line[1] == 'e' || line[1] == 'n' || line[1] == 'l') {
node := gast.NewString(s.Substitutions[Apostrophe])
node.SetCode(true)
block.Advance(1)
@@ -239,7 +241,8 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
}
// Convert normal apostrophes. This is probably more flexible than necessary but
// converts any apostrophe in between two alphanumerics.
if len(line) > 1 && (unicode.IsDigit(before) || unicode.IsLetter(before)) && (unicode.IsLetter(util.ToRune(line, 1))) {
if len(line) > 1 && (unicode.IsDigit(before) || unicode.IsLetter(before)) &&
(unicode.IsLetter(util.ToRune(line, 1))) {
node := gast.NewString(s.Substitutions[Apostrophe])
node.SetCode(true)
block.Advance(1)
@@ -249,11 +252,14 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
if s.Substitutions[LeftSingleQuote] != nil && d.CanOpen && !d.CanClose {
nt := LeftSingleQuote
// special cases: Alice's, I'm, Don't, You'd
if len(line) > 1 && (line[1] == 's' || line[1] == 'm' || line[1] == 't' || line[1] == 'd') && (len(line) < 3 || util.IsPunct(line[2]) || util.IsSpace(line[2])) {
if len(line) > 1 && (line[1] == 's' || line[1] == 'm' || line[1] == 't' || line[1] == 'd') &&
(len(line) < 3 || util.IsPunct(line[2]) || util.IsSpace(line[2])) {
nt = RightSingleQuote
}
// special cases: I've, I'll, You're
if len(line) > 2 && ((line[1] == 'v' && line[2] == 'e') || (line[1] == 'l' && line[2] == 'l') || (line[1] == 'r' && line[2] == 'e')) && (len(line) < 4 || util.IsPunct(line[3]) || util.IsSpace(line[3])) {
if len(line) > 2 && ((line[1] == 'v' && line[2] == 'e') ||
(line[1] == 'l' && line[2] == 'l') || (line[1] == 'r' && line[2] == 'e')) &&
(len(line) < 4 || util.IsPunct(line[3]) || util.IsSpace(line[3])) {
nt = RightSingleQuote
}
if nt == LeftSingleQuote {
@@ -266,8 +272,9 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
return node
}
if s.Substitutions[RightSingleQuote] != nil {
// plural possesives and abbreviations: Smiths', doin'
if len(line) > 1 && unicode.IsSpace(util.ToRune(line, 0)) || unicode.IsPunct(util.ToRune(line, 0)) && (len(line) > 2 && !unicode.IsDigit(util.ToRune(line, 1))) {
// plural possesive and abbreviations: Smiths', doin'
if len(line) > 1 && unicode.IsSpace(util.ToRune(line, 0)) || unicode.IsPunct(util.ToRune(line, 0)) &&
(len(line) > 2 && !unicode.IsDigit(util.ToRune(line, 1))) {
node := gast.NewString(s.Substitutions[RightSingleQuote])
node.SetCode(true)
block.Advance(1)
@@ -276,7 +283,8 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
}
if s.Substitutions[RightSingleQuote] != nil && counter.Single > 0 {
isClose := d.CanClose && !d.CanOpen
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && unicode.IsPunct(util.ToRune(line, 1)) && (len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && unicode.IsPunct(util.ToRune(line, 1)) &&
(len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
if isClose || maybeClose {
node := gast.NewString(s.Substitutions[RightSingleQuote])
node.SetCode(true)
@@ -296,7 +304,8 @@ func (s *typographerParser) Parse(parent gast.Node, block text.Reader, pc parser
}
if s.Substitutions[RightDoubleQuote] != nil && counter.Double > 0 {
isClose := d.CanClose && !d.CanOpen
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && (unicode.IsPunct(util.ToRune(line, 1))) && (len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
maybeClose := d.CanClose && d.CanOpen && len(line) > 1 && (unicode.IsPunct(util.ToRune(line, 1))) &&
(len(line) == 2 || (len(line) > 2 && util.IsPunct(line[2]) || util.IsSpace(line[2])))
if isClose || maybeClose {
// special case: "Monitor 21""
if len(line) > 1 && line[1] == '"' && unicode.IsDigit(before) {

View File

@@ -12,7 +12,7 @@ import (
var attrNameID = []byte("id")
var attrNameClass = []byte("class")
// An Attribute is an attribute of the markdown elements
// An Attribute is an attribute of the markdown elements.
type Attribute struct {
Name []byte
Value interface{}
@@ -93,7 +93,8 @@ func parseAttribute(reader text.Reader) (Attribute, bool) {
// CommonMark is basically defined for XHTML(even though it is legacy).
// So we restrict id characters.
for ; i < len(line) && !util.IsSpace(line[i]) &&
(!util.IsPunct(line[i]) || line[i] == '_' || line[i] == '-' || line[i] == ':' || line[i] == '.'); i++ {
(!util.IsPunct(line[i]) || line[i] == '_' ||
line[i] == '-' || line[i] == ':' || line[i] == '.'); i++ {
}
name := attrNameClass
if c == '#' {
@@ -145,7 +146,7 @@ func parseAttributeValue(reader text.Reader) (interface{}, bool) {
reader.SkipSpaces()
c := reader.Peek()
var value interface{}
ok := false
var ok bool
switch c {
case text.EOF:
return Attribute{}, false
@@ -244,7 +245,7 @@ func scanAttributeDecimal(reader text.Reader, w io.ByteWriter) {
for {
c := reader.Peek()
if util.IsNumeric(c) {
w.WriteByte(c)
_ = w.WriteByte(c)
} else {
return
}
@@ -286,7 +287,7 @@ func parseAttributeNumber(reader text.Reader) (float64, bool) {
}
scanAttributeDecimal(reader, &buf)
}
f, err := strconv.ParseFloat(buf.String(), 10)
f, err := strconv.ParseFloat(buf.String(), 64)
if err != nil {
return 0, false
}

View File

@@ -13,7 +13,7 @@ type HeadingConfig struct {
}
// SetOption implements SetOptioner.
func (b *HeadingConfig) SetOption(name OptionName, value interface{}) {
func (b *HeadingConfig) SetOption(name OptionName, _ interface{}) {
switch name {
case optAutoHeadingID:
b.AutoHeadingID = true
@@ -135,7 +135,9 @@ func (b *atxHeadingParser) Open(parent ast.Node, reader text.Reader, pc Context)
for _, attr := range attrs {
node.SetAttribute(attr.Name, attr.Value)
}
node.Lines().Append(text.NewSegment(segment.Start+start+1-segment.Padding, segment.Start+closureOpen-segment.Padding))
node.Lines().Append(text.NewSegment(
segment.Start+start+1-segment.Padding,
segment.Start+closureOpen-segment.Padding))
}
}
}

View File

@@ -66,12 +66,12 @@ func (d *Delimiter) Dump(source []byte, level int) {
var kindDelimiter = ast.NewNodeKind("Delimiter")
// Kind implements Node.Kind
// Kind implements Node.Kind.
func (d *Delimiter) Kind() ast.NodeKind {
return kindDelimiter
}
// Text implements Node.Text
// Text implements Node.Text.
func (d *Delimiter) Text(source []byte) []byte {
return d.Segment.Value(source)
}
@@ -126,7 +126,7 @@ func ScanDelimiter(line []byte, before rune, min int, processor DelimiterProcess
after = util.ToRune(line, j)
}
canOpen, canClose := false, false
var canOpen, canClose bool
beforeIsPunctuation := util.IsPunctRune(before)
beforeIsWhitespace := util.IsSpaceRune(before)
afterIsPunctuation := util.IsPunctRune(after)

View File

@@ -76,7 +76,7 @@ var allowedBlockTags = map[string]bool{
"ul": true,
}
var htmlBlockType1OpenRegexp = regexp.MustCompile(`(?i)^[ ]{0,3}<(script|pre|style|textarea)(?:\s.*|>.*|/>.*|)(?:\r\n|\n)?$`)
var htmlBlockType1OpenRegexp = regexp.MustCompile(`(?i)^[ ]{0,3}<(script|pre|style|textarea)(?:\s.*|>.*|/>.*|)(?:\r\n|\n)?$`) //nolint:golint,lll
var htmlBlockType1CloseRegexp = regexp.MustCompile(`(?i)^.*</(?:script|pre|style|textarea)>.*`)
var htmlBlockType2OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<!\-\-`)
@@ -91,9 +91,9 @@ var htmlBlockType4Close = []byte{'>'}
var htmlBlockType5OpenRegexp = regexp.MustCompile(`^[ ]{0,3}<\!\[CDATA\[`)
var htmlBlockType5Close = []byte{']', ']', '>'}
var htmlBlockType6Regexp = regexp.MustCompile(`^[ ]{0,3}<(?:/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(?:[ ].*|>.*|/>.*|)(?:\r\n|\n)?$`)
var htmlBlockType6Regexp = regexp.MustCompile(`^[ ]{0,3}<(?:/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(?:[ ].*|>.*|/>.*|)(?:\r\n|\n)?$`) //nolint:golint,lll
var htmlBlockType7Regexp = regexp.MustCompile(`^[ ]{0,3}<(/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(` + attributePattern + `*)[ ]*(?:>|/>)[ ]*(?:\r\n|\n)?$`)
var htmlBlockType7Regexp = regexp.MustCompile(`^[ ]{0,3}<(/[ ]*)?([a-zA-Z]+[a-zA-Z0-9\-]*)(` + attributePattern + `*)[ ]*(?:>|/>)[ ]*(?:\r\n|\n)?$`) //nolint:golint,lll
type htmlBlockParser struct {
}
@@ -135,7 +135,8 @@ func (b *htmlBlockParser) Open(parent ast.Node, reader text.Reader, pc Context)
_, ok := allowedBlockTags[tagName]
if ok {
node = ast.NewHTMLBlock(ast.HTMLBlockType6)
} else if tagName != "script" && tagName != "style" && tagName != "pre" && !ast.IsParagraph(last) && !(isCloseTag && hasAttr) { // type 7 can not interrupt paragraph
} else if tagName != "script" && tagName != "style" &&
tagName != "pre" && !ast.IsParagraph(last) && !(isCloseTag && hasAttr) { // type 7 can not interrupt paragraph
node = ast.NewHTMLBlock(ast.HTMLBlockType7)
}
}

View File

@@ -250,7 +250,8 @@ var linkFindClosureOptions text.FindClosureOptions = text.FindClosureOptions{
Advance: true,
}
func (s *linkParser) parseReferenceLink(parent ast.Node, last *linkLabelState, block text.Reader, pc Context) (*ast.Link, bool) {
func (s *linkParser) parseReferenceLink(parent ast.Node, last *linkLabelState,
block text.Reader, pc Context) (*ast.Link, bool) {
_, orgpos := block.Position()
block.Advance(1) // skip '['
segments, found := block.FindClosure('[', ']', linkFindClosureOptions)

View File

@@ -22,7 +22,7 @@ var listItemFlagValue interface{} = true
// Same as
// `^(([ ]*)([\-\*\+]))(\s+.*)?\n?$`.FindSubmatchIndex or
// `^(([ ]*)(\d{1,9}[\.\)]))(\s+.*)?\n?$`.FindSubmatchIndex
// `^(([ ]*)(\d{1,9}[\.\)]))(\s+.*)?\n?$`.FindSubmatchIndex.
func parseListItem(line []byte) ([6]int, listItemType) {
i := 0
l := len(line)
@@ -89,7 +89,7 @@ func matchesListItem(source []byte, strict bool) ([6]int, listItemType) {
}
func calcListOffset(source []byte, match [6]int) int {
offset := 0
var offset int
if match[4] < 0 || util.IsBlank(source[match[4]:]) { // list item starts with a blank line
offset = 1
} else {
@@ -250,14 +250,14 @@ func (b *listParser) Close(node ast.Node, reader text.Reader, pc Context) {
for c := node.FirstChild(); c != nil && list.IsTight; c = c.NextSibling() {
if c.FirstChild() != nil && c.FirstChild() != c.LastChild() {
for c1 := c.FirstChild().NextSibling(); c1 != nil; c1 = c1.NextSibling() {
if bl, ok := c1.(ast.Node); ok && bl.HasBlankPreviousLines() {
if c1.HasBlankPreviousLines() {
list.IsTight = false
break
}
}
}
if c != node.FirstChild() {
if bl, ok := c.(ast.Node); ok && bl.HasBlankPreviousLines() {
if c.HasBlankPreviousLines() {
list.IsTight = false
}
}

View File

@@ -403,7 +403,8 @@ func (p *parseContext) IsInLinkLabel() bool {
type State int
const (
none State = 1 << iota
// None is a default value of the [State].
None State = 1 << iota
// Continue indicates parser can continue parsing.
Continue
@@ -1049,7 +1050,7 @@ func isBlankLine(lineNum, level int, stats []lineStat) bool {
func (p *parser) parseBlocks(parent ast.Node, reader text.Reader, pc Context) {
pc.SetOpenedBlocks([]Block{})
blankLines := make([]lineStat, 0, 128)
isBlank := false
var isBlank bool
for { // process blocks separated by blank lines
_, lines, ok := reader.SkipBlankLines()
if !ok {
@@ -1152,18 +1153,23 @@ func (p *parser) parseBlock(block text.BlockReader, parent ast.Node, pc Context)
break
}
lineLength := len(line)
var lineBreakFlags uint8 = 0
var lineBreakFlags uint8
hasNewLine := line[lineLength-1] == '\n'
if ((lineLength >= 3 && line[lineLength-2] == '\\' && line[lineLength-3] != '\\') || (lineLength == 2 && line[lineLength-2] == '\\')) && hasNewLine { // ends with \\n
if ((lineLength >= 3 && line[lineLength-2] == '\\' &&
line[lineLength-3] != '\\') || (lineLength == 2 && line[lineLength-2] == '\\')) && hasNewLine { // ends with \\n
lineLength -= 2
lineBreakFlags |= lineBreakHard | lineBreakVisible
} else if ((lineLength >= 4 && line[lineLength-3] == '\\' && line[lineLength-2] == '\r' && line[lineLength-4] != '\\') || (lineLength == 3 && line[lineLength-3] == '\\' && line[lineLength-2] == '\r')) && hasNewLine { // ends with \\r\n
} else if ((lineLength >= 4 && line[lineLength-3] == '\\' && line[lineLength-2] == '\r' &&
line[lineLength-4] != '\\') || (lineLength == 3 && line[lineLength-3] == '\\' && line[lineLength-2] == '\r')) &&
hasNewLine { // ends with \\r\n
lineLength -= 3
lineBreakFlags |= lineBreakHard | lineBreakVisible
} else if lineLength >= 3 && line[lineLength-3] == ' ' && line[lineLength-2] == ' ' && hasNewLine { // ends with [space][space]\n
} else if lineLength >= 3 && line[lineLength-3] == ' ' && line[lineLength-2] == ' ' &&
hasNewLine { // ends with [space][space]\n
lineLength -= 3
lineBreakFlags |= lineBreakHard
} else if lineLength >= 4 && line[lineLength-4] == ' ' && line[lineLength-3] == ' ' && line[lineLength-2] == '\r' && hasNewLine { // ends with [space][space]\r\n
} else if lineLength >= 4 && line[lineLength-4] == ' ' && line[lineLength-3] == ' ' &&
line[lineLength-2] == '\r' && hasNewLine { // ends with [space][space]\r\n
lineLength -= 4
lineBreakFlags |= lineBreakHard
} else if hasNewLine {

View File

@@ -15,7 +15,7 @@ type rawHTMLParser struct {
var defaultRawHTMLParser = &rawHTMLParser{}
// NewRawHTMLParser return a new InlineParser that can parse
// inline htmls
// inline htmls.
func NewRawHTMLParser() InlineParser {
return defaultRawHTMLParser
}
@@ -48,10 +48,10 @@ func (s *rawHTMLParser) Parse(parent ast.Node, block text.Reader, pc Context) as
}
var tagnamePattern = `([A-Za-z][A-Za-z0-9-]*)`
var attributePattern = `(?:[\r\n \t]+[a-zA-Z_:][a-zA-Z0-9:._-]*(?:[\r\n \t]*=[\r\n \t]*(?:[^\"'=<>` + "`" + `\x00-\x20]+|'[^']*'|"[^"]*"))?)`
var openTagRegexp = regexp.MustCompile("^<" + tagnamePattern + attributePattern + `*[ \t]*/?>`)
var closeTagRegexp = regexp.MustCompile("^</" + tagnamePattern + `\s*>`)
var spaceOrOneNewline = `(?:[ \t]|(?:\r\n|\n){0,1})`
var attributePattern = `(?:[\r\n \t]+[a-zA-Z_:][a-zA-Z0-9:._-]*(?:[\r\n \t]*=[\r\n \t]*(?:[^\"'=<>` + "`" + `\x00-\x20]+|'[^']*'|"[^"]*"))?)` //nolint:golint,lll
var openTagRegexp = regexp.MustCompile("^<" + tagnamePattern + attributePattern + `*` + spaceOrOneNewline + `*/?>`)
var closeTagRegexp = regexp.MustCompile("^</" + tagnamePattern + spaceOrOneNewline + `*>`)
var openProcessingInstruction = []byte("<?")
var closeProcessingInstruction = []byte("?>")
@@ -153,9 +153,8 @@ func (s *rawHTMLParser) parseMultiLineRegexp(reg *regexp.Regexp, block text.Read
if l == eline {
block.Advance(end - start)
break
} else {
block.AdvanceLine()
}
block.AdvanceLine()
}
return node
}

View File

@@ -91,7 +91,7 @@ func (b *setextHeadingParser) Close(node ast.Node, reader text.Reader, pc Contex
para.Lines().Append(segment)
heading.Parent().InsertAfter(heading.Parent(), heading, para)
} else {
next.(ast.Node).Lines().Unshift(segment)
next.Lines().Unshift(segment)
}
heading.Parent().RemoveChild(heading.Parent(), heading)
} else {

View File

@@ -1,3 +1,4 @@
// Package html implements renderer that outputs HTMLs.
package html
import (
@@ -253,15 +254,17 @@ var GlobalAttributeFilter = util.NewBytesFilter(
[]byte("translate"),
)
func (r *Renderer) renderDocument(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderDocument(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
// nothing to do
return ast.WalkContinue, nil
}
// HeadingAttributeFilter defines attribute names which heading elements can have
// HeadingAttributeFilter defines attribute names which heading elements can have.
var HeadingAttributeFilter = GlobalAttributeFilter
func (r *Renderer) renderHeading(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderHeading(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
n := node.(*ast.Heading)
if entering {
_, _ = w.WriteString("<h")
@@ -278,12 +281,13 @@ func (r *Renderer) renderHeading(w util.BufWriter, source []byte, node ast.Node,
return ast.WalkContinue, nil
}
// BlockquoteAttributeFilter defines attribute names which blockquote elements can have
// BlockquoteAttributeFilter defines attribute names which blockquote elements can have.
var BlockquoteAttributeFilter = GlobalAttributeFilter.Extend(
[]byte("cite"),
)
func (r *Renderer) renderBlockquote(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderBlockquote(
w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
if entering {
if n.Attributes() != nil {
_, _ = w.WriteString("<blockquote")
@@ -308,7 +312,8 @@ func (r *Renderer) renderCodeBlock(w util.BufWriter, source []byte, n ast.Node,
return ast.WalkContinue, nil
}
func (r *Renderer) renderFencedCodeBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderFencedCodeBlock(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
n := node.(*ast.FencedCodeBlock)
if entering {
_, _ = w.WriteString("<pre><code")
@@ -326,7 +331,8 @@ func (r *Renderer) renderFencedCodeBlock(w util.BufWriter, source []byte, node a
return ast.WalkContinue, nil
}
func (r *Renderer) renderHTMLBlock(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderHTMLBlock(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
n := node.(*ast.HTMLBlock)
if entering {
if r.Unsafe {
@@ -428,7 +434,7 @@ func (r *Renderer) renderParagraph(w util.BufWriter, source []byte, n ast.Node,
func (r *Renderer) renderTextBlock(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
if !entering {
if _, ok := n.NextSibling().(ast.Node); ok && n.FirstChild() != nil {
if n.NextSibling() != nil && n.FirstChild() != nil {
_ = w.WriteByte('\n')
}
}
@@ -444,7 +450,8 @@ var ThematicAttributeFilter = GlobalAttributeFilter.Extend(
[]byte("width"), // [Deprecated]
)
func (r *Renderer) renderThematicBreak(w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderThematicBreak(
w util.BufWriter, source []byte, n ast.Node, entering bool) (ast.WalkStatus, error) {
if !entering {
return ast.WalkContinue, nil
}
@@ -473,7 +480,8 @@ var LinkAttributeFilter = GlobalAttributeFilter.Extend(
[]byte("target"),
)
func (r *Renderer) renderAutoLink(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderAutoLink(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
n := node.(*ast.AutoLink)
if !entering {
return ast.WalkContinue, nil
@@ -528,7 +536,8 @@ func (r *Renderer) renderCodeSpan(w util.BufWriter, source []byte, n ast.Node, e
// EmphasisAttributeFilter defines attribute names which emphasis elements can have.
var EmphasisAttributeFilter = GlobalAttributeFilter
func (r *Renderer) renderEmphasis(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderEmphasis(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
n := node.(*ast.Emphasis)
tag := "em"
if n.Level == 2 {
@@ -618,7 +627,8 @@ func (r *Renderer) renderImage(w util.BufWriter, source []byte, node ast.Node, e
return ast.WalkSkipChildren, nil
}
func (r *Renderer) renderRawHTML(w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
func (r *Renderer) renderRawHTML(
w util.BufWriter, source []byte, node ast.Node, entering bool) (ast.WalkStatus, error) {
if !entering {
return ast.WalkSkipChildren, nil
}
@@ -901,20 +911,24 @@ var bVb = []byte("vbscript:")
var bFile = []byte("file:")
var bData = []byte("data:")
func hasPrefix(s, prefix []byte) bool {
return len(s) >= len(prefix) && bytes.Equal(bytes.ToLower(s[0:len(prefix)]), bytes.ToLower(prefix))
}
// IsDangerousURL returns true if the given url seems a potentially dangerous url,
// otherwise false.
func IsDangerousURL(url []byte) bool {
if bytes.HasPrefix(url, bDataImage) && len(url) >= 11 {
if hasPrefix(url, bDataImage) && len(url) >= 11 {
v := url[11:]
if bytes.HasPrefix(v, bPng) || bytes.HasPrefix(v, bGif) ||
bytes.HasPrefix(v, bJpeg) || bytes.HasPrefix(v, bWebp) ||
bytes.HasPrefix(v, bSvg) {
if hasPrefix(v, bPng) || hasPrefix(v, bGif) ||
hasPrefix(v, bJpeg) || hasPrefix(v, bWebp) ||
hasPrefix(v, bSvg) {
return false
}
return true
}
return bytes.HasPrefix(url, bJs) || bytes.HasPrefix(url, bVb) ||
bytes.HasPrefix(url, bFile) || bytes.HasPrefix(url, bData)
return hasPrefix(url, bJs) || hasPrefix(url, bVb) ||
hasPrefix(url, bFile) || hasPrefix(url, bData)
}
func nodeToHTMLText(n ast.Node, source []byte) []byte {

View File

@@ -16,7 +16,7 @@ type Config struct {
NodeRenderers util.PrioritizedSlice
}
// NewConfig returns a new Config
// NewConfig returns a new Config.
func NewConfig() *Config {
return &Config{
Options: map[OptionName]interface{}{},
@@ -78,7 +78,7 @@ type NodeRenderer interface {
RegisterFuncs(NodeRendererFuncRegisterer)
}
// A NodeRendererFuncRegisterer registers
// A NodeRendererFuncRegisterer registers given NodeRendererFunc to this object.
type NodeRendererFuncRegisterer interface {
// Register registers given NodeRendererFunc to this object.
Register(ast.NodeKind, NodeRendererFunc)

2
vendor/github.com/yuin/goldmark/text/package.go generated vendored Normal file
View File

@@ -0,0 +1,2 @@
// Package text provides functionalities to manipulate texts.
package text

View File

@@ -1,6 +1,7 @@
package text
import (
"bytes"
"io"
"regexp"
"unicode/utf8"
@@ -75,7 +76,7 @@ type Reader interface {
FindClosure(opener, closer byte, options FindClosureOptions) (*Segments, bool)
}
// FindClosureOptions is options for Reader.FindClosure
// FindClosureOptions is options for Reader.FindClosure.
type FindClosureOptions struct {
// CodeSpan is a flag for the FindClosure. If this is set to true,
// FindClosure ignores closers in codespans.
@@ -153,7 +154,7 @@ func (r *reader) PeekLine() ([]byte, Segment) {
return nil, r.pos
}
// io.RuneReader interface
// io.RuneReader interface.
func (r *reader) ReadRune() (rune, int, error) {
return readRuneReader(r)
}
@@ -353,7 +354,7 @@ func (r *blockReader) Value(seg Segment) []byte {
return ret
}
// io.RuneReader interface
// io.RuneReader interface.
func (r *blockReader) ReadRune() (rune, int, error) {
return readRuneReader(r)
}
@@ -537,24 +538,30 @@ func matchReader(r Reader, reg *regexp.Regexp) bool {
}
func findSubMatchReader(r Reader, reg *regexp.Regexp) [][]byte {
oldline, oldseg := r.Position()
oldLine, oldSeg := r.Position()
match := reg.FindReaderSubmatchIndex(r)
r.SetPosition(oldline, oldseg)
r.SetPosition(oldLine, oldSeg)
if match == nil {
return nil
}
runes := make([]rune, 0, match[1]-match[0])
var bb bytes.Buffer
bb.Grow(match[1] - match[0])
for i := 0; i < match[1]; {
r, size, _ := readRuneReader(r)
i += size
runes = append(runes, r)
bb.WriteRune(r)
}
result := [][]byte{}
bs := bb.Bytes()
var result [][]byte
for i := 0; i < len(match); i += 2 {
result = append(result, []byte(string(runes[match[i]:match[i+1]])))
if match[i] < 0 {
result = append(result, []byte{})
continue
}
result = append(result, bs[match[i]:match[i+1]])
}
r.SetPosition(oldline, oldseg)
r.SetPosition(oldLine, oldSeg)
r.Advance(match[1] - match[0])
return result
}

View File

@@ -1,3 +1,4 @@
//nolint:golint,lll,misspell
package util
// An HTML5Entity struct represents HTML5 entitites.
@@ -8,7 +9,7 @@ type HTML5Entity struct {
}
// LookUpHTML5EntityByName returns (an HTML5Entity, true) if an entity named
// given name is found, otherwise (nil, false)
// given name is found, otherwise (nil, false).
func LookUpHTML5EntityByName(name string) (*HTML5Entity, bool) {
v, ok := html5entities[name]
return v, ok

File diff suppressed because it is too large

View File

@@ -63,12 +63,13 @@ func (b *CopyOnWriteBuffer) AppendString(value string) {
// WriteByte writes the given byte to the buffer.
// WriteByte allocate new buffer and clears it at the first time.
func (b *CopyOnWriteBuffer) WriteByte(c byte) {
func (b *CopyOnWriteBuffer) WriteByte(c byte) error {
if !b.copied {
b.buffer = make([]byte, 0, len(b.buffer)+20)
b.copied = true
}
b.buffer = append(b.buffer, c)
return nil
}
// AppendByte appends given bytes to the buffer.
@@ -145,12 +146,12 @@ func TabWidth(currentPos int) int {
// If the line contains tab characters, paddings may be not zero.
// currentPos==0 and width==2:
//
// position: 0 1
// [TAB]aaaa
// width: 1234 5678
// position: 0 1
// [TAB]aaaa
// width: 1234 5678
//
// width=2 is in the tab character. In this case, IndentPosition returns
// (pos=1, padding=2)
// (pos=1, padding=2).
func IndentPosition(bs []byte, currentPos, width int) (pos, padding int) {
return IndentPositionPadding(bs, currentPos, 0, width)
}
@@ -424,7 +425,7 @@ func DoFullUnicodeCaseFolding(v []byte) []byte {
if c >= 0x41 && c <= 0x5a {
// A-Z to a-z
cob.Write(v[n:i])
cob.WriteByte(c + 32)
_ = cob.WriteByte(c + 32)
n = i + 1
}
continue
@@ -521,7 +522,7 @@ func ToLinkReference(v []byte) string {
return string(ReplaceSpaces(v, ' '))
}
var htmlEscapeTable = [256][]byte{nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, []byte("&quot;"), nil, nil, nil, []byte("&amp;"), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, []byte("&lt;"), nil, []byte("&gt;"), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil}
var htmlEscapeTable = [256][]byte{nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, []byte("&quot;"), nil, nil, nil, []byte("&amp;"), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, []byte("&lt;"), nil, []byte("&gt;"), nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil} //nolint:golint,lll
// EscapeHTMLByte returns HTML escaped bytes if the given byte should be escaped,
// otherwise nil.
@@ -557,7 +558,7 @@ func UnescapePunctuations(source []byte) []byte {
c := source[i]
if i < limit-1 && c == '\\' && IsPunct(source[i+1]) {
cob.Write(source[n:i])
cob.WriteByte(source[i+1])
_ = cob.WriteByte(source[i+1])
i += 2
n = i
continue
@@ -573,9 +574,9 @@ func UnescapePunctuations(source []byte) []byte {
// ResolveNumericReferences resolve numeric references like '&#1234;" .
func ResolveNumericReferences(source []byte) []byte {
cob := NewCopyOnWriteBuffer(source)
buf := make([]byte, 6, 6)
buf := make([]byte, 6)
limit := len(source)
ok := false
var ok bool
n := 0
for i := 0; i < limit; i++ {
if source[i] == '&' {
@@ -625,7 +626,7 @@ func ResolveNumericReferences(source []byte) []byte {
func ResolveEntityNames(source []byte) []byte {
cob := NewCopyOnWriteBuffer(source)
limit := len(source)
ok := false
var ok bool
n := 0
for i := 0; i < limit; i++ {
if source[i] == '&' {
@@ -658,9 +659,9 @@ var htmlSpace = []byte("%20")
// URLEscape escape the given URL.
// If resolveReference is set true:
// 1. unescape punctuations
// 2. resolve numeric references
// 3. resolve entity references
// 1. unescape punctuations
// 2. resolve numeric references
// 3. resolve entity references
//
// URL encoded values (%xx) are kept as is.
func URLEscape(v []byte, resolveReference bool) []byte {
@@ -750,7 +751,7 @@ func FindURLIndex(b []byte) int {
return i
}
var emailDomainRegexp = regexp.MustCompile(`^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*`)
var emailDomainRegexp = regexp.MustCompile(`^[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*`) //nolint:golint,lll
// FindEmailIndex returns a stop index value if the given bytes seem an email address.
func FindEmailIndex(b []byte) int {
@@ -781,18 +782,19 @@ func FindEmailIndex(b []byte) int {
var spaces = []byte(" \t\n\x0b\x0c\x0d")
var spaceTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
var spaceTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} //nolint:golint,lll
var punctTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
var punctTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} //nolint:golint,lll
// a-zA-Z0-9, ;/?:@&=+$,-_.!~*'()#
var urlEscapeTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
var utf8lenTable = [256]int8{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 99, 99, 99, 99, 99, 99, 99, 99}
var urlEscapeTable = [256]int8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} //nolint:golint,lll
var urlTable = [256]uint8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 5, 5, 1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 0, 1, 0, 1, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}
var utf8lenTable = [256]int8{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 99, 99, 99, 99, 99, 99, 99, 99} //nolint:golint,lll
var emailTable = [256]uint8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}
var urlTable = [256]uint8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 5, 5, 1, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 1, 1, 0, 1, 0, 1, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1} //nolint:golint,lll
var emailTable = [256]uint8{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} //nolint:golint,lll
// UTF8Len returns a byte length of the utf-8 character.
func UTF8Len(b byte) int8 {
@@ -836,11 +838,18 @@ func IsAlphaNumeric(c byte) bool {
// IsEastAsianWideRune returns trhe if the given rune is an east asian wide character, otherwise false.
func IsEastAsianWideRune(r rune) bool {
// https://en.wikipedia.org/wiki/CJK_Symbols_and_Punctuation
var CJKSymbolsAndPunctuation = &unicode.RangeTable{
R16: []unicode.Range16{
{0x3000, 0x303F, 1},
},
}
return unicode.Is(unicode.Hiragana, r) ||
unicode.Is(unicode.Katakana, r) ||
unicode.Is(unicode.Han, r) ||
unicode.Is(unicode.Lm, r) ||
unicode.Is(unicode.Hangul, r)
unicode.Is(unicode.Hangul, r) ||
unicode.Is(CJKSymbolsAndPunctuation, r)
}
// A BufWriter is a subset of the bufio.Writer .
@@ -862,7 +871,7 @@ type PrioritizedValue struct {
Priority int
}
// PrioritizedSlice is a slice of the PrioritizedValues
// PrioritizedSlice is a slice of the PrioritizedValues.
type PrioritizedSlice []PrioritizedValue
// Sort sorts the PrioritizedSlice in ascending order.
@@ -977,7 +986,7 @@ func (s *bytesFilter) Contains(b []byte) bool {
}
h := bytesHash(b) % uint64(len(s.slots))
slot := s.slots[h]
if slot == nil || len(slot) == 0 {
if len(slot) == 0 {
return false
}
for _, element := range slot {

View File

@@ -1,3 +1,4 @@
//go:build appengine || js
// +build appengine js
package util

View File

@@ -1,3 +1,4 @@
//go:build !appengine && !js
// +build !appengine,!js
package util

View File

@@ -6,11 +6,11 @@ import (
lru "github.com/hashicorp/golang-lru/v2"
"github.com/rs/zerolog"
"go.mau.fi/util/dbutil"
"maunium.net/go/mautrix"
"maunium.net/go/mautrix/crypto"
"maunium.net/go/mautrix/crypto/cryptohelper"
"maunium.net/go/mautrix/event"
"maunium.net/go/mautrix/util/dbutil"
)
const (

373
vendor/go.mau.fi/util/LICENSE vendored Normal file
View File

@@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

33
vendor/go.mau.fi/util/dbutil/json.go vendored Normal file
View File

@@ -0,0 +1,33 @@
package dbutil
import (
"database/sql/driver"
"encoding/json"
"fmt"
)
// JSON is a utility type for using arbitrary JSON data as values in database Exec and Scan calls.
type JSON struct {
Data any
}
func (j JSON) Scan(i any) error {
switch value := i.(type) {
case nil:
return nil
case string:
return json.Unmarshal([]byte(value), j.Data)
case []byte:
return json.Unmarshal(value, j.Data)
default:
return fmt.Errorf("invalid type %T for dbutil.JSON.Scan", i)
}
}
func (j JSON) Value() (driver.Value, error) {
if j.Data == nil {
return nil, nil
}
v, err := json.Marshal(j.Data)
return string(v), err
}
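
The new dbutil.JSON wrapper lets arbitrary Go values travel through database/sql as JSON: Value() marshals on write, Scan() unmarshals on read. A minimal usage sketch — the emails table, its meta column, and the payload struct are invented for illustration:

package main

import (
	"database/sql"

	"go.mau.fi/util/dbutil"
)

type payload struct {
	Subject string `json:"subject"`
	Spam    bool   `json:"spam"`
}

func saveAndLoad(db *sql.DB) (payload, error) {
	// Value() marshals the wrapped struct into a JSON string before the statement runs.
	_, err := db.Exec(`INSERT INTO emails (id, meta) VALUES (?, ?)`, 1, dbutil.JSON{Data: &payload{Subject: "hi"}})
	if err != nil {
		return payload{}, err
	}
	// Scan() unmarshals the stored string/[]byte back into the struct (and is a no-op for NULL).
	var meta payload
	err = db.QueryRow(`SELECT meta FROM emails WHERE id = ?`, 1).Scan(dbutil.JSON{Data: &meta})
	return meta, err
}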

View File

@@ -7,7 +7,6 @@ import (
"time"
"github.com/rs/zerolog"
"maunium.net/go/maulogger/v2"
)
type DatabaseLogger interface {
@@ -31,37 +30,6 @@ func (n noopLogger) Warn(msg string, args ...interface{}) {}
func (n noopLogger) QueryTiming(_ context.Context, _, _ string, _ []interface{}, _ int, _ time.Duration, _ error) {
}
type mauLogger struct {
l maulogger.Logger
}
// Deprecated: Use zerolog instead
func MauLogger(log maulogger.Logger) DatabaseLogger {
return &mauLogger{l: log}
}
func (m mauLogger) WarnUnsupportedVersion(current, compat, latest int) {
m.l.Warnfln("Unsupported database schema version: currently on v%d (compatible down to v%d), latest known: v%d - continuing anyway", current, compat, latest)
}
func (m mauLogger) PrepareUpgrade(current, compat, latest int) {
m.l.Infofln("Database currently on v%d (compat: v%d), latest known: v%d", current, compat, latest)
}
func (m mauLogger) DoUpgrade(from, to int, message string, _ bool) {
m.l.Infofln("Upgrading database from v%d to v%d: %s", from, to, message)
}
func (m mauLogger) QueryTiming(_ context.Context, method, query string, _ []interface{}, _ int, duration time.Duration, _ error) {
if duration > 1*time.Second {
m.l.Warnfln("%s(%s) took %.3f seconds", method, query, duration.Seconds())
}
}
func (m mauLogger) Warn(msg string, args ...interface{}) {
m.l.Warnfln(msg, args...)
}
type zeroLogger struct {
l *zerolog.Logger
ZeroLogSettings
@@ -70,6 +38,10 @@ type zeroLogger struct {
type ZeroLogSettings struct {
CallerSkipFrame int
Caller bool
// TraceLogAllQueries specifies whether or not all queries should be logged
// at the TRACE level.
TraceLogAllQueries bool
}
func ZeroLogger(log zerolog.Logger, cfg ...ZeroLogSettings) DatabaseLogger {
@@ -125,7 +97,7 @@ func (z zeroLogger) QueryTiming(ctx context.Context, method, query string, args
if log.GetLevel() == zerolog.Disabled || log == zerolog.DefaultContextLogger {
log = z.l
}
if log.GetLevel() != zerolog.TraceLevel && duration < 1*time.Second {
if (!z.TraceLogAllQueries || log.GetLevel() != zerolog.TraceLevel) && duration < 1*time.Second {
return
}
if nrows > -1 {
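
The deprecated maulogger adapter is removed and the zerolog adapter gains TraceLogAllQueries: without it, even a TRACE-level logger now only sees queries slower than one second. A small sketch of constructing the logger (how it is attached to the Database is left to the caller):

package main

import (
	"os"

	"github.com/rs/zerolog"
	"go.mau.fi/util/dbutil"
)

func newDBLogger() dbutil.DatabaseLogger {
	log := zerolog.New(os.Stdout).With().Timestamp().Logger().Level(zerolog.TraceLevel)
	return dbutil.ZeroLogger(log, dbutil.ZeroLogSettings{
		Caller: true,
		// Opt back in to logging every query at TRACE; otherwise only the >1s
		// slow-query warnings are emitted.
		TraceLogAllQueries: true,
	})
}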

View File

@@ -15,7 +15,8 @@ import (
"github.com/rs/zerolog"
"maunium.net/go/mautrix/util"
"go.mau.fi/util/exerrors"
"go.mau.fi/util/random"
)
var (
@@ -33,10 +34,10 @@ const (
func (db *Database) DoTxn(ctx context.Context, opts *sql.TxOptions, fn func(ctx context.Context) error) error {
if ctx.Value(ContextKeyDatabaseTransaction) != nil {
zerolog.Ctx(ctx).Debug().Msg("Already in a transaction, not creating a new one")
zerolog.Ctx(ctx).Trace().Msg("Already in a transaction, not creating a new one")
return fn(ctx)
}
log := zerolog.Ctx(ctx).With().Str("db_txn_id", util.RandomString(12)).Logger()
log := zerolog.Ctx(ctx).With().Str("db_txn_id", random.String(12)).Logger()
start := time.Now()
defer func() {
dur := time.Since(start)
@@ -49,13 +50,13 @@ func (db *Database) DoTxn(ctx context.Context, opts *sql.TxOptions, fn func(ctx
log.Warn().
Float64("duration_seconds", dur.Seconds()).
Caller(callerSkip).
Msg("Transaction took a long time")
Msg("Transaction took long")
}
}()
tx, err := db.BeginTx(ctx, opts)
if err != nil {
log.Trace().Err(err).Msg("Failed to begin transaction")
return util.NewDualError(ErrTxnBegin, err)
return exerrors.NewDualError(ErrTxnBegin, err)
}
log.Trace().Msg("Transaction started")
tx.noTotalLog = true
@@ -75,7 +76,7 @@ func (db *Database) DoTxn(ctx context.Context, opts *sql.TxOptions, fn func(ctx
err = tx.Commit()
if err != nil {
log.Trace().Err(err).Msg("Commit failed")
return util.NewDualError(ErrTxnCommit, err)
return exerrors.NewDualError(ErrTxnCommit, err)
}
log.Trace().Msg("Commit successful")
return nil
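
DoTxn stores the open transaction in the context, so nested DoTxn calls (now logged at TRACE rather than DEBUG) simply run their callback inside the existing transaction. A bare-bones sketch of the calling pattern:

package main

import (
	"context"

	"go.mau.fi/util/dbutil"
)

func withTxn(ctx context.Context, db *dbutil.Database) error {
	return db.DoTxn(ctx, nil, func(ctx context.Context) error {
		// Run statements here with whatever helpers the caller already has. A nested
		// db.DoTxn(ctx, ...) at this point finds ContextKeyDatabaseTransaction in ctx
		// and reuses the transaction instead of opening a new one.
		return nil
	})
}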

View File

@@ -4,7 +4,7 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package util
package exerrors
import (
"errors"

View File

@@ -4,13 +4,14 @@
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package util
package exgjson
import (
"strings"
)
var GJSONEscaper = strings.NewReplacer(
// Escaper escapes a string for use in a GJSON path.
var Escaper = strings.NewReplacer(
`\`, `\\`,
".", `\.`,
"|", `\|`,
@@ -19,10 +20,11 @@ var GJSONEscaper = strings.NewReplacer(
"*", `\*`,
"?", `\?`)
func GJSONPath(path ...string) string {
// Path returns a GJSON path pointing at a nested object, with each provided string being a key.
func Path(path ...string) string {
var result strings.Builder
for i, part := range path {
_, _ = GJSONEscaper.WriteString(&result, part)
_, _ = Escaper.WriteString(&result, part)
if i < len(path)-1 {
result.WriteRune('.')
}

21
vendor/go.mau.fi/util/random/bytes.go vendored Normal file
View File

@@ -0,0 +1,21 @@
// Copyright (c) 2023 Tulir Asokan
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package random
import (
"crypto/rand"
)
// Bytes generates the given amount of random bytes using crypto/rand, and panics if it fails.
func Bytes(n int) []byte {
data := make([]byte, n)
_, err := rand.Read(data)
if err != nil {
panic(err)
}
return data
}

87
vendor/go.mau.fi/util/random/string.go vendored Normal file
View File

@@ -0,0 +1,87 @@
// Copyright (c) 2023 Tulir Asokan
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
package random
import (
"encoding/binary"
"hash/crc32"
"strings"
"unsafe"
)
const letters = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
// StringBytes generates a random string of the given length and returns it as a byte array.
func StringBytes(n int) []byte {
if n <= 0 {
return []byte{}
}
input := Bytes(n * 2)
for i := 0; i < n; i++ {
// Risk of modulo bias is only 2 in 65535, values between 0 and 65533 are uniformly distributed
input[i] = letters[binary.BigEndian.Uint16(input[i*2:])%uint16(len(letters))]
}
input = input[:n]
return input
}
// String generates a random string of the given length.
func String(n int) string {
if n <= 0 {
return ""
}
str := StringBytes(n)
return *(*string)(unsafe.Pointer(&str))
}
func base62Encode(val uint32, minWidth int) []byte {
out := make([]byte, 0, minWidth)
for val > 0 {
out = append(out, letters[val%uint32(len(letters))])
val /= 62
}
if len(out) < minWidth {
paddedOut := make([]byte, minWidth)
copy(paddedOut[minWidth-len(out):], out)
for i := 0; i < minWidth-len(out); i++ {
paddedOut[i] = '0'
}
out = paddedOut
}
return out
}
// Token generates a GitHub-style token with the given prefix, a random part, and a checksum at the end.
// The format is `prefix_random_checksum`. The checksum is always 6 characters.
func Token(namespace string, randomLength int) string {
token := make([]byte, len(namespace)+1+randomLength+1+6)
copy(token, namespace)
token[len(namespace)] = '_'
copy(token[len(namespace)+1:], StringBytes(randomLength))
token[len(namespace)+randomLength+1] = '_'
checksum := base62Encode(crc32.ChecksumIEEE(token[:len(token)-7]), 6)
copy(token[len(token)-6:], checksum)
return *(*string)(unsafe.Pointer(&token))
}
// GetTokenPrefix parses the given token generated with Token, validates the checksum and returns the prefix namespace.
func GetTokenPrefix(token string) string {
parts := strings.Split(token, "_")
if len(parts) != 3 {
return ""
}
checksum := base62Encode(crc32.ChecksumIEEE([]byte(parts[0]+"_"+parts[1])), 6)
if string(checksum) != parts[2] {
return ""
}
return parts[0]
}
// IsToken checks whether the given token is a valid token generated by Token with the given namespace.
func IsToken(namespace, token string) bool {
return GetTokenPrefix(token) == namespace
}
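
The random package replaces the old util.RandomString helpers; Token, GetTokenPrefix, and IsToken add GitHub-style namespaced tokens with a 6-character CRC32 checksum. A quick round trip — the "pm" namespace is only an example:

package main

import (
	"fmt"

	"go.mau.fi/util/random"
)

func main() {
	id := random.String(12)         // what DoTxn now uses for db_txn_id
	token := random.Token("pm", 40) // "pm_<40 random chars>_<6 char checksum>"
	fmt.Println(id, token)
	fmt.Println(random.GetTokenPrefix(token)) // "pm" (checksum verified)
	fmt.Println(random.IsToken("pm", token))  // true
	fmt.Println(random.IsToken("gh", token))  // false: namespace does not match
}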

View File

@@ -0,0 +1,53 @@
// Copyright (c) 2021 Dillon Dixon
// Copyright (c) 2023 Tulir Asokan
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
// Package retryafter contains a utility function for parsing the Retry-After HTTP header.
package retryafter
import (
"net/http"
"strconv"
"time"
)
var now = time.Now
// Parse parses the backoff time specified in the Retry-After header if present.
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After.
//
// The second parameter is the fallback duration to use if the header is not present or invalid.
//
// Example:
//
// time.Sleep(retryafter.Parse(resp.Header.Get("Retry-After"), 5*time.Second))
func Parse(retryAfter string, fallback time.Duration) time.Duration {
if retryAfter == "" {
return fallback
} else if t, err := time.Parse(http.TimeFormat, retryAfter); err == nil {
return t.Sub(now())
} else if seconds, err := strconv.Atoi(retryAfter); err == nil {
return time.Duration(seconds) * time.Second
}
return fallback
}
// Should returns true if the given status code indicates that the request should be retried.
//
// if retryafter.Should(resp.StatusCode, true) {
// time.Sleep(retryafter.Parse(resp.Header.Get("Retry-After"), 5*time.Second))
// }
func Should(statusCode int, retryOnRateLimit bool) bool {
switch statusCode {
case http.StatusBadGateway, http.StatusServiceUnavailable, http.StatusGatewayTimeout:
return true
case http.StatusTooManyRequests:
return retryOnRateLimit
default:
return false
}
}
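
Parse and Should together give the usual retry loop around an HTTP call. A minimal sketch, assuming the import path go.mau.fi/util/retryafter (matching the other vendored util packages) and an arbitrary retry cap:

package main

import (
	"net/http"
	"time"

	"go.mau.fi/util/retryafter"
)

func getWithRetry(url string) (*http.Response, error) {
	for attempt := 0; ; attempt++ {
		resp, err := http.Get(url)
		if err != nil {
			return nil, err
		}
		if !retryafter.Should(resp.StatusCode, true) || attempt >= 3 {
			return resp, nil
		}
		resp.Body.Close()
		// Honour Retry-After when the server sends it; otherwise wait 5 seconds.
		time.Sleep(retryafter.Parse(resp.Header.Get("Retry-After"), 5*time.Second))
	}
}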

View File

@@ -49,7 +49,8 @@ var supportedKexAlgos = []string{
// P384 and P521 are not constant-time yet, but since we don't
// reuse ephemeral keys, using them for ECDH should be OK.
kexAlgoECDH256, kexAlgoECDH384, kexAlgoECDH521,
kexAlgoDH14SHA256, kexAlgoDH14SHA1, kexAlgoDH1SHA1,
kexAlgoDH14SHA256, kexAlgoDH16SHA512, kexAlgoDH14SHA1,
kexAlgoDH1SHA1,
}
// serverForbiddenKexAlgos contains key exchange algorithms, that are forbidden
@@ -59,8 +60,9 @@ var serverForbiddenKexAlgos = map[string]struct{}{
kexAlgoDHGEXSHA256: {}, // server half implementation is only minimal to satisfy the automated tests
}
// preferredKexAlgos specifies the default preference for key-exchange algorithms
// in preference order.
// preferredKexAlgos specifies the default preference for key-exchange
// algorithms in preference order. The diffie-hellman-group16-sha512 algorithm
// is disabled by default because it is a bit slower than the others.
var preferredKexAlgos = []string{
kexAlgoCurve25519SHA256, kexAlgoCurve25519SHA256LibSSH,
kexAlgoECDH256, kexAlgoECDH384, kexAlgoECDH521,
@@ -70,12 +72,12 @@ var preferredKexAlgos = []string{
// supportedHostKeyAlgos specifies the supported host-key algorithms (i.e. methods
// of authenticating servers) in preference order.
var supportedHostKeyAlgos = []string{
CertAlgoRSASHA512v01, CertAlgoRSASHA256v01,
CertAlgoRSASHA256v01, CertAlgoRSASHA512v01,
CertAlgoRSAv01, CertAlgoDSAv01, CertAlgoECDSA256v01,
CertAlgoECDSA384v01, CertAlgoECDSA521v01, CertAlgoED25519v01,
KeyAlgoECDSA256, KeyAlgoECDSA384, KeyAlgoECDSA521,
KeyAlgoRSASHA512, KeyAlgoRSASHA256,
KeyAlgoRSASHA256, KeyAlgoRSASHA512,
KeyAlgoRSA, KeyAlgoDSA,
KeyAlgoED25519,
@@ -85,7 +87,7 @@ var supportedHostKeyAlgos = []string{
// This is based on RFC 4253, section 6.4, but with hmac-md5 variants removed
// because they have reached the end of their useful life.
var supportedMACs = []string{
"hmac-sha2-512-etm@openssh.com", "hmac-sha2-256-etm@openssh.com", "hmac-sha2-256", "hmac-sha1", "hmac-sha1-96",
"hmac-sha2-256-etm@openssh.com", "hmac-sha2-512-etm@openssh.com", "hmac-sha2-256", "hmac-sha2-512", "hmac-sha1", "hmac-sha1-96",
}
var supportedCompressions = []string{compressionNone}
@@ -119,6 +121,13 @@ func algorithmsForKeyFormat(keyFormat string) []string {
}
}
// isRSA returns whether algo is a supported RSA algorithm, including certificate
// algorithms.
func isRSA(algo string) bool {
algos := algorithmsForKeyFormat(KeyAlgoRSA)
return contains(algos, underlyingAlgo(algo))
}
// supportedPubKeyAuthAlgos specifies the supported client public key
// authentication algorithms. Note that this doesn't include certificate types
// since those use the underlying algorithm. This list is sent to the client if
@@ -262,16 +271,16 @@ type Config struct {
// unspecified, a size suitable for the chosen cipher is used.
RekeyThreshold uint64
// The allowed key exchanges algorithms. If unspecified then a
// default set of algorithms is used.
// The allowed key exchanges algorithms. If unspecified then a default set
// of algorithms is used. Unsupported values are silently ignored.
KeyExchanges []string
// The allowed cipher algorithms. If unspecified then a sensible
// default is used.
// The allowed cipher algorithms. If unspecified then a sensible default is
// used. Unsupported values are silently ignored.
Ciphers []string
// The allowed MAC algorithms. If unspecified then a sensible default
// is used.
// The allowed MAC algorithms. If unspecified then a sensible default is
// used. Unsupported values are silently ignored.
MACs []string
}
@@ -288,7 +297,7 @@ func (c *Config) SetDefaults() {
var ciphers []string
for _, c := range c.Ciphers {
if cipherModes[c] != nil {
// reject the cipher if we have no cipherModes definition
// Ignore the cipher if we have no cipherModes definition.
ciphers = append(ciphers, c)
}
}
@@ -297,10 +306,26 @@ func (c *Config) SetDefaults() {
if c.KeyExchanges == nil {
c.KeyExchanges = preferredKexAlgos
}
var kexs []string
for _, k := range c.KeyExchanges {
if kexAlgoMap[k] != nil {
// Ignore the KEX if we have no kexAlgoMap definition.
kexs = append(kexs, k)
}
}
c.KeyExchanges = kexs
if c.MACs == nil {
c.MACs = supportedMACs
}
var macs []string
for _, m := range c.MACs {
if macModes[m] != nil {
// Ignore the MAC if we have no macModes definition.
macs = append(macs, m)
}
}
c.MACs = macs
if c.RekeyThreshold == 0 {
// cipher specific default
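
For callers the upshot is that diffie-hellman-group16-sha512 can be requested explicitly (it is deliberately left out of the defaults for speed) and unknown KeyExchanges/Ciphers/MACs entries are now dropped by SetDefaults instead of surviving into the handshake. A hedged client-side sketch with placeholder host, user, and credentials:

package main

import (
	"golang.org/x/crypto/ssh"
)

func dial() (*ssh.Client, error) {
	cfg := &ssh.ClientConfig{
		User:            "git",
		Auth:            []ssh.AuthMethod{ssh.Password("placeholder")},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // fine for a sketch, never for production
		Config: ssh.Config{
			// group16-sha512 is supported but not preferred by default; the made-up
			// "experimental-kex" entry is silently ignored rather than kept.
			KeyExchanges: []string{"curve25519-sha256", "diffie-hellman-group16-sha512", "experimental-kex"},
			MACs:         []string{"hmac-sha2-512-etm@openssh.com", "hmac-sha2-512"},
		},
	}
	return ssh.Dial("tcp", "example.com:22", cfg)
}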

View File

@@ -23,6 +23,7 @@ const (
kexAlgoDH1SHA1 = "diffie-hellman-group1-sha1"
kexAlgoDH14SHA1 = "diffie-hellman-group14-sha1"
kexAlgoDH14SHA256 = "diffie-hellman-group14-sha256"
kexAlgoDH16SHA512 = "diffie-hellman-group16-sha512"
kexAlgoECDH256 = "ecdh-sha2-nistp256"
kexAlgoECDH384 = "ecdh-sha2-nistp384"
kexAlgoECDH521 = "ecdh-sha2-nistp521"
@@ -430,6 +431,17 @@ func init() {
hashFunc: crypto.SHA256,
}
// This is the group called diffie-hellman-group16-sha512 in RFC
// 8268 and Oakley Group 16 in RFC 3526.
p, _ = new(big.Int).SetString("FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF", 16)
kexAlgoMap[kexAlgoDH16SHA512] = &dhGroup{
g: new(big.Int).SetInt64(2),
p: p,
pMinus1: new(big.Int).Sub(p, bigOne),
hashFunc: crypto.SHA512,
}
kexAlgoMap[kexAlgoECDH521] = &ecdh{elliptic.P521()}
kexAlgoMap[kexAlgoECDH384] = &ecdh{elliptic.P384()}
kexAlgoMap[kexAlgoECDH256] = &ecdh{elliptic.P256()}

View File

@@ -53,6 +53,9 @@ var macModes = map[string]*macMode{
"hmac-sha2-256-etm@openssh.com": {32, true, func(key []byte) hash.Hash {
return hmac.New(sha256.New, key)
}},
"hmac-sha2-512": {64, false, func(key []byte) hash.Hash {
return hmac.New(sha512.New, key)
}},
"hmac-sha2-256": {32, false, func(key []byte) hash.Hash {
return hmac.New(sha256.New, key)
}},

View File

@@ -370,6 +370,25 @@ func gssExchangeToken(gssapiConfig *GSSAPIWithMICConfig, firstToken []byte, s *c
return authErr, perms, nil
}
// isAlgoCompatible checks if the signature format is compatible with the
// selected algorithm taking into account edge cases that occur with old
// clients.
func isAlgoCompatible(algo, sigFormat string) bool {
// Compatibility for old clients.
//
// For certificate authentication with OpenSSH 7.2-7.7 signature format can
// be rsa-sha2-256 or rsa-sha2-512 for the algorithm
// ssh-rsa-cert-v01@openssh.com.
//
// With gpg-agent < 2.2.6 the algorithm can be rsa-sha2-256 or rsa-sha2-512
// for signature format ssh-rsa.
if isRSA(algo) && isRSA(sigFormat) {
return true
}
// Standard case: the underlying algorithm must match the signature format.
return underlyingAlgo(algo) == sigFormat
}
// ServerAuthError represents server authentication errors and is
// sometimes returned by NewServerConn. It appends any authentication
// errors that may occur, and is returned if all of the authentication
@@ -567,7 +586,7 @@ userAuthLoop:
authErr = fmt.Errorf("ssh: algorithm %q not accepted", sig.Format)
break
}
if underlyingAlgo(algo) != sig.Format {
if !isAlgoCompatible(algo, sig.Format) {
authErr = fmt.Errorf("ssh: signature %q not compatible with selected algorithm %q", sig.Format, algo)
break
}

View File

@@ -194,9 +194,8 @@ func render1(w writer, n *Node) error {
}
}
// Render any child nodes.
switch n.Data {
case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
// Render any child nodes
if childTextNodesAreLiteral(n) {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if c.Type == TextNode {
if _, err := w.WriteString(c.Data); err != nil {
@@ -213,7 +212,7 @@ func render1(w writer, n *Node) error {
// last element in the file, with no closing tag.
return plaintextAbort
}
default:
} else {
for c := n.FirstChild; c != nil; c = c.NextSibling {
if err := render1(w, c); err != nil {
return err
@@ -231,6 +230,27 @@ func render1(w writer, n *Node) error {
return w.WriteByte('>')
}
func childTextNodesAreLiteral(n *Node) bool {
// Per WHATWG HTML 13.3, if the parent of the current node is a style,
// script, xmp, iframe, noembed, noframes, or plaintext element, and the
// current node is a text node, append the value of the node's data
// literally. The specification is not explicit about it, but we only
// enforce this if we are in the HTML namespace (i.e. when the namespace is
// "").
// NOTE: we also always include noscript elements, although the
// specification states that they should only be rendered as such if
// scripting is enabled for the node (which is not something we track).
if n.Namespace != "" {
return false
}
switch n.Data {
case "iframe", "noembed", "noframes", "noscript", "plaintext", "script", "style", "xmp":
return true
default:
return false
}
}
// writeQuoted writes s to w surrounded by quotes. Normally it will use double
// quotes, but if s contains a double quote, it will use single quotes.
// It is used for writing the identifiers in a doctype declaration.

View File

@@ -913,7 +913,14 @@ func (z *Tokenizer) readTagAttrKey() {
case ' ', '\n', '\r', '\t', '\f', '/':
z.pendingAttr[0].end = z.raw.end - 1
return
case '=', '>':
case '=':
if z.pendingAttr[0].start+1 == z.raw.end {
// WHATWG 13.2.5.32, if we see an equals sign before the attribute name
// begins, we treat it as a character in the attribute name and continue.
continue
}
fallthrough
case '>':
z.raw.end--
z.pendingAttr[0].end = z.raw.end
return

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -4,7 +4,7 @@ package publicsuffix
import _ "embed"
const version = "publicsuffix.org's public_suffix_list.dat, git revision e248cbc92a527a166454afe9914c4c1b4253893f (2022-11-15T18:02:38Z)"
const version = "publicsuffix.org's public_suffix_list.dat, git revision 63cbc63d470d7b52c35266aa96c4c98c96ec499c (2023-08-03T10:01:25Z)"
const (
nodesBits = 40
@@ -26,7 +26,7 @@ const (
)
// numTLD is the number of top level domains.
const numTLD = 1494
const numTLD = 1474
// text is the combined text of all labels.
//
@@ -63,8 +63,8 @@ var nodes uint40String
//go:embed data/children
var children uint32String
// max children 718 (capacity 1023)
// max text offset 32976 (capacity 65535)
// max text length 36 (capacity 63)
// max hi 9656 (capacity 16383)
// max lo 9651 (capacity 16383)
// max children 743 (capacity 1023)
// max text offset 30876 (capacity 65535)
// max text length 31 (capacity 63)
// max hi 9322 (capacity 16383)
// max lo 9317 (capacity 16383)

5
vendor/golang.org/x/sys/cpu/cpu.go generated vendored
View File

@@ -38,7 +38,7 @@ var X86 struct {
HasAVX512F bool // Advanced vector extension 512 Foundation Instructions
HasAVX512CD bool // Advanced vector extension 512 Conflict Detection Instructions
HasAVX512ER bool // Advanced vector extension 512 Exponential and Reciprocal Instructions
HasAVX512PF bool // Advanced vector extension 512 Prefetch Instructions Instructions
HasAVX512PF bool // Advanced vector extension 512 Prefetch Instructions
HasAVX512VL bool // Advanced vector extension 512 Vector Length Extensions
HasAVX512BW bool // Advanced vector extension 512 Byte and Word Instructions
HasAVX512DQ bool // Advanced vector extension 512 Doubleword and Quadword Instructions
@@ -54,6 +54,9 @@ var X86 struct {
HasAVX512VBMI2 bool // Advanced vector extension 512 Vector Byte Manipulation Instructions 2
HasAVX512BITALG bool // Advanced vector extension 512 Bit Algorithms
HasAVX512BF16 bool // Advanced vector extension 512 BFloat16 Instructions
HasAMXTile bool // Advanced Matrix Extension Tile instructions
HasAMXInt8 bool // Advanced Matrix Extension Int8 instructions
HasAMXBF16 bool // Advanced Matrix Extension BFloat16 instructions
HasBMI1 bool // Bit manipulation instruction set 1
HasBMI2 bool // Bit manipulation instruction set 2
HasCX16 bool // Compare and exchange 16 Bytes

View File

@@ -37,6 +37,9 @@ func initOptions() {
{Name: "avx512vbmi2", Feature: &X86.HasAVX512VBMI2},
{Name: "avx512bitalg", Feature: &X86.HasAVX512BITALG},
{Name: "avx512bf16", Feature: &X86.HasAVX512BF16},
{Name: "amxtile", Feature: &X86.HasAMXTile},
{Name: "amxint8", Feature: &X86.HasAMXInt8},
{Name: "amxbf16", Feature: &X86.HasAMXBF16},
{Name: "bmi1", Feature: &X86.HasBMI1},
{Name: "bmi2", Feature: &X86.HasBMI2},
{Name: "cx16", Feature: &X86.HasCX16},
@@ -138,6 +141,10 @@ func archInit() {
eax71, _, _, _ := cpuid(7, 1)
X86.HasAVX512BF16 = isSet(5, eax71)
}
X86.HasAMXTile = isSet(24, edx7)
X86.HasAMXInt8 = isSet(25, edx7)
X86.HasAMXBF16 = isSet(22, edx7)
}
func isSet(bitpos uint, value uint32) bool {
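
The new AMX bits surface through the exported X86 struct like every other feature flag; a trivial runtime check on 386/amd64:

package main

import (
	"fmt"

	"golang.org/x/sys/cpu"
)

func main() {
	// Filled in by archInit at package init time.
	fmt.Println("AMX tile:", cpu.X86.HasAMXTile)
	fmt.Println("AMX int8:", cpu.X86.HasAMXInt8)
	fmt.Println("AMX bf16:", cpu.X86.HasAMXBF16)
}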

View File

@@ -519,7 +519,7 @@ ccflags="$@"
$2 ~ /^LOCK_(SH|EX|NB|UN)$/ ||
$2 ~ /^LO_(KEY|NAME)_SIZE$/ ||
$2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT|UDP)_/ ||
$2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MREMAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT|UDP)_/ ||
$2 ~ /^NFC_(GENL|PROTO|COMM|RF|SE|DIRECTION|LLCP|SOCKPROTO)_/ ||
$2 ~ /^NFC_.*_(MAX)?SIZE$/ ||
$2 ~ /^RAW_PAYLOAD_/ ||
@@ -583,6 +583,7 @@ ccflags="$@"
$2 ~ /^PERF_/ ||
$2 ~ /^SECCOMP_MODE_/ ||
$2 ~ /^SEEK_/ ||
$2 ~ /^SCHED_/ ||
$2 ~ /^SPLICE_/ ||
$2 ~ /^SYNC_FILE_RANGE_/ ||
$2 !~ /IOC_MAGIC/ &&
@@ -624,7 +625,7 @@ ccflags="$@"
$2 ~ /^MEM/ ||
$2 ~ /^WG/ ||
$2 ~ /^FIB_RULE_/ ||
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE)/ {printf("\t%s = C.%s\n", $2, $2)}
$2 ~ /^BLK[A-Z]*(GET$|SET$|BUF$|PART$|SIZE|IOMIN$|IOOPT$|ALIGNOFF$|DISCARD|ROTATIONAL$|ZEROOUT$|GETDISKSEQ$)/ {printf("\t%s = C.%s\n", $2, $2)}
$2 ~ /^__WCOREFLAG$/ {next}
$2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)}

14
vendor/golang.org/x/sys/unix/mmap_nomremap.go generated vendored Normal file
View File

@@ -0,0 +1,14 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build aix || darwin || dragonfly || freebsd || openbsd || solaris
// +build aix darwin dragonfly freebsd openbsd solaris
package unix
var mapper = &mmapper{
active: make(map[*byte][]byte),
mmap: mmap,
munmap: munmap,
}

53
vendor/golang.org/x/sys/unix/mremap.go generated vendored Normal file
View File

@@ -0,0 +1,53 @@
// Copyright 2023 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build linux || netbsd
// +build linux netbsd
package unix
import "unsafe"
type mremapMmapper struct {
mmapper
mremap func(oldaddr uintptr, oldlength uintptr, newlength uintptr, flags int, newaddr uintptr) (xaddr uintptr, err error)
}
var mapper = &mremapMmapper{
mmapper: mmapper{
active: make(map[*byte][]byte),
mmap: mmap,
munmap: munmap,
},
mremap: mremap,
}
func (m *mremapMmapper) Mremap(oldData []byte, newLength int, flags int) (data []byte, err error) {
if newLength <= 0 || len(oldData) == 0 || len(oldData) != cap(oldData) || flags&mremapFixed != 0 {
return nil, EINVAL
}
pOld := &oldData[cap(oldData)-1]
m.Lock()
defer m.Unlock()
bOld := m.active[pOld]
if bOld == nil || &bOld[0] != &oldData[0] {
return nil, EINVAL
}
newAddr, errno := m.mremap(uintptr(unsafe.Pointer(&bOld[0])), uintptr(len(bOld)), uintptr(newLength), flags, 0)
if errno != nil {
return nil, errno
}
bNew := unsafe.Slice((*byte)(unsafe.Pointer(newAddr)), newLength)
pNew := &bNew[cap(bNew)-1]
if flags&mremapDontunmap == 0 {
delete(m.active, pOld)
}
m.active[pNew] = bNew
return bNew, nil
}
func Mremap(oldData []byte, newLength int, flags int) (data []byte, err error) {
return mapper.Mremap(oldData, newLength, flags)
}
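
Mremap only accepts slices that this package mapped itself (it looks the backing pointer up in mapper.active) and rejects MREMAP_FIXED, so the flow is Mmap, then Mremap, then Munmap. A Linux-only sketch:

//go:build linux

package main

import (
	"fmt"

	"golang.org/x/sys/unix"
)

func main() {
	// Anonymous 4 KiB mapping, tracked by the package's internal mapper.
	data, err := unix.Mmap(-1, 0, 4096, unix.PROT_READ|unix.PROT_WRITE, unix.MAP_PRIVATE|unix.MAP_ANONYMOUS)
	if err != nil {
		panic(err)
	}
	// Grow to 8 KiB, letting the kernel move the mapping if it has to.
	data, err = unix.Mremap(data, 8192, unix.MREMAP_MAYMOVE)
	if err != nil {
		panic(err)
	}
	fmt.Println("new length:", len(data))
	_ = unix.Munmap(data)
}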

View File

@@ -535,21 +535,6 @@ func Fsync(fd int) error {
//sys sendmsg(s int, msg *Msghdr, flags int) (n int, err error) = nsendmsg
//sys munmap(addr uintptr, length uintptr) (err error)
var mapper = &mmapper{
active: make(map[*byte][]byte),
mmap: mmap,
munmap: munmap,
}
func Mmap(fd int, offset int64, length int, prot int, flags int) (data []byte, err error) {
return mapper.Mmap(fd, offset, length, prot, flags)
}
func Munmap(b []byte) (err error) {
return mapper.Munmap(b)
}
//sys Madvise(b []byte, advice int) (err error)
//sys Mprotect(b []byte, prot int) (err error)
//sys Mlock(b []byte) (err error)

View File

@@ -601,20 +601,6 @@ func Poll(fds []PollFd, timeout int) (n int, err error) {
// Gethostuuid(uuid *byte, timeout *Timespec) (err error)
// Ptrace(req int, pid int, addr uintptr, data int) (ret uintptr, err error)
var mapper = &mmapper{
active: make(map[*byte][]byte),
mmap: mmap,
munmap: munmap,
}
func Mmap(fd int, offset int64, length int, prot int, flags int) (data []byte, err error) {
return mapper.Mmap(fd, offset, length, prot, flags)
}
func Munmap(b []byte) (err error) {
return mapper.Munmap(b)
}
//sys Madvise(b []byte, behav int) (err error)
//sys Mlock(b []byte) (err error)
//sys Mlockall(flags int) (err error)

Some files were not shown because too many files have changed in this diff