Mirror of https://github.com/caddyserver/caddy.git (synced 2024-12-26 05:33:49 +03:00)

caddyfile: Normalize & flatten all unmarshalers (#6037)

parent 54823f52bc
commit 750d0b8331

44 changed files with 3026 additions and 3013 deletions
@ -92,30 +92,26 @@ func FormattingDifference(filename string, body []byte) (caddyconfig.Warning, bo
 	}, true
 }
 
-// Unmarshaler is a type that can unmarshal
-// Caddyfile tokens to set itself up for a
-// JSON encoding. The goal of an unmarshaler
-// is not to set itself up for actual use,
-// but to set itself up for being marshaled
-// into JSON. Caddyfile-unmarshaled values
-// will not be used directly; they will be
-// encoded as JSON and then used from that.
-// Implementations must be able to support
-// multiple segments (instances of their
-// directive or batch of tokens); typically
-// this means wrapping all token logic in
-// a loop: `for d.Next() { ... }`.
+// Unmarshaler is a type that can unmarshal Caddyfile tokens to
+// set itself up for a JSON encoding. The goal of an unmarshaler
+// is not to set itself up for actual use, but to set itself up for
+// being marshaled into JSON. Caddyfile-unmarshaled values will not
+// be used directly; they will be encoded as JSON and then used from
+// that. Implementations _may_ be able to support multiple segments
+// (instances of their directive or batch of tokens); typically this
+// means wrapping parsing logic in a loop: `for d.Next() { ... }`.
+// More commonly, only a single segment is supported, so a simple
+// `d.Next()` at the start should be used to consume the module
+// identifier token (directive name, etc).
 type Unmarshaler interface {
 	UnmarshalCaddyfile(d *Dispenser) error
 }
 
 // ServerType is a type that can evaluate a Caddyfile and set up a caddy config.
 type ServerType interface {
-	// Setup takes the server blocks which
-	// contain tokens, as well as options
-	// (e.g. CLI flags) and creates a Caddy
-	// config, along with any warnings or
-	// an error.
+	// Setup takes the server blocks which contain tokens,
+	// as well as options (e.g. CLI flags) and creates a
+	// Caddy config, along with any warnings or an error.
 	Setup([]ServerBlock, map[string]any) (*caddy.Config, []caddyconfig.Warning, error)
 }
 
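For reference, a minimal sketch of the flattened convention the new doc comment describes: a single d.Next() consumes the directive name, and no outer `for d.Next()` loop wraps the parsing logic. The Gizmo module and its options are hypothetical, not part of this commit.

```go
package gizmo

import (
	"strconv"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

// Gizmo is a hypothetical module used only to illustrate the pattern.
type Gizmo struct {
	Name  string `json:"name,omitempty"`
	Level int    `json:"level,omitempty"`
}

// UnmarshalCaddyfile implements caddyfile.Unmarshaler using the
// flattened convention: consume the directive name once, then parse
// the optional argument and the block.
func (g *Gizmo) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume directive name

	// optional single argument on the same line
	if d.NextArg() {
		g.Name = d.Val()
	}

	// sub-options inside the block, if any
	for d.NextBlock(0) {
		switch d.Val() {
		case "level":
			if !d.NextArg() {
				return d.ArgErr()
			}
			lvl, err := strconv.Atoi(d.Val())
			if err != nil {
				return d.Errf("parsing level: %v", err)
			}
			g.Level = lvl
		default:
			return d.Errf("unrecognized option '%s'", d.Val())
		}
	}
	return nil
}
```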
(File diff suppressed because it is too large)
@ -271,12 +271,6 @@ func (h Helper) GroupRoutes(vals []ConfigValue) {
 	}
 }
 
-// NewBindAddresses returns config values relevant to adding
-// listener bind addresses to the config.
-func (h Helper) NewBindAddresses(addrs []string) []ConfigValue {
-	return []ConfigValue{{Class: "bind", Value: addrs}}
-}
-
 // WithDispenser returns a new instance based on d. All others Helper
 // fields are copied, so typically maps are shared with this new instance.
 func (h Helper) WithDispenser(d *caddyfile.Dispenser) Helper {
@ -1381,68 +1381,73 @@ func (st *ServerType) compileEncodedMatcherSets(sblock serverBlock) ([]caddy.Mod
|
|||
}
|
||||
|
||||
func parseMatcherDefinitions(d *caddyfile.Dispenser, matchers map[string]caddy.ModuleMap) error {
|
||||
for d.Next() {
|
||||
// this is the "name" for "named matchers"
|
||||
definitionName := d.Val()
|
||||
d.Next() // advance to the first token
|
||||
|
||||
if _, ok := matchers[definitionName]; ok {
|
||||
return fmt.Errorf("matcher is defined more than once: %s", definitionName)
|
||||
// this is the "name" for "named matchers"
|
||||
definitionName := d.Val()
|
||||
|
||||
if _, ok := matchers[definitionName]; ok {
|
||||
return fmt.Errorf("matcher is defined more than once: %s", definitionName)
|
||||
}
|
||||
matchers[definitionName] = make(caddy.ModuleMap)
|
||||
|
||||
// given a matcher name and the tokens following it, parse
|
||||
// the tokens as a matcher module and record it
|
||||
makeMatcher := func(matcherName string, tokens []caddyfile.Token) error {
|
||||
mod, err := caddy.GetModule("http.matchers." + matcherName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting matcher module '%s': %v", matcherName, err)
|
||||
}
|
||||
matchers[definitionName] = make(caddy.ModuleMap)
|
||||
unm, ok := mod.New().(caddyfile.Unmarshaler)
|
||||
if !ok {
|
||||
return fmt.Errorf("matcher module '%s' is not a Caddyfile unmarshaler", matcherName)
|
||||
}
|
||||
err = unm.UnmarshalCaddyfile(caddyfile.NewDispenser(tokens))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rm, ok := unm.(caddyhttp.RequestMatcher)
|
||||
if !ok {
|
||||
return fmt.Errorf("matcher module '%s' is not a request matcher", matcherName)
|
||||
}
|
||||
matchers[definitionName][matcherName] = caddyconfig.JSON(rm, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
// given a matcher name and the tokens following it, parse
|
||||
// the tokens as a matcher module and record it
|
||||
makeMatcher := func(matcherName string, tokens []caddyfile.Token) error {
|
||||
mod, err := caddy.GetModule("http.matchers." + matcherName)
|
||||
if err != nil {
|
||||
return fmt.Errorf("getting matcher module '%s': %v", matcherName, err)
|
||||
}
|
||||
unm, ok := mod.New().(caddyfile.Unmarshaler)
|
||||
if !ok {
|
||||
return fmt.Errorf("matcher module '%s' is not a Caddyfile unmarshaler", matcherName)
|
||||
}
|
||||
err = unm.UnmarshalCaddyfile(caddyfile.NewDispenser(tokens))
|
||||
// if the next token is quoted, we can assume it's not a matcher name
|
||||
// and that it's probably an 'expression' matcher
|
||||
if d.NextArg() {
|
||||
if d.Token().Quoted() {
|
||||
// since it was missing the matcher name, we insert a token
|
||||
// in front of the expression token itself
|
||||
err := makeMatcher("expression", []caddyfile.Token{
|
||||
{Text: "expression", File: d.File(), Line: d.Line()},
|
||||
d.Token(),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rm, ok := unm.(caddyhttp.RequestMatcher)
|
||||
if !ok {
|
||||
return fmt.Errorf("matcher module '%s' is not a request matcher", matcherName)
|
||||
}
|
||||
matchers[definitionName][matcherName] = caddyconfig.JSON(rm, nil)
|
||||
return nil
|
||||
}
|
||||
|
||||
// if the next token is quoted, we can assume it's not a matcher name
|
||||
// and that it's probably an 'expression' matcher
|
||||
if d.NextArg() {
|
||||
if d.Token().Quoted() {
|
||||
err := makeMatcher("expression", []caddyfile.Token{d.Token()})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
// if it wasn't quoted, then we need to rewind after calling
|
||||
// d.NextArg() so the below properly grabs the matcher name
|
||||
d.Prev()
|
||||
}
|
||||
|
||||
// if it wasn't quoted, then we need to rewind after calling
|
||||
// d.NextArg() so the below properly grabs the matcher name
|
||||
d.Prev()
|
||||
}
|
||||
|
||||
// in case there are multiple instances of the same matcher, concatenate
|
||||
// their tokens (we expect that UnmarshalCaddyfile should be able to
|
||||
// handle more than one segment); otherwise, we'd overwrite other
|
||||
// instances of the matcher in this set
|
||||
tokensByMatcherName := make(map[string][]caddyfile.Token)
|
||||
for nesting := d.Nesting(); d.NextArg() || d.NextBlock(nesting); {
|
||||
matcherName := d.Val()
|
||||
tokensByMatcherName[matcherName] = append(tokensByMatcherName[matcherName], d.NextSegment()...)
|
||||
}
|
||||
for matcherName, tokens := range tokensByMatcherName {
|
||||
err := makeMatcher(matcherName, tokens)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// in case there are multiple instances of the same matcher, concatenate
|
||||
// their tokens (we expect that UnmarshalCaddyfile should be able to
|
||||
// handle more than one segment); otherwise, we'd overwrite other
|
||||
// instances of the matcher in this set
|
||||
tokensByMatcherName := make(map[string][]caddyfile.Token)
|
||||
for nesting := d.Nesting(); d.NextArg() || d.NextBlock(nesting); {
|
||||
matcherName := d.Val()
|
||||
tokensByMatcherName[matcherName] = append(tokensByMatcherName[matcherName], d.NextSegment()...)
|
||||
}
|
||||
for matcherName, tokens := range tokensByMatcherName {
|
||||
err := makeMatcher(matcherName, tokens)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
@ -62,105 +62,103 @@ func init() {
|
|||
func parseOptTrue(d *caddyfile.Dispenser, _ any) (any, error) { return true, nil }
|
||||
|
||||
func parseOptHTTPPort(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
var httpPort int
|
||||
for d.Next() {
|
||||
var httpPortStr string
|
||||
if !d.AllArgs(&httpPortStr) {
|
||||
return 0, d.ArgErr()
|
||||
}
|
||||
var err error
|
||||
httpPort, err = strconv.Atoi(httpPortStr)
|
||||
if err != nil {
|
||||
return 0, d.Errf("converting port '%s' to integer value: %v", httpPortStr, err)
|
||||
}
|
||||
var httpPortStr string
|
||||
if !d.AllArgs(&httpPortStr) {
|
||||
return 0, d.ArgErr()
|
||||
}
|
||||
var err error
|
||||
httpPort, err = strconv.Atoi(httpPortStr)
|
||||
if err != nil {
|
||||
return 0, d.Errf("converting port '%s' to integer value: %v", httpPortStr, err)
|
||||
}
|
||||
return httpPort, nil
|
||||
}
|
||||
|
||||
func parseOptHTTPSPort(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
var httpsPort int
|
||||
for d.Next() {
|
||||
var httpsPortStr string
|
||||
if !d.AllArgs(&httpsPortStr) {
|
||||
return 0, d.ArgErr()
|
||||
}
|
||||
var err error
|
||||
httpsPort, err = strconv.Atoi(httpsPortStr)
|
||||
if err != nil {
|
||||
return 0, d.Errf("converting port '%s' to integer value: %v", httpsPortStr, err)
|
||||
}
|
||||
var httpsPortStr string
|
||||
if !d.AllArgs(&httpsPortStr) {
|
||||
return 0, d.ArgErr()
|
||||
}
|
||||
var err error
|
||||
httpsPort, err = strconv.Atoi(httpsPortStr)
|
||||
if err != nil {
|
||||
return 0, d.Errf("converting port '%s' to integer value: %v", httpsPortStr, err)
|
||||
}
|
||||
return httpsPort, nil
|
||||
}
|
||||
|
||||
func parseOptOrder(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
|
||||
// get directive name
|
||||
if !d.Next() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dirName := d.Val()
|
||||
if _, ok := registeredDirectives[dirName]; !ok {
|
||||
return nil, d.Errf("%s is not a registered directive", dirName)
|
||||
}
|
||||
|
||||
// get positional token
|
||||
if !d.Next() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pos := d.Val()
|
||||
|
||||
newOrder := directiveOrder
|
||||
|
||||
for d.Next() {
|
||||
// get directive name
|
||||
if !d.Next() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dirName := d.Val()
|
||||
if _, ok := registeredDirectives[dirName]; !ok {
|
||||
return nil, d.Errf("%s is not a registered directive", dirName)
|
||||
// if directive exists, first remove it
|
||||
for i, d := range newOrder {
|
||||
if d == dirName {
|
||||
newOrder = append(newOrder[:i], newOrder[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// get positional token
|
||||
if !d.Next() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pos := d.Val()
|
||||
|
||||
// if directive exists, first remove it
|
||||
for i, d := range newOrder {
|
||||
if d == dirName {
|
||||
newOrder = append(newOrder[:i], newOrder[i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// act on the positional
|
||||
switch pos {
|
||||
case "first":
|
||||
newOrder = append([]string{dirName}, newOrder...)
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
directiveOrder = newOrder
|
||||
return newOrder, nil
|
||||
case "last":
|
||||
newOrder = append(newOrder, dirName)
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
directiveOrder = newOrder
|
||||
return newOrder, nil
|
||||
case "before":
|
||||
case "after":
|
||||
default:
|
||||
return nil, d.Errf("unknown positional '%s'", pos)
|
||||
}
|
||||
|
||||
// get name of other directive
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
otherDir := d.Val()
|
||||
// act on the positional
|
||||
switch pos {
|
||||
case "first":
|
||||
newOrder = append([]string{dirName}, newOrder...)
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
directiveOrder = newOrder
|
||||
return newOrder, nil
|
||||
case "last":
|
||||
newOrder = append(newOrder, dirName)
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
directiveOrder = newOrder
|
||||
return newOrder, nil
|
||||
case "before":
|
||||
case "after":
|
||||
default:
|
||||
return nil, d.Errf("unknown positional '%s'", pos)
|
||||
}
|
||||
|
||||
// insert directive into proper position
|
||||
for i, d := range newOrder {
|
||||
if d == otherDir {
|
||||
if pos == "before" {
|
||||
newOrder = append(newOrder[:i], append([]string{dirName}, newOrder[i:]...)...)
|
||||
} else if pos == "after" {
|
||||
newOrder = append(newOrder[:i+1], append([]string{dirName}, newOrder[i+1:]...)...)
|
||||
}
|
||||
break
|
||||
// get name of other directive
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
otherDir := d.Val()
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
// insert directive into proper position
|
||||
for i, d := range newOrder {
|
||||
if d == otherDir {
|
||||
if pos == "before" {
|
||||
newOrder = append(newOrder[:i], append([]string{dirName}, newOrder[i:]...)...)
|
||||
} else if pos == "after" {
|
||||
newOrder = append(newOrder[:i+1], append([]string{dirName}, newOrder[i+1:]...)...)
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -223,57 +221,58 @@ func parseOptACMEDNS(d *caddyfile.Dispenser, _ any) (any, error) {
|
|||
|
||||
func parseOptACMEEAB(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
eab := new(acme.EAB)
|
||||
for d.Next() {
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "key_id":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
eab.KeyID = d.Val()
|
||||
|
||||
case "mac_key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
eab.MACKey = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
d.Next() // consume option name
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "key_id":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
eab.KeyID = d.Val()
|
||||
|
||||
case "mac_key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
eab.MACKey = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
return eab, nil
|
||||
}
|
||||
|
||||
func parseOptCertIssuer(d *caddyfile.Dispenser, existing any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
|
||||
var issuers []certmagic.Issuer
|
||||
if existing != nil {
|
||||
issuers = existing.([]certmagic.Issuer)
|
||||
}
|
||||
for d.Next() { // consume option name
|
||||
if !d.Next() { // get issuer module name
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
modID := "tls.issuance." + d.Val()
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
iss, ok := unm.(certmagic.Issuer)
|
||||
if !ok {
|
||||
return nil, d.Errf("module %s (%T) is not a certmagic.Issuer", modID, unm)
|
||||
}
|
||||
issuers = append(issuers, iss)
|
||||
|
||||
// get issuer module name
|
||||
if !d.Next() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
modID := "tls.issuance." + d.Val()
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
iss, ok := unm.(certmagic.Issuer)
|
||||
if !ok {
|
||||
return nil, d.Errf("module %s (%T) is not a certmagic.Issuer", modID, unm)
|
||||
}
|
||||
issuers = append(issuers, iss)
|
||||
return issuers, nil
|
||||
}
|
||||
|
||||
func parseOptSingleString(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume parameter name
|
||||
d.Next() // consume option name
|
||||
if !d.Next() {
|
||||
return "", d.ArgErr()
|
||||
}
|
||||
|
@ -285,7 +284,7 @@ func parseOptSingleString(d *caddyfile.Dispenser, _ any) (any, error) {
 }
 
 func parseOptStringList(d *caddyfile.Dispenser, _ any) (any, error) {
-	d.Next() // consume parameter name
+	d.Next() // consume option name
 	val := d.RemainingArgs()
 	if len(val) == 0 {
 		return "", d.ArgErr()
@ -294,33 +293,33 @@ func parseOptStringList(d *caddyfile.Dispenser, _ any) (any, error) {
|
|||
}
|
||||
|
||||
func parseOptAdmin(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
|
||||
adminCfg := new(caddy.AdminConfig)
|
||||
for d.Next() {
|
||||
if d.NextArg() {
|
||||
listenAddress := d.Val()
|
||||
if listenAddress == "off" {
|
||||
adminCfg.Disabled = true
|
||||
if d.Next() { // Do not accept any remaining options including block
|
||||
return nil, d.Err("No more option is allowed after turning off admin config")
|
||||
}
|
||||
} else {
|
||||
adminCfg.Listen = listenAddress
|
||||
if d.NextArg() { // At most 1 arg is allowed
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if d.NextArg() {
|
||||
listenAddress := d.Val()
|
||||
if listenAddress == "off" {
|
||||
adminCfg.Disabled = true
|
||||
if d.Next() { // Do not accept any remaining options including block
|
||||
return nil, d.Err("No more option is allowed after turning off admin config")
|
||||
}
|
||||
} else {
|
||||
adminCfg.Listen = listenAddress
|
||||
if d.NextArg() { // At most 1 arg is allowed
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "enforce_origin":
|
||||
adminCfg.EnforceOrigin = true
|
||||
}
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "enforce_origin":
|
||||
adminCfg.EnforceOrigin = true
|
||||
|
||||
case "origins":
|
||||
adminCfg.Origins = d.RemainingArgs()
|
||||
case "origins":
|
||||
adminCfg.Origins = d.RemainingArgs()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
}
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
if adminCfg.Listen == "" && !adminCfg.Disabled {
|
||||
|
@ -330,57 +329,57 @@ func parseOptAdmin(d *caddyfile.Dispenser, _ any) (any, error) {
|
|||
}
|
||||
|
||||
func parseOptOnDemand(d *caddyfile.Dispenser, _ any) (any, error) {
|
||||
d.Next() // consume option name
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
var ond *caddytls.OnDemandConfig
|
||||
for d.Next() {
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "ask":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
ond.Ask = d.Val()
|
||||
|
||||
case "interval":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
if ond.RateLimit == nil {
|
||||
ond.RateLimit = new(caddytls.RateLimit)
|
||||
}
|
||||
ond.RateLimit.Interval = caddy.Duration(dur)
|
||||
|
||||
case "burst":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
burst, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
if ond.RateLimit == nil {
|
||||
ond.RateLimit = new(caddytls.RateLimit)
|
||||
}
|
||||
ond.RateLimit.Burst = burst
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "ask":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
ond.Ask = d.Val()
|
||||
|
||||
case "interval":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
if ond.RateLimit == nil {
|
||||
ond.RateLimit = new(caddytls.RateLimit)
|
||||
}
|
||||
ond.RateLimit.Interval = caddy.Duration(dur)
|
||||
|
||||
case "burst":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
burst, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if ond == nil {
|
||||
ond = new(caddytls.OnDemandConfig)
|
||||
}
|
||||
if ond.RateLimit == nil {
|
||||
ond.RateLimit = new(caddytls.RateLimit)
|
||||
}
|
||||
ond.RateLimit.Burst = burst
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized parameter '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
if ond == nil {
|
||||
|
@ -390,7 +389,7 @@ func parseOptOnDemand(d *caddyfile.Dispenser, _ any) (any, error) {
 }
 
 func parseOptPersistConfig(d *caddyfile.Dispenser, _ any) (any, error) {
-	d.Next() // consume parameter name
+	d.Next() // consume option name
 	if !d.Next() {
 		return "", d.ArgErr()
 	}

@ -405,7 +404,7 @@ func parseOptPersistConfig(d *caddyfile.Dispenser, _ any) (any, error) {
 }
 
 func parseOptAutoHTTPS(d *caddyfile.Dispenser, _ any) (any, error) {
-	d.Next() // consume parameter name
+	d.Next() // consume option name
 	if !d.Next() {
 		return "", d.ArgErr()
 	}
@ -48,124 +48,124 @@ func init() {
|
|||
//
|
||||
// When the CA ID is unspecified, 'local' is assumed.
|
||||
func parsePKIApp(d *caddyfile.Dispenser, existingVal any) (any, error) {
|
||||
pki := &caddypki.PKI{CAs: make(map[string]*caddypki.CA)}
|
||||
d.Next() // consume app name
|
||||
|
||||
for d.Next() {
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "ca":
|
||||
pkiCa := new(caddypki.CA)
|
||||
pki := &caddypki.PKI{
|
||||
CAs: make(map[string]*caddypki.CA),
|
||||
}
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "ca":
|
||||
pkiCa := new(caddypki.CA)
|
||||
if d.NextArg() {
|
||||
pkiCa.ID = d.Val()
|
||||
if d.NextArg() {
|
||||
pkiCa.ID = d.Val()
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
}
|
||||
if pkiCa.ID == "" {
|
||||
pkiCa.ID = caddypki.DefaultCAID
|
||||
}
|
||||
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "name":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
}
|
||||
if pkiCa.ID == "" {
|
||||
pkiCa.ID = caddypki.DefaultCAID
|
||||
}
|
||||
pkiCa.Name = d.Val()
|
||||
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "name":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Name = d.Val()
|
||||
|
||||
case "root_cn":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.RootCommonName = d.Val()
|
||||
|
||||
case "intermediate_cn":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.IntermediateCommonName = d.Val()
|
||||
|
||||
case "intermediate_lifetime":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkiCa.IntermediateLifetime = caddy.Duration(dur)
|
||||
|
||||
case "root":
|
||||
if pkiCa.Root == nil {
|
||||
pkiCa.Root = new(caddypki.KeyPair)
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "cert":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.Certificate = d.Val()
|
||||
|
||||
case "key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.PrivateKey = d.Val()
|
||||
|
||||
case "format":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.Format = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca root option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
case "intermediate":
|
||||
if pkiCa.Intermediate == nil {
|
||||
pkiCa.Intermediate = new(caddypki.KeyPair)
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "cert":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.Certificate = d.Val()
|
||||
|
||||
case "key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.PrivateKey = d.Val()
|
||||
|
||||
case "format":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.Format = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca intermediate option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca option '%s'", d.Val())
|
||||
case "root_cn":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.RootCommonName = d.Val()
|
||||
|
||||
case "intermediate_cn":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.IntermediateCommonName = d.Val()
|
||||
|
||||
case "intermediate_lifetime":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkiCa.IntermediateLifetime = caddy.Duration(dur)
|
||||
|
||||
case "root":
|
||||
if pkiCa.Root == nil {
|
||||
pkiCa.Root = new(caddypki.KeyPair)
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "cert":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.Certificate = d.Val()
|
||||
|
||||
case "key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.PrivateKey = d.Val()
|
||||
|
||||
case "format":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Root.Format = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca root option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
case "intermediate":
|
||||
if pkiCa.Intermediate == nil {
|
||||
pkiCa.Intermediate = new(caddypki.KeyPair)
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "cert":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.Certificate = d.Val()
|
||||
|
||||
case "key":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.PrivateKey = d.Val()
|
||||
|
||||
case "format":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
pkiCa.Intermediate.Format = d.Val()
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca intermediate option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki ca option '%s'", d.Val())
|
||||
}
|
||||
|
||||
pki.CAs[pkiCa.ID] = pkiCa
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki option '%s'", d.Val())
|
||||
}
|
||||
|
||||
pki.CAs[pkiCa.ID] = pkiCa
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized pki option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
return pki, nil
|
||||
}
|
||||
|
||||
|
|
|
@ -53,235 +53,235 @@ type serverOptions struct {
|
|||
}
|
||||
|
||||
func unmarshalCaddyfileServerOptions(d *caddyfile.Dispenser) (any, error) {
|
||||
d.Next() // consume option name
|
||||
|
||||
serverOpts := serverOptions{}
|
||||
for d.Next() {
|
||||
if d.NextArg() {
|
||||
serverOpts.ListenerAddress = d.Val()
|
||||
if d.NextArg() {
|
||||
serverOpts.ListenerAddress = d.Val()
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
}
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "name":
|
||||
if serverOpts.ListenerAddress == "" {
|
||||
return nil, d.Errf("cannot set a name for a server without a listener address")
|
||||
}
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "name":
|
||||
if serverOpts.ListenerAddress == "" {
|
||||
return nil, d.Errf("cannot set a name for a server without a listener address")
|
||||
}
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.Name = d.Val()
|
||||
serverOpts.Name = d.Val()
|
||||
|
||||
case "listener_wrappers":
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
modID := "caddy.listeners." + d.Val()
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
listenerWrapper, ok := unm.(caddy.ListenerWrapper)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("module %s (%T) is not a listener wrapper", modID, unm)
|
||||
}
|
||||
jsonListenerWrapper := caddyconfig.JSONModuleObject(
|
||||
listenerWrapper,
|
||||
"wrapper",
|
||||
listenerWrapper.(caddy.Module).CaddyModule().ID.Name(),
|
||||
nil,
|
||||
)
|
||||
serverOpts.ListenerWrappersRaw = append(serverOpts.ListenerWrappersRaw, jsonListenerWrapper)
|
||||
}
|
||||
|
||||
case "timeouts":
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "read_body":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing read_body timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.ReadTimeout = caddy.Duration(dur)
|
||||
|
||||
case "read_header":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing read_header timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.ReadHeaderTimeout = caddy.Duration(dur)
|
||||
|
||||
case "write":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing write timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.WriteTimeout = caddy.Duration(dur)
|
||||
|
||||
case "idle":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing idle timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.IdleTimeout = caddy.Duration(dur)
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized timeouts option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
case "keepalive_interval":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing keepalive interval duration: %v", err)
|
||||
}
|
||||
serverOpts.KeepAliveInterval = caddy.Duration(dur)
|
||||
|
||||
case "max_header_size":
|
||||
var sizeStr string
|
||||
if !d.AllArgs(&sizeStr) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
size, err := humanize.ParseBytes(sizeStr)
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing max_header_size: %v", err)
|
||||
}
|
||||
serverOpts.MaxHeaderBytes = int(size)
|
||||
|
||||
case "enable_full_duplex":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.EnableFullDuplex = true
|
||||
|
||||
case "log_credentials":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.ShouldLogCredentials = true
|
||||
|
||||
case "protocols":
|
||||
protos := d.RemainingArgs()
|
||||
for _, proto := range protos {
|
||||
if proto != "h1" && proto != "h2" && proto != "h2c" && proto != "h3" {
|
||||
return nil, d.Errf("unknown protocol '%s': expected h1, h2, h2c, or h3", proto)
|
||||
}
|
||||
if sliceContains(serverOpts.Protocols, proto) {
|
||||
return nil, d.Errf("protocol %s specified more than once", proto)
|
||||
}
|
||||
serverOpts.Protocols = append(serverOpts.Protocols, proto)
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
case "strict_sni_host":
|
||||
if d.NextArg() && d.Val() != "insecure_off" && d.Val() != "on" {
|
||||
return nil, d.Errf("strict_sni_host only supports 'on' or 'insecure_off', got '%s'", d.Val())
|
||||
}
|
||||
boolVal := true
|
||||
if d.Val() == "insecure_off" {
|
||||
boolVal = false
|
||||
}
|
||||
serverOpts.StrictSNIHost = &boolVal
|
||||
|
||||
case "trusted_proxies":
|
||||
if !d.NextArg() {
|
||||
return nil, d.Err("trusted_proxies expects an IP range source module name as its first argument")
|
||||
}
|
||||
modID := "http.ip_sources." + d.Val()
|
||||
case "listener_wrappers":
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
modID := "caddy.listeners." + d.Val()
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
source, ok := unm.(caddyhttp.IPRangeSource)
|
||||
listenerWrapper, ok := unm.(caddy.ListenerWrapper)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("module %s (%T) is not an IP range source", modID, unm)
|
||||
return nil, fmt.Errorf("module %s (%T) is not a listener wrapper", modID, unm)
|
||||
}
|
||||
jsonSource := caddyconfig.JSONModuleObject(
|
||||
source,
|
||||
"source",
|
||||
source.(caddy.Module).CaddyModule().ID.Name(),
|
||||
jsonListenerWrapper := caddyconfig.JSONModuleObject(
|
||||
listenerWrapper,
|
||||
"wrapper",
|
||||
listenerWrapper.(caddy.Module).CaddyModule().ID.Name(),
|
||||
nil,
|
||||
)
|
||||
serverOpts.TrustedProxiesRaw = jsonSource
|
||||
|
||||
case "trusted_proxies_strict":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.TrustedProxiesStrict = 1
|
||||
|
||||
case "client_ip_headers":
|
||||
headers := d.RemainingArgs()
|
||||
for _, header := range headers {
|
||||
if sliceContains(serverOpts.ClientIPHeaders, header) {
|
||||
return nil, d.Errf("client IP header %s specified more than once", header)
|
||||
}
|
||||
serverOpts.ClientIPHeaders = append(serverOpts.ClientIPHeaders, header)
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
case "metrics":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.Metrics = new(caddyhttp.Metrics)
|
||||
|
||||
// TODO: DEPRECATED. (August 2022)
|
||||
case "protocol":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: protocol sub-option will be removed soon")
|
||||
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "allow_h2c":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: allow_h2c will be removed soon; use protocols option instead")
|
||||
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if sliceContains(serverOpts.Protocols, "h2c") {
|
||||
return nil, d.Errf("protocol h2c already specified")
|
||||
}
|
||||
serverOpts.Protocols = append(serverOpts.Protocols, "h2c")
|
||||
|
||||
case "strict_sni_host":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: protocol > strict_sni_host in this position will be removed soon; move up to the servers block instead")
|
||||
|
||||
if d.NextArg() && d.Val() != "insecure_off" && d.Val() != "on" {
|
||||
return nil, d.Errf("strict_sni_host only supports 'on' or 'insecure_off', got '%s'", d.Val())
|
||||
}
|
||||
boolVal := true
|
||||
if d.Val() == "insecure_off" {
|
||||
boolVal = false
|
||||
}
|
||||
serverOpts.StrictSNIHost = &boolVal
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized protocol option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized servers option '%s'", d.Val())
|
||||
serverOpts.ListenerWrappersRaw = append(serverOpts.ListenerWrappersRaw, jsonListenerWrapper)
|
||||
}
|
||||
|
||||
case "timeouts":
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "read_body":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing read_body timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.ReadTimeout = caddy.Duration(dur)
|
||||
|
||||
case "read_header":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing read_header timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.ReadHeaderTimeout = caddy.Duration(dur)
|
||||
|
||||
case "write":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing write timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.WriteTimeout = caddy.Duration(dur)
|
||||
|
||||
case "idle":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing idle timeout duration: %v", err)
|
||||
}
|
||||
serverOpts.IdleTimeout = caddy.Duration(dur)
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized timeouts option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
case "keepalive_interval":
|
||||
if !d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing keepalive interval duration: %v", err)
|
||||
}
|
||||
serverOpts.KeepAliveInterval = caddy.Duration(dur)
|
||||
|
||||
case "max_header_size":
|
||||
var sizeStr string
|
||||
if !d.AllArgs(&sizeStr) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
size, err := humanize.ParseBytes(sizeStr)
|
||||
if err != nil {
|
||||
return nil, d.Errf("parsing max_header_size: %v", err)
|
||||
}
|
||||
serverOpts.MaxHeaderBytes = int(size)
|
||||
|
||||
case "enable_full_duplex":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.EnableFullDuplex = true
|
||||
|
||||
case "log_credentials":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.ShouldLogCredentials = true
|
||||
|
||||
case "protocols":
|
||||
protos := d.RemainingArgs()
|
||||
for _, proto := range protos {
|
||||
if proto != "h1" && proto != "h2" && proto != "h2c" && proto != "h3" {
|
||||
return nil, d.Errf("unknown protocol '%s': expected h1, h2, h2c, or h3", proto)
|
||||
}
|
||||
if sliceContains(serverOpts.Protocols, proto) {
|
||||
return nil, d.Errf("protocol %s specified more than once", proto)
|
||||
}
|
||||
serverOpts.Protocols = append(serverOpts.Protocols, proto)
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
case "strict_sni_host":
|
||||
if d.NextArg() && d.Val() != "insecure_off" && d.Val() != "on" {
|
||||
return nil, d.Errf("strict_sni_host only supports 'on' or 'insecure_off', got '%s'", d.Val())
|
||||
}
|
||||
boolVal := true
|
||||
if d.Val() == "insecure_off" {
|
||||
boolVal = false
|
||||
}
|
||||
serverOpts.StrictSNIHost = &boolVal
|
||||
|
||||
case "trusted_proxies":
|
||||
if !d.NextArg() {
|
||||
return nil, d.Err("trusted_proxies expects an IP range source module name as its first argument")
|
||||
}
|
||||
modID := "http.ip_sources." + d.Val()
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
source, ok := unm.(caddyhttp.IPRangeSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("module %s (%T) is not an IP range source", modID, unm)
|
||||
}
|
||||
jsonSource := caddyconfig.JSONModuleObject(
|
||||
source,
|
||||
"source",
|
||||
source.(caddy.Module).CaddyModule().ID.Name(),
|
||||
nil,
|
||||
)
|
||||
serverOpts.TrustedProxiesRaw = jsonSource
|
||||
|
||||
case "trusted_proxies_strict":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.TrustedProxiesStrict = 1
|
||||
|
||||
case "client_ip_headers":
|
||||
headers := d.RemainingArgs()
|
||||
for _, header := range headers {
|
||||
if sliceContains(serverOpts.ClientIPHeaders, header) {
|
||||
return nil, d.Errf("client IP header %s specified more than once", header)
|
||||
}
|
||||
serverOpts.ClientIPHeaders = append(serverOpts.ClientIPHeaders, header)
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
|
||||
case "metrics":
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if nesting := d.Nesting(); d.NextBlock(nesting) {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
serverOpts.Metrics = new(caddyhttp.Metrics)
|
||||
|
||||
// TODO: DEPRECATED. (August 2022)
|
||||
case "protocol":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: protocol sub-option will be removed soon")
|
||||
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "allow_h2c":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: allow_h2c will be removed soon; use protocols option instead")
|
||||
|
||||
if d.NextArg() {
|
||||
return nil, d.ArgErr()
|
||||
}
|
||||
if sliceContains(serverOpts.Protocols, "h2c") {
|
||||
return nil, d.Errf("protocol h2c already specified")
|
||||
}
|
||||
serverOpts.Protocols = append(serverOpts.Protocols, "h2c")
|
||||
|
||||
case "strict_sni_host":
|
||||
caddy.Log().Named("caddyfile").Warn("DEPRECATED: protocol > strict_sni_host in this position will be removed soon; move up to the servers block instead")
|
||||
|
||||
if d.NextArg() && d.Val() != "insecure_off" && d.Val() != "on" {
|
||||
return nil, d.Errf("strict_sni_host only supports 'on' or 'insecure_off', got '%s'", d.Val())
|
||||
}
|
||||
boolVal := true
|
||||
if d.Val() == "insecure_off" {
|
||||
boolVal = false
|
||||
}
|
||||
serverOpts.StrictSNIHost = &boolVal
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized protocol option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, d.Errf("unrecognized servers option '%s'", d.Val())
|
||||
}
|
||||
}
|
||||
return serverOpts, nil
|
||||
|
|
caddytest/integration/caddyfile_adapt/push.txt (new file, 78 lines)
|
@ -0,0 +1,78 @@
|
|||
:80
|
||||
|
||||
push * /foo.txt
|
||||
|
||||
push {
|
||||
GET /foo.txt
|
||||
}
|
||||
|
||||
push {
|
||||
GET /foo.txt
|
||||
HEAD /foo.txt
|
||||
}
|
||||
|
||||
push {
|
||||
headers {
|
||||
Foo bar
|
||||
}
|
||||
}
|
||||
----------
|
||||
{
|
||||
"apps": {
|
||||
"http": {
|
||||
"servers": {
|
||||
"srv0": {
|
||||
"listen": [
|
||||
":80"
|
||||
],
|
||||
"routes": [
|
||||
{
|
||||
"handle": [
|
||||
{
|
||||
"handler": "push",
|
||||
"resources": [
|
||||
{
|
||||
"target": "/foo.txt"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"handler": "push",
|
||||
"resources": [
|
||||
{
|
||||
"method": "GET",
|
||||
"target": "/foo.txt"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"handler": "push",
|
||||
"resources": [
|
||||
{
|
||||
"method": "GET",
|
||||
"target": "/foo.txt"
|
||||
},
|
||||
{
|
||||
"method": "HEAD",
|
||||
"target": "/foo.txt"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"handler": "push",
|
||||
"headers": {
|
||||
"set": {
|
||||
"Foo": [
|
||||
"bar"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -40,14 +40,8 @@ func init() {
 //
 // If <event> is *, then it will bind to all events.
 func parseApp(d *caddyfile.Dispenser, _ any) (any, error) {
+	d.Next() // consume option name
 	app := new(caddyevents.App)
-
-	// consume the option name
-	if !d.Next() {
-		return nil, d.ArgErr()
-	}
-
-	// handle the block
 	for d.NextBlock(0) {
 		switch d.Val() {
 		case "on":
@ -34,56 +34,56 @@ func init() {
|
|||
//
|
||||
// If no hash algorithm is supplied, bcrypt will be assumed.
|
||||
func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
|
||||
h.Next() // consume directive name
|
||||
|
||||
var ba HTTPBasicAuth
|
||||
ba.HashCache = new(Cache)
|
||||
|
||||
for h.Next() {
|
||||
var cmp Comparer
|
||||
args := h.RemainingArgs()
|
||||
var cmp Comparer
|
||||
args := h.RemainingArgs()
|
||||
|
||||
var hashName string
|
||||
switch len(args) {
|
||||
case 0:
|
||||
hashName = "bcrypt"
|
||||
case 1:
|
||||
hashName = args[0]
|
||||
case 2:
|
||||
hashName = args[0]
|
||||
ba.Realm = args[1]
|
||||
default:
|
||||
var hashName string
|
||||
switch len(args) {
|
||||
case 0:
|
||||
hashName = "bcrypt"
|
||||
case 1:
|
||||
hashName = args[0]
|
||||
case 2:
|
||||
hashName = args[0]
|
||||
ba.Realm = args[1]
|
||||
default:
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
switch hashName {
|
||||
case "bcrypt":
|
||||
cmp = BcryptHash{}
|
||||
case "scrypt":
|
||||
cmp = ScryptHash{}
|
||||
default:
|
||||
return nil, h.Errf("unrecognized hash algorithm: %s", hashName)
|
||||
}
|
||||
|
||||
ba.HashRaw = caddyconfig.JSONModuleObject(cmp, "algorithm", hashName, nil)
|
||||
|
||||
for h.NextBlock(0) {
|
||||
username := h.Val()
|
||||
|
||||
var b64Pwd, b64Salt string
|
||||
h.Args(&b64Pwd, &b64Salt)
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
switch hashName {
|
||||
case "bcrypt":
|
||||
cmp = BcryptHash{}
|
||||
case "scrypt":
|
||||
cmp = ScryptHash{}
|
||||
default:
|
||||
return nil, h.Errf("unrecognized hash algorithm: %s", hashName)
|
||||
if username == "" || b64Pwd == "" {
|
||||
return nil, h.Err("username and password cannot be empty or missing")
|
||||
}
|
||||
|
||||
ba.HashRaw = caddyconfig.JSONModuleObject(cmp, "algorithm", hashName, nil)
|
||||
|
||||
for h.NextBlock(0) {
|
||||
username := h.Val()
|
||||
|
||||
var b64Pwd, b64Salt string
|
||||
h.Args(&b64Pwd, &b64Salt)
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
if username == "" || b64Pwd == "" {
|
||||
return nil, h.Err("username and password cannot be empty or missing")
|
||||
}
|
||||
|
||||
ba.AccountList = append(ba.AccountList, Account{
|
||||
Username: username,
|
||||
Password: b64Pwd,
|
||||
Salt: b64Salt,
|
||||
})
|
||||
}
|
||||
ba.AccountList = append(ba.AccountList, Account{
|
||||
Username: username,
|
||||
Password: b64Pwd,
|
||||
Salt: b64Salt,
|
||||
})
|
||||
}
|
||||
|
||||
return Authentication{
|
||||
|
|
|
@ -176,13 +176,27 @@ func (m MatchExpression) Match(r *http.Request) bool {
 
 // UnmarshalCaddyfile implements caddyfile.Unmarshaler.
 func (m *MatchExpression) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
-	for d.Next() {
-		if d.CountRemainingArgs() > 1 {
-			m.Expr = strings.Join(d.RemainingArgsRaw(), " ")
-		} else {
-			m.Expr = d.Val()
-		}
-	}
+	d.Next() // consume matcher name
+
+	// if there's multiple args, then we need to keep the raw
+	// tokens because the user may have used quotes within their
+	// CEL expression (e.g. strings) and we should retain that
+	if d.CountRemainingArgs() > 1 {
+		m.Expr = strings.Join(d.RemainingArgsRaw(), " ")
+		return nil
+	}
+
+	// there should at least be one arg
+	if !d.NextArg() {
+		return d.ArgErr()
+	}
+
+	// if there's only one token, then we can safely grab the
+	// cleaned token (no quotes) and use that as the expression
+	// because there's no valid CEL expression that is only a
+	// quoted string; commonly quotes are used in Caddyfile to
+	// define the expression
+	m.Expr = d.Val()
 	return nil
 }
 
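A small usage sketch of why the raw tokens are joined when more than one argument follows the matcher name: quotes inside the CEL expression survive unmarshaling. It assumes the caddyfile.NewTestDispenser test helper and the exported caddyhttp.MatchExpression type with the behavior introduced in this diff.

```go
package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/modules/caddyhttp"
)

func main() {
	// the matcher name plus a CEL expression containing a quoted string,
	// as it would appear inside a named matcher block in a Caddyfile
	d := caddyfile.NewTestDispenser(`expression {method} == "GET"`)

	var m caddyhttp.MatchExpression
	if err := m.UnmarshalCaddyfile(d); err != nil {
		panic(err)
	}

	// prints: {method} == "GET"  (the quotes around GET are retained
	// because the raw arg tokens were joined, not the cleaned values)
	fmt.Println(m.Expr)
}
```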
@ -54,62 +54,60 @@ func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error)
|
|||
//
|
||||
// Specifying the formats on the first line will use those formats' defaults.
|
||||
func (enc *Encode) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
|
||||
var prefer []string
|
||||
d.Next() // consume directive name
|
||||
|
||||
prefer := []string{}
|
||||
for _, arg := range d.RemainingArgs() {
|
||||
mod, err := caddy.GetModule("http.encoders." + arg)
|
||||
if err != nil {
|
||||
return d.Errf("finding encoder module '%s': %v", mod, err)
|
||||
}
|
||||
encoding, ok := mod.New().(Encoding)
|
||||
if !ok {
|
||||
return d.Errf("module %s is not an HTTP encoding", mod)
|
||||
}
|
||||
if enc.EncodingsRaw == nil {
|
||||
enc.EncodingsRaw = make(caddy.ModuleMap)
|
||||
}
|
||||
enc.EncodingsRaw[arg] = caddyconfig.JSON(encoding, nil)
|
||||
prefer = append(prefer, arg)
|
||||
}
|
||||
|
||||
responseMatchers := make(map[string]caddyhttp.ResponseMatcher)
|
||||
|
||||
for d.Next() {
|
||||
for _, arg := range d.RemainingArgs() {
|
||||
mod, err := caddy.GetModule("http.encoders." + arg)
|
||||
if err != nil {
|
||||
return d.Errf("finding encoder module '%s': %v", mod, err)
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "minimum_length":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
encoding, ok := mod.New().(Encoding)
|
||||
minLength, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
enc.MinLength = minLength
|
||||
case "match":
|
||||
err := caddyhttp.ParseNamedResponseMatcher(d.NewFromNextSegment(), responseMatchers)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
matcher := responseMatchers["match"]
|
||||
enc.Matcher = &matcher
|
||||
default:
|
||||
name := d.Val()
|
||||
modID := "http.encoders." + name
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
encoding, ok := unm.(Encoding)
|
||||
if !ok {
|
||||
return d.Errf("module %s is not an HTTP encoding", mod)
|
||||
return d.Errf("module %s is not an HTTP encoding; is %T", modID, unm)
|
||||
}
|
||||
if enc.EncodingsRaw == nil {
|
||||
enc.EncodingsRaw = make(caddy.ModuleMap)
|
||||
}
|
||||
enc.EncodingsRaw[arg] = caddyconfig.JSON(encoding, nil)
|
||||
prefer = append(prefer, arg)
|
||||
}
|
||||
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "minimum_length":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
minLength, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
enc.MinLength = minLength
|
||||
case "match":
|
||||
err := caddyhttp.ParseNamedResponseMatcher(d.NewFromNextSegment(), responseMatchers)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
matcher := responseMatchers["match"]
|
||||
enc.Matcher = &matcher
|
||||
default:
|
||||
name := d.Val()
|
||||
modID := "http.encoders." + name
|
||||
unm, err := caddyfile.UnmarshalModule(d, modID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
encoding, ok := unm.(Encoding)
|
||||
if !ok {
|
||||
return d.Errf("module %s is not an HTTP encoding; is %T", modID, unm)
|
||||
}
|
||||
if enc.EncodingsRaw == nil {
|
||||
enc.EncodingsRaw = make(caddy.ModuleMap)
|
||||
}
|
||||
enc.EncodingsRaw[name] = caddyconfig.JSON(encoding, nil)
|
||||
prefer = append(prefer, name)
|
||||
}
|
||||
enc.EncodingsRaw[name] = caddyconfig.JSON(encoding, nil)
|
||||
prefer = append(prefer, name)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -44,17 +44,16 @@ func (Gzip) CaddyModule() caddy.ModuleInfo {
 
 // UnmarshalCaddyfile sets up the handler from Caddyfile tokens.
 func (g *Gzip) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
-	for d.Next() {
-		if !d.NextArg() {
-			continue
-		}
-		levelStr := d.Val()
-		level, err := strconv.Atoi(levelStr)
-		if err != nil {
-			return err
-		}
-		g.Level = level
-	}
+	d.Next() // consume option name
+	if !d.NextArg() {
+		return nil
+	}
+	levelStr := d.Val()
+	level, err := strconv.Atoi(levelStr)
+	if err != nil {
+		return err
+	}
+	g.Level = level
 	return nil
 }
 
@ -223,7 +223,7 @@ func parseTryFiles(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error)
 
 	// parse out the optional try policy
 	var tryPolicy string
-	for nesting := h.Nesting(); h.NextBlock(nesting); {
+	for h.NextBlock(0) {
 		switch h.Val() {
 		case "policy":
 			if tryPolicy != "" {
@ -127,6 +127,7 @@ func (MatchFile) CaddyModule() caddy.ModuleInfo {
 //	    try_policy first_exist|smallest_size|largest_size|most_recently_modified
 //	}
 func (m *MatchFile) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
+	// iterate to merge multiple matchers into one
 	for d.Next() {
 		m.TryFiles = append(m.TryFiles, d.RemainingArgs()...)
 		for d.NextBlock(0) {
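By contrast, matchers like MatchFile above keep the `for d.Next()` loop so that repeated segments of the same matcher merge into one value. A hedged sketch of that multi-segment convention, using a hypothetical MatchColor matcher that is not part of this commit:

```go
package color

import "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"

// MatchColor is a hypothetical matcher used only to illustrate merging.
type MatchColor struct {
	Colors []string `json:"colors,omitempty"`
}

// UnmarshalCaddyfile keeps the outer loop because the same matcher may
// appear in several segments and their arguments should accumulate.
func (m *MatchColor) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() { // each iteration is one segment (one instance of the matcher)
		m.Colors = append(m.Colors, d.RemainingArgs()...)
	}
	return nil
}
```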
@ -47,14 +47,12 @@ func init() {
|
|||
// ? conditionally sets a value only if the header field is not already set,
|
||||
// and > sets a field with defer enabled.
|
||||
func parseCaddyfile(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
|
||||
if !h.Next() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
h.Next() // consume directive name
|
||||
matcherSet, err := h.ExtractMatcherSet()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
h.Next() // consume the directive name again (matcher parsing resets)
|
||||
|
||||
makeHandler := func() Handler {
|
||||
return Handler{
|
||||
|
@ -65,73 +63,71 @@ func parseCaddyfile(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error)
|
|||
}
|
||||
handler, handlerWithRequire := makeHandler(), makeHandler()
|
||||
|
||||
for h.Next() {
|
||||
// first see if headers are in the initial line
|
||||
var hasArgs bool
|
||||
// first see if headers are in the initial line
|
||||
var hasArgs bool
|
||||
if h.NextArg() {
|
||||
hasArgs = true
|
||||
field := h.Val()
|
||||
var value, replacement string
|
||||
if h.NextArg() {
|
||||
hasArgs = true
|
||||
field := h.Val()
|
||||
var value, replacement string
|
||||
if h.NextArg() {
|
||||
value = h.Val()
|
||||
}
|
||||
if h.NextArg() {
|
||||
replacement = h.Val()
|
||||
}
|
||||
err := applyHeaderOp(
|
||||
handler.Response.HeaderOps,
|
||||
handler.Response,
|
||||
field,
|
||||
value,
|
||||
replacement,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
if len(handler.Response.HeaderOps.Delete) > 0 {
|
||||
handler.Response.Deferred = true
|
||||
}
|
||||
value = h.Val()
|
||||
}
|
||||
if h.NextArg() {
|
||||
replacement = h.Val()
|
||||
}
|
||||
err := applyHeaderOp(
|
||||
handler.Response.HeaderOps,
|
||||
handler.Response,
|
||||
field,
|
||||
value,
|
||||
replacement,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
if len(handler.Response.HeaderOps.Delete) > 0 {
|
||||
handler.Response.Deferred = true
|
||||
}
|
||||
}
|
||||
|
||||
// if not, they should be in a block
|
||||
for h.NextBlock(0) {
|
||||
field := h.Val()
|
||||
if field == "defer" {
|
||||
handler.Response.Deferred = true
|
||||
continue
|
||||
}
|
||||
if hasArgs {
|
||||
return nil, h.Err("cannot specify headers in both arguments and block") // because it would be weird
|
||||
}
|
||||
|
||||
// if not, they should be in a block
|
||||
for h.NextBlock(0) {
|
||||
field := h.Val()
|
||||
if field == "defer" {
|
||||
handler.Response.Deferred = true
|
||||
continue
|
||||
}
|
||||
if hasArgs {
|
||||
return nil, h.Err("cannot specify headers in both arguments and block") // because it would be weird
|
||||
}
|
||||
// sometimes it is habitual for users to suffix a field name with a colon,
|
||||
// as if they were writing a curl command or something; see
|
||||
// https://caddy.community/t/v2-reverse-proxy-please-add-cors-example-to-the-docs/7349/19
|
||||
field = strings.TrimSuffix(field, ":")
|
||||
|
||||
// sometimes it is habitual for users to suffix a field name with a colon,
|
||||
// as if they were writing a curl command or something; see
|
||||
// https://caddy.community/t/v2-reverse-proxy-please-add-cors-example-to-the-docs/7349/19
|
||||
field = strings.TrimSuffix(field, ":")
|
||||
var value, replacement string
|
||||
if h.NextArg() {
|
||||
value = h.Val()
|
||||
}
|
||||
if h.NextArg() {
|
||||
replacement = h.Val()
|
||||
}
|
||||
|
||||
var value, replacement string
|
||||
if h.NextArg() {
|
||||
value = h.Val()
|
||||
}
|
||||
if h.NextArg() {
|
||||
replacement = h.Val()
|
||||
}
|
||||
handlerToUse := handler
|
||||
if strings.HasPrefix(field, "?") {
|
||||
handlerToUse = handlerWithRequire
|
||||
}
|
||||
|
||||
handlerToUse := handler
|
||||
if strings.HasPrefix(field, "?") {
|
||||
handlerToUse = handlerWithRequire
|
||||
}
|
||||
|
||||
err := applyHeaderOp(
|
||||
handlerToUse.Response.HeaderOps,
|
||||
handlerToUse.Response,
|
||||
field,
|
||||
value,
|
||||
replacement,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
err := applyHeaderOp(
|
||||
handlerToUse.Response.HeaderOps,
|
||||
handlerToUse.Response,
|
||||
field,
|
||||
value,
|
||||
replacement,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -151,56 +147,52 @@ func parseCaddyfile(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error)
//
// request_header [<matcher>] [[+|-]<field> [<value|regexp>] [<replacement>]]
func parseReqHdrCaddyfile(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
	h.Next() // consume directive name
	matcherSet, err := h.ExtractMatcherSet()
	if err != nil {
		return nil, err
	}
	h.Next() // consume the directive name again (matcher parsing resets)

	configValues := []httpcaddyfile.ConfigValue{}

	if !h.NextArg() {
		return nil, h.ArgErr()
	}
	field := h.Val()

	hdr := Handler{
		Request: &HeaderOps{},
	}

	// sometimes it is habitual for users to suffix a field name with a colon,
	// as if they were writing a curl command or something; see
	// https://caddy.community/t/v2-reverse-proxy-please-add-cors-example-to-the-docs/7349/19
	field = strings.TrimSuffix(field, ":")

	var value, replacement string
	if h.NextArg() {
		value = h.Val()
	}
	if h.NextArg() {
		replacement = h.Val()
		if h.NextArg() {
			return nil, h.ArgErr()
		}
	}

	if hdr.Request == nil {
		hdr.Request = new(HeaderOps)
	}
	if err := CaddyfileHeaderOp(hdr.Request, field, value, replacement); err != nil {
		return nil, h.Err(err.Error())
	}

	configValues = append(configValues, h.NewRoute(matcherSet, hdr)...)

	if h.NextArg() {
		return nil, h.ArgErr()
	}
	return configValues, nil
}

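Note: the two header-directive parsers above follow the same "consume directive name, extract matcher, consume again" shape. The following is only an illustrative sketch of that shape for a hypothetical directive, assuming the caddy v2 module is available; the directive name, package name, and field choice are made up for the example, and directive-ordering registration is omitted.

// Sketch only (hypothetical directive "demo_request_header"); mirrors the wiring above.
package demo

import (
	"net/http"

	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
	"github.com/caddyserver/caddy/v2/modules/caddyhttp/headers"
)

func init() {
	// Ordering of the directive in the Caddyfile adapter is not configured here.
	httpcaddyfile.RegisterDirective("demo_request_header", parseDemoRequestHeader)
}

// parseDemoRequestHeader parses: demo_request_header [<matcher>] <field> <value>
func parseDemoRequestHeader(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
	h.Next() // consume directive name
	matcherSet, err := h.ExtractMatcherSet()
	if err != nil {
		return nil, err
	}
	h.Next() // consume the directive name again (matcher parsing resets)

	var field, value string
	if !h.Args(&field, &value) {
		return nil, h.ArgErr()
	}
	hdr := headers.Handler{
		Request: &headers.HeaderOps{Set: http.Header{field: []string{value}}},
	}
	return h.NewRoute(matcherSet, hdr), nil
}
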
@ -79,24 +79,23 @@ func (MatchRemoteIP) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchRemoteIP) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume matcher name
	for d.NextArg() {
		if d.Val() == "forwarded" {
			if len(m.Ranges) > 0 {
				return d.Err("if used, 'forwarded' must be first argument")
			}
			m.Forwarded = true
			continue
		}
		if d.Val() == "private_ranges" {
			m.Ranges = append(m.Ranges, PrivateRangesCIDR()...)
			continue
		}
		m.Ranges = append(m.Ranges, d.Val())
	}
	if d.NextBlock(0) {
		return d.Err("malformed remote_ip matcher: blocks are not supported")
	}
	return nil
}

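The matcher above shows the flattened argument-only shape: a single d.Next() consumes the matcher name, then inline arguments are read until exhausted. A minimal sketch of that shape for a hypothetical matcher type (module registration omitted):

// Sketch only; "DemoRanges" is a hypothetical type, not part of this commit.
package demo

import "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"

// DemoRanges is a hypothetical matcher that only takes inline arguments.
type DemoRanges struct {
	Ranges []string `json:"ranges,omitempty"`
}

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *DemoRanges) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume matcher name
	for d.NextArg() {
		m.Ranges = append(m.Ranges, d.Val())
	}
	if d.NextBlock(0) {
		return d.Err("blocks are not supported")
	}
	return nil
}

// Interface guard for the hypothetical type.
var _ caddyfile.Unmarshaler = (*DemoRanges)(nil)
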
@ -189,17 +188,16 @@ func (MatchClientIP) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchClientIP) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume matcher name
	for d.NextArg() {
		if d.Val() == "private_ranges" {
			m.Ranges = append(m.Ranges, PrivateRangesCIDR()...)
			continue
		}
		m.Ranges = append(m.Ranges, d.Val())
	}
	if d.NextBlock(0) {
		return d.Err("malformed client_ip matcher: blocks are not supported")
	}
	return nil
}

@ -42,74 +42,73 @@ func init() {
|
|||
// However, for convenience, there may be fewer outputs than destinations and any missing
|
||||
// outputs will be filled in implicitly.
|
||||
func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
|
||||
h.Next() // consume directive name
|
||||
|
||||
var handler Handler
|
||||
|
||||
for h.Next() {
|
||||
// source
|
||||
if !h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
handler.Source = h.Val()
|
||||
// source
|
||||
if !h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
handler.Source = h.Val()
|
||||
|
||||
// destinations
|
||||
handler.Destinations = h.RemainingArgs()
|
||||
if len(handler.Destinations) == 0 {
|
||||
return nil, h.Err("missing destination argument(s)")
|
||||
}
|
||||
for _, dest := range handler.Destinations {
|
||||
if shorthand := httpcaddyfile.WasReplacedPlaceholderShorthand(dest); shorthand != "" {
|
||||
return nil, h.Errf("destination %s conflicts with a Caddyfile placeholder shorthand", shorthand)
|
||||
}
|
||||
}
|
||||
|
||||
// mappings
|
||||
for h.NextBlock(0) {
|
||||
// defaults are a special case
|
||||
if h.Val() == "default" {
|
||||
if len(handler.Defaults) > 0 {
|
||||
return nil, h.Err("defaults already defined")
|
||||
}
|
||||
handler.Defaults = h.RemainingArgs()
|
||||
for len(handler.Defaults) < len(handler.Destinations) {
|
||||
handler.Defaults = append(handler.Defaults, "")
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// every line maps an input value to one or more outputs
|
||||
in := h.Val()
|
||||
var outs []any
|
||||
for h.NextArg() {
|
||||
val := h.ScalarVal()
|
||||
if val == "-" {
|
||||
outs = append(outs, nil)
|
||||
} else {
|
||||
outs = append(outs, val)
|
||||
}
|
||||
}
|
||||
|
||||
// cannot have more outputs than destinations
|
||||
if len(outs) > len(handler.Destinations) {
|
||||
return nil, h.Err("too many outputs")
|
||||
}
|
||||
|
||||
// for convenience, can have fewer outputs than destinations, but the
|
||||
// underlying handler won't accept that, so we fill in nil values
|
||||
for len(outs) < len(handler.Destinations) {
|
||||
outs = append(outs, nil)
|
||||
}
|
||||
|
||||
// create the mapping
|
||||
mapping := Mapping{Outputs: outs}
|
||||
if strings.HasPrefix(in, "~") {
|
||||
mapping.InputRegexp = in[1:]
|
||||
} else {
|
||||
mapping.Input = in
|
||||
}
|
||||
|
||||
handler.Mappings = append(handler.Mappings, mapping)
|
||||
// destinations
|
||||
handler.Destinations = h.RemainingArgs()
|
||||
if len(handler.Destinations) == 0 {
|
||||
return nil, h.Err("missing destination argument(s)")
|
||||
}
|
||||
for _, dest := range handler.Destinations {
|
||||
if shorthand := httpcaddyfile.WasReplacedPlaceholderShorthand(dest); shorthand != "" {
|
||||
return nil, h.Errf("destination %s conflicts with a Caddyfile placeholder shorthand", shorthand)
|
||||
}
|
||||
}
|
||||
|
||||
// mappings
|
||||
for h.NextBlock(0) {
|
||||
// defaults are a special case
|
||||
if h.Val() == "default" {
|
||||
if len(handler.Defaults) > 0 {
|
||||
return nil, h.Err("defaults already defined")
|
||||
}
|
||||
handler.Defaults = h.RemainingArgs()
|
||||
for len(handler.Defaults) < len(handler.Destinations) {
|
||||
handler.Defaults = append(handler.Defaults, "")
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// every line maps an input value to one or more outputs
|
||||
in := h.Val()
|
||||
var outs []any
|
||||
for h.NextArg() {
|
||||
val := h.ScalarVal()
|
||||
if val == "-" {
|
||||
outs = append(outs, nil)
|
||||
} else {
|
||||
outs = append(outs, val)
|
||||
}
|
||||
}
|
||||
|
||||
// cannot have more outputs than destinations
|
||||
if len(outs) > len(handler.Destinations) {
|
||||
return nil, h.Err("too many outputs")
|
||||
}
|
||||
|
||||
// for convenience, can have fewer outputs than destinations, but the
|
||||
// underlying handler won't accept that, so we fill in nil values
|
||||
for len(outs) < len(handler.Destinations) {
|
||||
outs = append(outs, nil)
|
||||
}
|
||||
|
||||
// create the mapping
|
||||
mapping := Mapping{Outputs: outs}
|
||||
if strings.HasPrefix(in, "~") {
|
||||
mapping.InputRegexp = in[1:]
|
||||
} else {
|
||||
mapping.Input = in
|
||||
}
|
||||
|
||||
handler.Mappings = append(handler.Mappings, mapping)
|
||||
}
|
||||
return handler, nil
|
||||
}
|
||||
|
|
|
@ -225,6 +225,7 @@ func (MatchHost) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchHost) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		*m = append(*m, d.RemainingArgs()...)
		if d.NextBlock(0) {

@ -632,6 +633,7 @@ func (MatchPath) CELLibrary(ctx caddy.Context) (cel.Library, error) {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchPath) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		*m = append(*m, d.RemainingArgs()...)
		if d.NextBlock(0) {

@ -716,6 +718,7 @@ func (MatchMethod) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchMethod) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		*m = append(*m, d.RemainingArgs()...)
		if d.NextBlock(0) {

@ -770,6 +773,7 @@ func (m *MatchQuery) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if *m == nil {
		*m = make(map[string][]string)
	}
	// iterate to merge multiple matchers into one
	for d.Next() {
		for _, query := range d.RemainingArgs() {
			if query == "" {

@ -868,6 +872,7 @@ func (m *MatchHeader) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if *m == nil {
		*m = make(map[string][]string)
	}
	// iterate to merge multiple matchers into one
	for d.Next() {
		var field, val string
		if !d.Args(&field) {

@ -1002,6 +1007,7 @@ func (m *MatchHeaderRE) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if *m == nil {
		*m = make(map[string]*MatchRegexp)
	}
	// iterate to merge multiple matchers into one
	for d.Next() {
		var first, second, third string
		if !d.Args(&first, &second) {

@ -1166,6 +1172,7 @@ func (m MatchProtocol) Match(r *http.Request) bool {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchProtocol) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		var proto string
		if !d.Args(&proto) {

@ -1207,6 +1214,7 @@ func (MatchNot) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *MatchNot) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		matcherSet, err := ParseCaddyfileNestedMatcherSet(d)
		if err != nil {

@ -1331,6 +1339,7 @@ func (mre *MatchRegexp) Match(input string, repl *caddy.Replacer) bool {

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (mre *MatchRegexp) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		// If this is the second iteration of the loop
		// then there's more than one path_regexp matcher

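Unlike the handler directives, these matcher types keep the outer for d.Next() loop: the same named matcher can be fed several token segments, which are merged into one value (hence the added "iterate to merge multiple matchers into one" comment). A minimal sketch of that pattern with a hypothetical slice-backed matcher:

// Sketch only; "DemoList" is a hypothetical type used to illustrate the merge loop.
package demo

import "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"

// DemoList is a hypothetical slice-backed matcher.
type DemoList []string

// UnmarshalCaddyfile implements caddyfile.Unmarshaler.
func (m *DemoList) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	// iterate to merge multiple matchers into one
	for d.Next() {
		*m = append(*m, d.RemainingArgs()...)
		if d.NextBlock(0) {
			return d.Err("malformed matcher: blocks are not supported")
		}
	}
	return nil
}
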
@ -39,40 +39,40 @@ func (ListenerWrapper) CaddyModule() caddy.ModuleInfo {
// fallback_policy <policy>
// }
func (w *ListenerWrapper) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume wrapper name

	// No same-line options are supported
	if d.NextArg() {
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		switch d.Val() {
		case "timeout":
			if !d.NextArg() {
				return d.ArgErr()
			}
			dur, err := caddy.ParseDuration(d.Val())
			if err != nil {
				return d.Errf("parsing proxy_protocol timeout duration: %v", err)
			}
			w.Timeout = caddy.Duration(dur)

		case "allow":
			w.Allow = append(w.Allow, d.RemainingArgs()...)
		case "deny":
			w.Deny = append(w.Deny, d.RemainingArgs()...)
		case "fallback_policy":
			if !d.NextArg() {
				return d.ArgErr()
			}
			p, err := parsePolicy(d.Val())
			if err != nil {
				return d.WrapErr(err)
			}
			w.FallbackPolicy = p
		default:
			return d.ArgErr()
		}
	}
	return nil

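The wrapper above parses a duration-valued subdirective with caddy.ParseDuration inside its block. A minimal sketch of just that step, using a hypothetical wrapper type and option name:

// Sketch only; "DemoWrapper" and "demo_timeout" are hypothetical.
package demo

import (
	"github.com/caddyserver/caddy/v2"
	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

type DemoWrapper struct {
	Timeout caddy.Duration `json:"timeout,omitempty"`
}

func (w *DemoWrapper) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume wrapper name
	for d.NextBlock(0) {
		switch d.Val() {
		case "demo_timeout":
			if !d.NextArg() {
				return d.ArgErr()
			}
			dur, err := caddy.ParseDuration(d.Val()) // accepts Go durations plus "d" for days
			if err != nil {
				return d.Errf("parsing demo_timeout duration: %v", err)
			}
			w.Timeout = caddy.Duration(dur)
		default:
			return d.ArgErr()
		}
	}
	return nil
}
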
@ -44,63 +44,63 @@ func init() {
|
|||
// Placeholders are accepted in resource and header field
|
||||
// name and value and replacement tokens.
|
||||
func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
|
||||
h.Next() // consume directive name
|
||||
|
||||
handler := new(Handler)
|
||||
|
||||
for h.Next() {
|
||||
if h.NextArg() {
|
||||
handler.Resources = append(handler.Resources, Resource{Target: h.Val()})
|
||||
}
|
||||
|
||||
// optional block
|
||||
for outerNesting := h.Nesting(); h.NextBlock(outerNesting); {
|
||||
switch h.Val() {
|
||||
case "headers":
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
for innerNesting := h.Nesting(); h.NextBlock(innerNesting); {
|
||||
var err error
|
||||
|
||||
// include current token, which we treat as an argument here
|
||||
args := []string{h.Val()}
|
||||
args = append(args, h.RemainingArgs()...)
|
||||
|
||||
if handler.Headers == nil {
|
||||
handler.Headers = new(HeaderConfig)
|
||||
}
|
||||
|
||||
switch len(args) {
|
||||
case 1:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], "", "")
|
||||
case 2:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], args[1], "")
|
||||
case 3:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], args[1], args[2])
|
||||
default:
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
case "GET", "HEAD":
|
||||
method := h.Val()
|
||||
if !h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
target := h.Val()
|
||||
handler.Resources = append(handler.Resources, Resource{
|
||||
Method: method,
|
||||
Target: target,
|
||||
})
|
||||
|
||||
default:
|
||||
handler.Resources = append(handler.Resources, Resource{Target: h.Val()})
|
||||
}
|
||||
}
|
||||
// inline resources
|
||||
if h.NextArg() {
|
||||
handler.Resources = append(handler.Resources, Resource{Target: h.Val()})
|
||||
}
|
||||
|
||||
// optional block
|
||||
for h.NextBlock(0) {
|
||||
switch h.Val() {
|
||||
case "headers":
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
for nesting := h.Nesting(); h.NextBlock(nesting); {
|
||||
var err error
|
||||
|
||||
// include current token, which we treat as an argument here
|
||||
args := []string{h.Val()}
|
||||
args = append(args, h.RemainingArgs()...)
|
||||
|
||||
if handler.Headers == nil {
|
||||
handler.Headers = new(HeaderConfig)
|
||||
}
|
||||
|
||||
switch len(args) {
|
||||
case 1:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], "", "")
|
||||
case 2:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], args[1], "")
|
||||
case 3:
|
||||
err = headers.CaddyfileHeaderOp(&handler.Headers.HeaderOps, args[0], args[1], args[2])
|
||||
default:
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, h.Err(err.Error())
|
||||
}
|
||||
}
|
||||
|
||||
case "GET", "HEAD":
|
||||
method := h.Val()
|
||||
if !h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
target := h.Val()
|
||||
handler.Resources = append(handler.Resources, Resource{
|
||||
Method: method,
|
||||
Target: target,
|
||||
})
|
||||
|
||||
default:
|
||||
handler.Resources = append(handler.Resources, Resource{Target: h.Val()})
|
||||
}
|
||||
}
|
||||
return handler, nil
|
||||
}
|
||||
|
|
|
@ -26,25 +26,26 @@ func init() {
}

func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
	h.Next() // consume directive name

	rb := new(RequestBody)

	// configuration should be in a block
	for h.NextBlock(0) {
		switch h.Val() {
		case "max_size":
			var sizeStr string
			if !h.AllArgs(&sizeStr) {
				return nil, h.ArgErr()
			}
			size, err := humanize.ParseBytes(sizeStr)
			if err != nil {
				return nil, h.Errf("parsing max_size: %v", err)
			}
			rb.MaxSize = int64(size)

		default:
			return nil, h.Errf("unrecognized servers option '%s'", h.Val())
		}
	}

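The max_size value above is parsed from a human-readable size string. A small standalone sketch of that conversion, assuming github.com/dustin/go-humanize as used above:

// Sketch only; the helper name is made up for the example.
package demo

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func parseMaxSize(sizeStr string) (int64, error) {
	size, err := humanize.ParseBytes(sizeStr) // accepts forms like "512", "10MB", "1 GiB"
	if err != nil {
		return 0, fmt.Errorf("parsing max_size: %v", err)
	}
	return int64(size), nil
}
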
@ -67,55 +67,53 @@ func (rm ResponseMatcher) matchStatusCode(statusCode int) bool {
//
// @name [header <field> [<value>]] | [status <code...>]
func ParseNamedResponseMatcher(d *caddyfile.Dispenser, matchers map[string]ResponseMatcher) error {
	d.Next() // consume matcher name
	definitionName := d.Val()

	if _, ok := matchers[definitionName]; ok {
		return d.Errf("matcher is defined more than once: %s", definitionName)
	}

	matcher := ResponseMatcher{}
	for nesting := d.Nesting(); d.NextArg() || d.NextBlock(nesting); {
		switch d.Val() {
		case "header":
			if matcher.Headers == nil {
				matcher.Headers = http.Header{}
			}

			// reuse the header request matcher's unmarshaler
			headerMatcher := MatchHeader(matcher.Headers)
			err := headerMatcher.UnmarshalCaddyfile(d.NewFromNextSegment())
			if err != nil {
				return err
			}

			matcher.Headers = http.Header(headerMatcher)
		case "status":
			if matcher.StatusCode == nil {
				matcher.StatusCode = []int{}
			}

			args := d.RemainingArgs()
			if len(args) == 0 {
				return d.ArgErr()
			}

			for _, arg := range args {
				if len(arg) == 3 && strings.HasSuffix(arg, "xx") {
					arg = arg[:1]
				}
				statusNum, err := strconv.Atoi(arg)
				if err != nil {
					return d.Errf("bad status value '%s': %v", arg, err)
				}
				matcher.StatusCode = append(matcher.StatusCode, statusNum)
			}
		default:
			return d.Errf("unrecognized response matcher %s", d.Val())
		}
	}
	matchers[definitionName] = matcher
	return nil
}

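In the status branch above, a three-character class like "2xx" is collapsed to its first digit before being parsed as an integer. Pulled out as a standalone sketch for clarity (helper name is made up):

// Sketch only.
package demo

import (
	"strconv"
	"strings"
)

func parseStatusArg(arg string) (int, error) {
	if len(arg) == 3 && strings.HasSuffix(arg, "xx") {
		arg = arg[:1] // "2xx" -> "2", later matched as a whole status class
	}
	return strconv.Atoi(arg)
}
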
File diff suppressed because it is too large
@ -46,76 +46,75 @@ func init() {
|
|||
// capture_stderr
|
||||
// }
|
||||
func (t *Transport) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
|
||||
for d.Next() {
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "root":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.Root = d.Val()
|
||||
|
||||
case "split":
|
||||
t.SplitPath = d.RemainingArgs()
|
||||
if len(t.SplitPath) == 0 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "env":
|
||||
args := d.RemainingArgs()
|
||||
if len(args) != 2 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
if t.EnvVars == nil {
|
||||
t.EnvVars = make(map[string]string)
|
||||
}
|
||||
t.EnvVars[args[0]] = args[1]
|
||||
|
||||
case "resolve_root_symlink":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.ResolveRootSymlink = true
|
||||
|
||||
case "dial_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.DialTimeout = caddy.Duration(dur)
|
||||
|
||||
case "read_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.ReadTimeout = caddy.Duration(dur)
|
||||
|
||||
case "write_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.WriteTimeout = caddy.Duration(dur)
|
||||
|
||||
case "capture_stderr":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.CaptureStderr = true
|
||||
|
||||
default:
|
||||
return d.Errf("unrecognized subdirective %s", d.Val())
|
||||
d.Next() // consume transport name
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "root":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.Root = d.Val()
|
||||
|
||||
case "split":
|
||||
t.SplitPath = d.RemainingArgs()
|
||||
if len(t.SplitPath) == 0 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "env":
|
||||
args := d.RemainingArgs()
|
||||
if len(args) != 2 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
if t.EnvVars == nil {
|
||||
t.EnvVars = make(map[string]string)
|
||||
}
|
||||
t.EnvVars[args[0]] = args[1]
|
||||
|
||||
case "resolve_root_symlink":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.ResolveRootSymlink = true
|
||||
|
||||
case "dial_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.DialTimeout = caddy.Duration(dur)
|
||||
|
||||
case "read_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.ReadTimeout = caddy.Duration(dur)
|
||||
|
||||
case "write_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
dur, err := caddy.ParseDuration(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("bad timeout value %s: %v", d.Val(), err)
|
||||
}
|
||||
t.WriteTimeout = caddy.Duration(dur)
|
||||
|
||||
case "capture_stderr":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
t.CaptureStderr = true
|
||||
|
||||
default:
|
||||
return d.Errf("unrecognized subdirective %s", d.Val())
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
@ -68,10 +68,9 @@ func (r RandomSelection) Select(pool UpstreamPool, request *http.Request, _ http

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *RandomSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -98,22 +97,22 @@ func (WeightedRoundRobinSelection) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *WeightedRoundRobinSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name

	args := d.RemainingArgs()
	if len(args) == 0 {
		return d.ArgErr()
	}

	for _, weight := range args {
		weightInt, err := strconv.Atoi(weight)
		if err != nil {
			return d.Errf("invalid weight value '%s': %v", weight, err)
		}
		if weightInt < 1 {
			return d.Errf("invalid weight value '%s': weight should be non-zero and positive", weight)
		}
		r.Weights = append(r.Weights, weightInt)
	}
	return nil
}

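The weight parsing above validates that every argument is a positive integer. A small standalone sketch of that validation (helper name is made up):

// Sketch only.
package demo

import (
	"fmt"
	"strconv"
)

func parseWeights(args []string) ([]int, error) {
	weights := make([]int, 0, len(args))
	for _, weight := range args {
		weightInt, err := strconv.Atoi(weight)
		if err != nil {
			return nil, fmt.Errorf("invalid weight value '%s': %v", weight, err)
		}
		if weightInt < 1 {
			return nil, fmt.Errorf("invalid weight value '%s': weight should be non-zero and positive", weight)
		}
		weights = append(weights, weightInt)
	}
	return weights, nil
}
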
@ -179,17 +178,17 @@ func (RandomChoiceSelection) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *RandomChoiceSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name

	if !d.NextArg() {
		return d.ArgErr()
	}
	chooseStr := d.Val()
	choose, err := strconv.Atoi(chooseStr)
	if err != nil {
		return d.Errf("invalid choice value '%s': %v", chooseStr, err)
	}
	r.Choose = choose
	return nil
}

@ -280,10 +279,9 @@ func (LeastConnSelection) Select(pool UpstreamPool, _ *http.Request, _ http.Resp

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *LeastConnSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -320,10 +318,9 @@ func (r *RoundRobinSelection) Select(pool UpstreamPool, _ *http.Request, _ http.

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *RoundRobinSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -352,10 +349,9 @@ func (FirstSelection) Select(pool UpstreamPool, _ *http.Request, _ http.Response

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *FirstSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -383,10 +379,9 @@ func (IPHashSelection) Select(pool UpstreamPool, req *http.Request, _ http.Respo

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *IPHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -416,10 +411,9 @@ func (ClientIPHashSelection) Select(pool UpstreamPool, req *http.Request, _ http

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *ClientIPHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

@ -443,10 +437,9 @@ func (URIHashSelection) Select(pool UpstreamPool, req *http.Request, _ http.Resp

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (r *URIHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

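All of the no-argument selection policies above share the same minimal unmarshaler shape. A sketch of that shape on a hypothetical policy type, with module registration omitted:

// Sketch only; "DemoSelection" is hypothetical.
package demo

import "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"

type DemoSelection struct{}

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (*DemoSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name
	if d.NextArg() {
		return d.ArgErr() // this policy takes no arguments
	}
	return nil
}
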
@ -504,13 +497,14 @@ func (s QueryHashSelection) Select(pool UpstreamPool, req *http.Request, _ http.

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (s *QueryHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name

	if !d.NextArg() {
		return d.ArgErr()
	}
	s.Key = d.Val()

	for d.NextBlock(0) {
		switch d.Val() {
		case "fallback":
			if !d.NextArg() {

@ -583,13 +577,14 @@ func (s HeaderHashSelection) Select(pool UpstreamPool, req *http.Request, _ http

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (s *HeaderHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume policy name

	if !d.NextArg() {
		return d.ArgErr()
	}
	s.Field = d.Val()

	for d.NextBlock(0) {
		switch d.Val() {
		case "fallback":
			if !d.NextArg() {

@ -708,7 +703,7 @@ func (s *CookieHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	default:
		return d.ArgErr()
	}
	for d.NextBlock(0) {
		switch d.Val() {
		case "fallback":
			if !d.NextArg() {

@ -39,10 +39,7 @@ func init() {
// Only URI components which are given in <to> will be set in the resulting URI.
// See the docs for the rewrite handler for more information.
func parseCaddyfileRewrite(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
	h.Next() // consume directive name

	// count the tokens to determine what to do
	argsCount := h.CountRemainingArgs()

@ -66,26 +63,9 @@ func parseCaddyfileRewrite(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue,
	if err != nil {
		return nil, err
	}
	h.Next() // consume directive name again, matcher parsing does a reset
	h.Next() // advance to the rewrite URI

	return h.NewRoute(userMatcherSet, Rewrite{URI: h.Val()}), nil
}

@ -93,17 +73,14 @@ func parseCaddyfileRewrite(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue,
//
// method [<matcher>] <method>
func parseCaddyfileMethod(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
	h.Next() // consume directive name
	if !h.NextArg() {
		return nil, h.ArgErr()
	}
	if h.NextArg() {
		return nil, h.ArgErr()
	}
	return Rewrite{Method: h.Val()}, nil
}

// parseCaddyfileURI sets up a handler for manipulating (but not "rewriting") the

@ -118,65 +95,71 @@ func parseCaddyfileMethod(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler,
|
|||
// path_regexp is used, then regular expression replacements will be performed
|
||||
// on the path portion of the URI (and a limit cannot be set).
|
||||
func parseCaddyfileURI(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
|
||||
h.Next() // consume directive name
|
||||
|
||||
args := h.RemainingArgs()
|
||||
if len(args) < 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
var rewr Rewrite
|
||||
for h.Next() {
|
||||
args := h.RemainingArgs()
|
||||
if len(args) < 2 {
|
||||
|
||||
switch args[0] {
|
||||
case "strip_prefix":
|
||||
if len(args) > 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
switch args[0] {
|
||||
case "strip_prefix":
|
||||
if len(args) > 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
rewr.StripPathPrefix = args[1]
|
||||
if !strings.HasPrefix(rewr.StripPathPrefix, "/") {
|
||||
rewr.StripPathPrefix = "/" + rewr.StripPathPrefix
|
||||
}
|
||||
case "strip_suffix":
|
||||
if len(args) > 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
rewr.StripPathSuffix = args[1]
|
||||
case "replace":
|
||||
var find, replace, lim string
|
||||
switch len(args) {
|
||||
case 4:
|
||||
lim = args[3]
|
||||
fallthrough
|
||||
case 3:
|
||||
find = args[1]
|
||||
replace = args[2]
|
||||
default:
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
var limInt int
|
||||
if lim != "" {
|
||||
var err error
|
||||
limInt, err = strconv.Atoi(lim)
|
||||
if err != nil {
|
||||
return nil, h.Errf("limit must be an integer; invalid: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
rewr.URISubstring = append(rewr.URISubstring, substrReplacer{
|
||||
Find: find,
|
||||
Replace: replace,
|
||||
Limit: limInt,
|
||||
})
|
||||
case "path_regexp":
|
||||
if len(args) != 3 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
find, replace := args[1], args[2]
|
||||
rewr.PathRegexp = append(rewr.PathRegexp, ®exReplacer{
|
||||
Find: find,
|
||||
Replace: replace,
|
||||
})
|
||||
default:
|
||||
return nil, h.Errf("unrecognized URI manipulation '%s'", args[0])
|
||||
rewr.StripPathPrefix = args[1]
|
||||
if !strings.HasPrefix(rewr.StripPathPrefix, "/") {
|
||||
rewr.StripPathPrefix = "/" + rewr.StripPathPrefix
|
||||
}
|
||||
|
||||
case "strip_suffix":
|
||||
if len(args) > 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
rewr.StripPathSuffix = args[1]
|
||||
|
||||
case "replace":
|
||||
var find, replace, lim string
|
||||
switch len(args) {
|
||||
case 4:
|
||||
lim = args[3]
|
||||
fallthrough
|
||||
case 3:
|
||||
find = args[1]
|
||||
replace = args[2]
|
||||
default:
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
|
||||
var limInt int
|
||||
if lim != "" {
|
||||
var err error
|
||||
limInt, err = strconv.Atoi(lim)
|
||||
if err != nil {
|
||||
return nil, h.Errf("limit must be an integer; invalid: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
rewr.URISubstring = append(rewr.URISubstring, substrReplacer{
|
||||
Find: find,
|
||||
Replace: replace,
|
||||
Limit: limInt,
|
||||
})
|
||||
|
||||
case "path_regexp":
|
||||
if len(args) != 3 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
find, replace := args[1], args[2]
|
||||
rewr.PathRegexp = append(rewr.PathRegexp, ®exReplacer{
|
||||
Find: find,
|
||||
Replace: replace,
|
||||
})
|
||||
|
||||
default:
|
||||
return nil, h.Errf("unrecognized URI manipulation '%s'", args[0])
|
||||
}
|
||||
return rewr, nil
|
||||
}
|
||||
|
@ -190,9 +173,9 @@ func parseCaddyfileURI(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, err
// Only path matchers (with a `/` prefix) are supported as this is a shortcut
// for the handle directive with a strip_prefix rewrite.
func parseCaddyfileHandlePath(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
	h.Next() // consume directive name

	// there must be a path matcher
	if !h.NextArg() {
		return nil, h.ArgErr()
	}

@ -60,36 +60,35 @@ func (StaticError) CaddyModule() caddy.ModuleInfo {
// If there is just one argument (other than the matcher), it is considered
// to be a status code if it's a valid positive integer of 3 digits.
func (e *StaticError) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume directive name
	args := d.RemainingArgs()
	switch len(args) {
	case 1:
		if len(args[0]) == 3 {
			if num, err := strconv.Atoi(args[0]); err == nil && num > 0 {
				e.StatusCode = WeakString(args[0])
				break
			}
		}
		e.Error = args[0]
	case 2:
		e.Error = args[0]
		e.StatusCode = WeakString(args[1])
	default:
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		switch d.Val() {
		case "message":
			if e.Error != "" {
				return d.Err("message already specified")
			}
			if !d.AllArgs(&e.Error) {
				return d.ArgErr()
			}
		default:
			return d.Errf("unrecognized subdirective '%s'", d.Val())
		}
	}
	return nil

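The single-argument heuristic above (and in the respond handler below) treats a value as a status code only when it is exactly three characters and parses as a positive integer; anything else is the message or body. A standalone sketch of that decision (helper name is made up):

// Sketch only.
package demo

import "strconv"

// classifyArg returns either a status code string or a message, never both.
func classifyArg(arg string) (statusCode string, message string) {
	if len(arg) == 3 {
		if num, err := strconv.Atoi(arg); err == nil && num > 0 {
			return arg, ""
		}
	}
	return "", arg
}
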
@ -138,41 +138,40 @@ func (StaticResponse) CaddyModule() caddy.ModuleInfo {
// If there is just one argument (other than the matcher), it is considered
// to be a status code if it's a valid positive integer of 3 digits.
func (s *StaticResponse) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume directive name
	args := d.RemainingArgs()
	switch len(args) {
	case 1:
		if len(args[0]) == 3 {
			if num, err := strconv.Atoi(args[0]); err == nil && num > 0 {
				s.StatusCode = WeakString(args[0])
				break
			}
		}
		s.Body = args[0]
	case 2:
		s.Body = args[0]
		s.StatusCode = WeakString(args[1])
	default:
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		switch d.Val() {
		case "body":
			if s.Body != "" {
				return d.Err("body already specified")
			}
			if !d.AllArgs(&s.Body) {
				return d.ArgErr()
			}
		case "close":
			if s.Close {
				return d.Err("close already specified")
			}
			s.Close = true
		default:
			return d.Errf("unrecognized subdirective '%s'", d.Val())
		}
	}
	return nil

@ -34,47 +34,46 @@ func init() {
|
|||
// root <path>
|
||||
// }
|
||||
func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error) {
|
||||
h.Next() // consume directive name
|
||||
t := new(Templates)
|
||||
for h.Next() {
|
||||
for h.NextBlock(0) {
|
||||
switch h.Val() {
|
||||
case "mime":
|
||||
t.MIMETypes = h.RemainingArgs()
|
||||
if len(t.MIMETypes) == 0 {
|
||||
return nil, h.ArgErr()
|
||||
for h.NextBlock(0) {
|
||||
switch h.Val() {
|
||||
case "mime":
|
||||
t.MIMETypes = h.RemainingArgs()
|
||||
if len(t.MIMETypes) == 0 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
case "between":
|
||||
t.Delimiters = h.RemainingArgs()
|
||||
if len(t.Delimiters) != 2 {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
case "root":
|
||||
if !h.Args(&t.FileRoot) {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
case "extensions":
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
if t.ExtensionsRaw != nil {
|
||||
return nil, h.Err("extensions already specified")
|
||||
}
|
||||
for nesting := h.Nesting(); h.NextBlock(nesting); {
|
||||
extensionModuleName := h.Val()
|
||||
modID := "http.handlers.templates.functions." + extensionModuleName
|
||||
unm, err := caddyfile.UnmarshalModule(h.Dispenser, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "between":
|
||||
t.Delimiters = h.RemainingArgs()
|
||||
if len(t.Delimiters) != 2 {
|
||||
return nil, h.ArgErr()
|
||||
cf, ok := unm.(CustomFunctions)
|
||||
if !ok {
|
||||
return nil, h.Errf("module %s (%T) does not provide template functions", modID, unm)
|
||||
}
|
||||
case "root":
|
||||
if !h.Args(&t.FileRoot) {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
case "extensions":
|
||||
if h.NextArg() {
|
||||
return nil, h.ArgErr()
|
||||
}
|
||||
if t.ExtensionsRaw != nil {
|
||||
return nil, h.Err("extensions already specified")
|
||||
}
|
||||
for nesting := h.Nesting(); h.NextBlock(nesting); {
|
||||
extensionModuleName := h.Val()
|
||||
modID := "http.handlers.templates.functions." + extensionModuleName
|
||||
unm, err := caddyfile.UnmarshalModule(h.Dispenser, modID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cf, ok := unm.(CustomFunctions)
|
||||
if !ok {
|
||||
return nil, h.Errf("module %s (%T) does not provide template functions", modID, unm)
|
||||
}
|
||||
if t.ExtensionsRaw == nil {
|
||||
t.ExtensionsRaw = make(caddy.ModuleMap)
|
||||
}
|
||||
t.ExtensionsRaw[extensionModuleName] = caddyconfig.JSON(cf, nil)
|
||||
if t.ExtensionsRaw == nil {
|
||||
t.ExtensionsRaw = make(caddy.ModuleMap)
|
||||
}
|
||||
t.ExtensionsRaw[extensionModuleName] = caddyconfig.JSON(cf, nil)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -88,20 +88,18 @@ func (ot *Tracing) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
		"span": &ot.SpanName,
	}

	d.Next() // consume directive name
	if d.NextArg() {
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		if dst, ok := paramsMap[d.Val()]; ok {
			if err := setParameter(d, dst); err != nil {
				return err
			}
		} else {
			return d.ArgErr()
		}
	}
	return nil

@ -68,6 +68,8 @@ func (m VarsMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request, next H
// ...
// }
func (m *VarsMiddleware) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume directive name

	if *m == nil {
		*m = make(VarsMiddleware)
	}

@ -94,15 +96,13 @@ func (m *VarsMiddleware) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
		return nil
	}

	if err := nextVar(true); err != nil {
		return err
	}
	for d.NextBlock(0) {
		if err := nextVar(false); err != nil {
			return err
		}
	}

	return nil

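The vars middleware above accepts one key/value pair on the directive line and any further pairs inside a block. A simplified sketch of that shape on a hypothetical map-backed type (the real code above uses a nextVar closure instead of inlined parsing):

// Sketch only; "DemoVars" is hypothetical.
package demo

import "github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"

type DemoVars map[string]string

func (m *DemoVars) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	d.Next() // consume directive name
	if *m == nil {
		*m = make(DemoVars)
	}
	// optional first pair on the same line
	if d.NextArg() {
		key := d.Val()
		if !d.NextArg() {
			return d.ArgErr()
		}
		(*m)[key] = d.Val()
	}
	// remaining pairs, one per line, inside the block
	for d.NextBlock(0) {
		key := d.Val()
		if !d.NextArg() {
			return d.ArgErr()
		}
		(*m)[key] = d.Val()
	}
	return nil
}
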
@ -135,6 +135,7 @@ func (m *VarsMatcher) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if *m == nil {
		*m = make(map[string][]string)
	}
	// iterate to merge multiple matchers into one
	for d.Next() {
		var field string
		if !d.Args(&field) {

@ -216,6 +217,7 @@ func (m *MatchVarsRE) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if *m == nil {
		*m = make(map[string]*MatchRegexp)
	}
	// iterate to merge multiple matchers into one
	for d.Next() {
		var first, second, third string
		if !d.Args(&first, &second) {

@ -34,53 +34,52 @@ func init() {
// resolvers <addresses...>
// }
func parseACMEServer(h httpcaddyfile.Helper) ([]httpcaddyfile.ConfigValue, error) {
	h.Next() // consume directive name
	matcherSet, err := h.ExtractMatcherSet()
	if err != nil {
		return nil, err
	}
	h.Next() // consume the directive name again (matcher parsing resets)

	// no inline args allowed
	if h.NextArg() {
		return nil, h.ArgErr()
	}

	var acmeServer Handler
	var ca *caddypki.CA

	for h.NextBlock(0) {
		switch h.Val() {
		case "ca":
			if !h.AllArgs(&acmeServer.CA) {
				return nil, h.ArgErr()
			}
			if ca == nil {
				ca = new(caddypki.CA)
			}
			ca.ID = acmeServer.CA

		case "lifetime":
			if !h.NextArg() {
				return nil, h.ArgErr()
			}

			dur, err := caddy.ParseDuration(h.Val())
			if err != nil {
				return nil, err
			}

			if d := time.Duration(ca.IntermediateLifetime); d > 0 && dur > d {
				return nil, h.Errf("certificate lifetime (%s) exceeds intermediate certificate lifetime (%s)", dur, d)
			}

			acmeServer.Lifetime = caddy.Duration(dur)

		case "resolvers":
			acmeServer.Resolvers = h.RemainingArgs()
			if len(acmeServer.Resolvers) == 0 {
				return nil, h.Errf("must specify at least one resolver address")
			}
		}
	}

@ -277,218 +277,219 @@ func (iss *ACMEIssuer) GetACMEIssuer() *ACMEIssuer { return iss }
|
|||
// }
|
||||
// }
|
||||
func (iss *ACMEIssuer) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
|
||||
for d.Next() {
|
||||
d.Next() // consume issuer name
|
||||
|
||||
if d.NextArg() {
|
||||
iss.CA = d.Val()
|
||||
if d.NextArg() {
|
||||
iss.CA = d.Val()
|
||||
return d.ArgErr()
|
||||
}
|
||||
}
|
||||
|
||||
for d.NextBlock(0) {
|
||||
switch d.Val() {
|
||||
case "dir":
|
||||
if iss.CA != "" {
|
||||
return d.Errf("directory is already specified: %s", iss.CA)
|
||||
}
|
||||
if !d.AllArgs(&iss.CA) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "test_dir":
|
||||
if !d.AllArgs(&iss.TestCA) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "email":
|
||||
if !d.AllArgs(&iss.Email) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "timeout":
|
||||
var timeoutStr string
|
||||
if !d.AllArgs(&timeoutStr) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
timeout, err := caddy.ParseDuration(timeoutStr)
|
||||
if err != nil {
|
||||
return d.Errf("invalid timeout duration %s: %v", timeoutStr, err)
|
||||
}
|
||||
iss.ACMETimeout = caddy.Duration(timeout)
|
||||
|
||||
case "disable_http_challenge":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
}
|
||||
for nesting := d.Nesting(); d.NextBlock(nesting); {
|
||||
switch d.Val() {
|
||||
case "dir":
|
||||
if iss.CA != "" {
|
||||
return d.Errf("directory is already specified: %s", iss.CA)
|
||||
}
|
||||
if !d.AllArgs(&iss.CA) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "test_dir":
|
||||
if !d.AllArgs(&iss.TestCA) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "email":
|
||||
if !d.AllArgs(&iss.Email) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "timeout":
|
||||
var timeoutStr string
|
||||
if !d.AllArgs(&timeoutStr) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
timeout, err := caddy.ParseDuration(timeoutStr)
|
||||
if err != nil {
|
||||
return d.Errf("invalid timeout duration %s: %v", timeoutStr, err)
|
||||
}
|
||||
iss.ACMETimeout = caddy.Duration(timeout)
|
||||
|
||||
case "disable_http_challenge":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.HTTP == nil {
|
||||
iss.Challenges.HTTP = new(HTTPChallengeConfig)
|
||||
}
|
||||
iss.Challenges.HTTP.Disabled = true
|
||||
|
||||
case "disable_tlsalpn_challenge":
|
||||
if d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.TLSALPN == nil {
|
||||
iss.Challenges.TLSALPN = new(TLSALPNChallengeConfig)
|
||||
}
|
||||
iss.Challenges.TLSALPN.Disabled = true
|
||||
|
||||
case "alt_http_port":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
port, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("invalid port %s: %v", d.Val(), err)
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.HTTP == nil {
|
||||
iss.Challenges.HTTP = new(HTTPChallengeConfig)
|
||||
}
|
||||
iss.Challenges.HTTP.AlternatePort = port
|
||||
|
||||
case "alt_tlsalpn_port":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
port, err := strconv.Atoi(d.Val())
|
||||
if err != nil {
|
||||
return d.Errf("invalid port %s: %v", d.Val(), err)
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.TLSALPN == nil {
|
||||
iss.Challenges.TLSALPN = new(TLSALPNChallengeConfig)
|
||||
}
|
||||
iss.Challenges.TLSALPN.AlternatePort = port
|
||||
|
||||
case "eab":
|
||||
iss.ExternalAccount = new(acme.EAB)
|
||||
if !d.AllArgs(&iss.ExternalAccount.KeyID, &iss.ExternalAccount.MACKey) {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "trusted_roots":
|
||||
iss.TrustedRootsPEMFiles = d.RemainingArgs()
|
||||
|
||||
case "dns":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
provName := d.Val()
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
unm, err := caddyfile.UnmarshalModule(d, "dns.providers."+provName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
iss.Challenges.DNS.ProviderRaw = caddyconfig.JSONModuleObject(unm, "name", provName, nil)
|
||||
|
||||
case "propagation_delay":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
delayStr := d.Val()
|
||||
delay, err := caddy.ParseDuration(delayStr)
|
||||
if err != nil {
|
||||
return d.Errf("invalid propagation_delay duration %s: %v", delayStr, err)
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
iss.Challenges.DNS.PropagationDelay = caddy.Duration(delay)
|
||||
|
||||
case "propagation_timeout":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
timeoutStr := d.Val()
|
||||
var timeout time.Duration
|
||||
if timeoutStr == "-1" {
|
||||
timeout = time.Duration(-1)
|
||||
} else {
|
||||
var err error
|
||||
timeout, err = caddy.ParseDuration(timeoutStr)
|
||||
if err != nil {
|
||||
return d.Errf("invalid propagation_timeout duration %s: %v", timeoutStr, err)
|
||||
}
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
iss.Challenges.DNS.PropagationTimeout = caddy.Duration(timeout)
|
||||
|
||||
case "resolvers":
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
iss.Challenges.DNS.Resolvers = d.RemainingArgs()
|
||||
if len(iss.Challenges.DNS.Resolvers) == 0 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
|
||||
case "dns_ttl":
|
||||
if !d.NextArg() {
|
||||
return d.ArgErr()
|
||||
}
|
||||
ttlStr := d.Val()
|
||||
ttl, err := caddy.ParseDuration(ttlStr)
|
||||
if err != nil {
|
||||
return d.Errf("invalid dns_ttl duration %s: %v", ttlStr, err)
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
iss.Challenges.DNS.TTL = caddy.Duration(ttl)
|
||||
|
||||
case "dns_challenge_override_domain":
|
||||
arg := d.RemainingArgs()
|
||||
if len(arg) != 1 {
|
||||
return d.ArgErr()
|
||||
}
|
||||
if iss.Challenges == nil {
|
||||
iss.Challenges = new(ChallengesConfig)
|
||||
}
|
||||
if iss.Challenges.DNS == nil {
|
||||
iss.Challenges.DNS = new(DNSChallengeConfig)
|
||||
}
|
||||
iss.Challenges.DNS.OverrideDomain = arg[0]
|
||||
|
||||
case "preferred_chains":
|
||||
chainPref, err := ParseCaddyfilePreferredChainsOptions(d)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
iss.PreferredChains = chainPref
|
||||
|
||||
default:
|
||||
return d.Errf("unrecognized ACME issuer property: %s", d.Val())
		}
	}
	return nil

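For orientation, a hedged Caddyfile sketch of the syntax this unmarshaler accepts. The subdirective names come straight from the switch above; the site address, email, port, resolver addresses, and the cloudflare DNS provider are illustrative placeholders (a DNS provider module has to be plugged in separately).

example.com {
	tls {
		issuer acme {
			dir https://acme-v02.api.letsencrypt.org/directory
			email admin@example.com
			timeout 30s
			disable_http_challenge
			alt_tlsalpn_port 10443
			dns cloudflare {env.CLOUDFLARE_API_TOKEN}
			propagation_timeout 2m
			resolvers 1.1.1.1 8.8.8.8
		}
	}
}
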
@ -72,10 +72,9 @@ func (ts Tailscale) canHazCertificate(ctx context.Context, hello *tls.ClientHell
//
// ... tailscale
func (Tailscale) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			return d.ArgErr()
		}
	d.Next() // consume cert manager name
	if d.NextArg() {
		return d.ArgErr()
	}
	return nil
}

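A minimal usage sketch, assuming the cert manager is wired up via the tls block's get_certificate subdirective; as the code above shows, it takes no arguments.

example.ts.net {
	tls {
		get_certificate tailscale
	}
}
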
@ -169,17 +168,18 @@ func (hcg HTTPCertGetter) GetCertificate(ctx context.Context, hello *tls.ClientH
//
// ... http <url>
func (hcg *HTTPCertGetter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if !d.NextArg() {
			return d.ArgErr()
		}
		hcg.URL = d.Val()
		if d.NextArg() {
			return d.ArgErr()
		}
		for nesting := d.Nesting(); d.NextBlock(nesting); {
			return d.Err("block not allowed here")
		}
	d.Next() // consume cert manager name

	if !d.NextArg() {
		return d.ArgErr()
	}
	hcg.URL = d.Val()

	if d.NextArg() {
		return d.ArgErr()
	}
	if d.NextBlock(0) {
		return d.Err("block not allowed here")
	}
	return nil
}

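A corresponding Caddyfile sketch, following the "... http <url>" form in the comment above; the endpoint URL is a placeholder.

example.com {
	tls {
		get_certificate http http://localhost:9007/certificate
	}
}
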
@ -155,31 +155,30 @@ func (iss InternalIssuer) Issue(ctx context.Context, csr *x509.CertificateReques
// sign_with_root
// }
func (iss *InternalIssuer) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		for d.NextBlock(0) {
			switch d.Val() {
			case "ca":
				if !d.AllArgs(&iss.CA) {
					return d.ArgErr()
				}

			case "lifetime":
				if !d.NextArg() {
					return d.ArgErr()
				}
				dur, err := caddy.ParseDuration(d.Val())
				if err != nil {
					return err
				}
				iss.Lifetime = caddy.Duration(dur)

			case "sign_with_root":
				if d.NextArg() {
					return d.ArgErr()
				}
				iss.SignWithRoot = true

	d.Next() // consume issuer name
	for d.NextBlock(0) {
		switch d.Val() {
		case "ca":
			if !d.AllArgs(&iss.CA) {
				return d.ArgErr()
			}

		case "lifetime":
			if !d.NextArg() {
				return d.ArgErr()
			}
			dur, err := caddy.ParseDuration(d.Val())
			if err != nil {
				return err
			}
			iss.Lifetime = caddy.Duration(dur)

		case "sign_with_root":
			if d.NextArg() {
				return d.ArgErr()
			}
			iss.SignWithRoot = true

		}
	}
	return nil

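A hedged example of the internal issuer block parsed above; the CA id and lifetime values are placeholders.

example.com {
	tls {
		issuer internal {
			ca local
			lifetime 12h
			sign_with_root
		}
	}
}
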
@ -208,21 +208,20 @@ func (iss *ZeroSSLIssuer) Revoke(ctx context.Context, cert certmagic.Certificate
//
// Any of the subdirectives for the ACME issuer can be used in the block.
func (iss *ZeroSSLIssuer) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
	d.Next() // consume issuer name
	if d.NextArg() {
		iss.APIKey = d.Val()
		if d.NextArg() {
			iss.APIKey = d.Val()
			if d.NextArg() {
				return d.ArgErr()
			}
			return d.ArgErr()
		}
	}

	if iss.ACMEIssuer == nil {
		iss.ACMEIssuer = new(ACMEIssuer)
	}
	err := iss.ACMEIssuer.UnmarshalCaddyfile(d.NewFromNextSegment())
	if err != nil {
		return err
	}
	if iss.ACMEIssuer == nil {
		iss.ACMEIssuer = new(ACMEIssuer)
	}
	err := iss.ACMEIssuer.UnmarshalCaddyfile(d.NewFromNextSegment())
	if err != nil {
		return err
	}
	return nil
}

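A sketch of the zerossl issuer form implied by the code above: an optional API key argument, then any ACME issuer subdirectives in the block. The key placeholder and email are illustrative.

example.com {
	tls {
		issuer zerossl {env.ZEROSSL_API_KEY} {
			email admin@example.com
		}
	}
}
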
@ -55,7 +55,7 @@ func (s *FileStorage) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	if d.NextArg() {
		return d.ArgErr()
	}
	for nesting := d.Nesting(); d.NextBlock(nesting); {
	for d.NextBlock(0) {
		switch d.Val() {
		case "root":
			if !d.NextArg() {

@ -65,14 +65,13 @@ func (ce *ConsoleEncoder) Provision(_ caddy.Context) error {
// See the godoc on the LogEncoderConfig type for the syntax of
// subdirectives that are common to most/all encoders.
func (ce *ConsoleEncoder) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			return d.ArgErr()
		}
		err := ce.LogEncoderConfig.UnmarshalCaddyfile(d)
		if err != nil {
			return err
		}
	d.Next() // consume encoder name
	if d.NextArg() {
		return d.ArgErr()
	}
	err := ce.LogEncoderConfig.UnmarshalCaddyfile(d)
	if err != nil {
		return err
	}
	return nil
}

@ -106,14 +105,13 @@ func (je *JSONEncoder) Provision(_ caddy.Context) error {
// See the godoc on the LogEncoderConfig type for the syntax of
// subdirectives that are common to most/all encoders.
func (je *JSONEncoder) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			return d.ArgErr()
		}
		err := je.LogEncoderConfig.UnmarshalCaddyfile(d)
		if err != nil {
			return err
		}
	d.Next() // consume encoder name
	if d.NextArg() {
		return d.ArgErr()
	}
	err := je.LogEncoderConfig.UnmarshalCaddyfile(d)
	if err != nil {
		return err
	}
	return nil
}

@ -149,7 +147,7 @@ type LogEncoderConfig struct {
// level_format <format>
// }
func (lec *LogEncoderConfig) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for nesting := d.Nesting(); d.NextBlock(nesting); {
	for d.NextBlock(0) {
		subdir := d.Val()
		switch subdir {
		case "time_local":

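For context, a hedged sketch of an encoder block using the common subdirectives visible in this hunk (time_local from the case above, level_format from the comment); the chosen values are assumptions, not taken from this diff.

log {
	format console {
		time_local
		level_format color
	}
}
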
@ -154,73 +154,72 @@ func (fw FileWriter) OpenWriter() (io.WriteCloser, error) {
// omitted or set to a zero value, then Caddy's default value for that
// subdirective is used.
func (fw *FileWriter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if !d.NextArg() {
			return d.ArgErr()
		}
		fw.Filename = d.Val()
		if d.NextArg() {
			return d.ArgErr()
		}
	d.Next() // consume writer name
	if !d.NextArg() {
		return d.ArgErr()
	}
	fw.Filename = d.Val()
	if d.NextArg() {
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		switch d.Val() {
		case "roll_disabled":
			var f bool
			fw.Roll = &f
			if d.NextArg() {
				return d.ArgErr()
			}

		case "roll_size":
			var sizeStr string
			if !d.AllArgs(&sizeStr) {
				return d.ArgErr()
			}
			size, err := humanize.ParseBytes(sizeStr)
			if err != nil {
				return d.Errf("parsing size: %v", err)
			}
			fw.RollSizeMB = int(math.Ceil(float64(size) / humanize.MiByte))

		case "roll_uncompressed":
			var f bool
			fw.RollCompress = &f
			if d.NextArg() {
				return d.ArgErr()
			}

		case "roll_local_time":
			fw.RollLocalTime = true
			if d.NextArg() {
				return d.ArgErr()
			}

		case "roll_keep":
			var keepStr string
			if !d.AllArgs(&keepStr) {
				return d.ArgErr()
			}
			keep, err := strconv.Atoi(keepStr)
			if err != nil {
				return d.Errf("parsing roll_keep number: %v", err)
			}
			fw.RollKeep = keep

		case "roll_keep_for":
			var keepForStr string
			if !d.AllArgs(&keepForStr) {
				return d.ArgErr()
			}
			keepFor, err := caddy.ParseDuration(keepForStr)
			if err != nil {
				return d.Errf("parsing roll_keep_for duration: %v", err)
			}
			if keepFor < 0 {
				return d.Errf("negative roll_keep_for duration: %v", keepFor)
			}
			fw.RollKeepDays = int(math.Ceil(keepFor.Hours() / 24))
		}
	}
	return nil

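A hedged sketch of the file writer block with the roll subdirectives parsed above; the path, size, and retention values are placeholders.

log {
	output file /var/log/caddy/access.log {
		roll_size 100MiB
		roll_keep 10
		roll_keep_for 2160h
		roll_local_time
	}
}
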
@ -108,50 +108,49 @@ func (fe *FilterEncoder) Provision(ctx caddy.Context) error {
// }
// }
func (fe *FilterEncoder) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		for d.NextBlock(0) {
			switch d.Val() {
			case "wrap":
	d.Next() // consume encoder name
	for d.NextBlock(0) {
		switch d.Val() {
		case "wrap":
			if !d.NextArg() {
				return d.ArgErr()
			}
			moduleName := d.Val()
			moduleID := "caddy.logging.encoders." + moduleName
			unm, err := caddyfile.UnmarshalModule(d, moduleID)
			if err != nil {
				return err
			}
			enc, ok := unm.(zapcore.Encoder)
			if !ok {
				return d.Errf("module %s (%T) is not a zapcore.Encoder", moduleID, unm)
			}
			fe.WrappedRaw = caddyconfig.JSONModuleObject(enc, "format", moduleName, nil)

		case "fields":
			for nesting := d.Nesting(); d.NextBlock(nesting); {
				field := d.Val()
				if !d.NextArg() {
					return d.ArgErr()
				}
				moduleName := d.Val()
				moduleID := "caddy.logging.encoders." + moduleName
				filterName := d.Val()
				moduleID := "caddy.logging.encoders.filter." + filterName
				unm, err := caddyfile.UnmarshalModule(d, moduleID)
				if err != nil {
					return err
				}
				enc, ok := unm.(zapcore.Encoder)
				filter, ok := unm.(LogFieldFilter)
				if !ok {
					return d.Errf("module %s (%T) is not a zapcore.Encoder", moduleID, unm)
					return d.Errf("module %s (%T) is not a logging.LogFieldFilter", moduleID, unm)
				}
				fe.WrappedRaw = caddyconfig.JSONModuleObject(enc, "format", moduleName, nil)

		case "fields":
			for d.NextBlock(1) {
				field := d.Val()
				if !d.NextArg() {
					return d.ArgErr()
				}
				filterName := d.Val()
				moduleID := "caddy.logging.encoders.filter." + filterName
				unm, err := caddyfile.UnmarshalModule(d, moduleID)
				if err != nil {
					return err
				}
				filter, ok := unm.(LogFieldFilter)
				if !ok {
					return d.Errf("module %s (%T) is not a logging.LogFieldFilter", moduleID, unm)
				}
				if fe.FieldsRaw == nil {
					fe.FieldsRaw = make(map[string]json.RawMessage)
				}
				fe.FieldsRaw[field] = caddyconfig.JSONModuleObject(filter, "filter", filterName, nil)
				if fe.FieldsRaw == nil {
					fe.FieldsRaw = make(map[string]json.RawMessage)
				}

			default:
				return d.Errf("unrecognized subdirective %s", d.Val())
				fe.FieldsRaw[field] = caddyconfig.JSONModuleObject(filter, "filter", filterName, nil)
			}

		default:
			return d.Errf("unrecognized subdirective %s", d.Val())
		}
	}
	return nil

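A sketch of the filter encoder syntax handled above: wrapping another encoder and attaching a per-field filter. The field path and mask widths are illustrative; ip_mask is the filter defined later in this diff.

log {
	format filter {
		wrap console
		fields {
			request>remote_ip ip_mask {
				ipv4 24
				ipv6 32
			}
		}
	}
}
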
@ -128,10 +128,9 @@ func (ReplaceFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (f *ReplaceFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			f.Value = d.Val()
		}
	d.Next() // consume filter name
	if d.NextArg() {
		f.Value = d.Val()
	}
	return nil
}

@ -169,32 +168,31 @@ func (IPMaskFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (m *IPMaskFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		for d.NextBlock(0) {
			switch d.Val() {
			case "ipv4":
				if !d.NextArg() {
					return d.ArgErr()
				}
				val, err := strconv.Atoi(d.Val())
				if err != nil {
					return d.Errf("error parsing %s: %v", d.Val(), err)
				}
				m.IPv4MaskRaw = val

			case "ipv6":
				if !d.NextArg() {
					return d.ArgErr()
				}
				val, err := strconv.Atoi(d.Val())
				if err != nil {
					return d.Errf("error parsing %s: %v", d.Val(), err)
				}
				m.IPv6MaskRaw = val

			default:
				return d.Errf("unrecognized subdirective %s", d.Val())
	d.Next() // consume filter name
	for d.NextBlock(0) {
		switch d.Val() {
		case "ipv4":
			if !d.NextArg() {
				return d.ArgErr()
			}
			val, err := strconv.Atoi(d.Val())
			if err != nil {
				return d.Errf("error parsing %s: %v", d.Val(), err)
			}
			m.IPv4MaskRaw = val

		case "ipv6":
			if !d.NextArg() {
				return d.ArgErr()
			}
			val, err := strconv.Atoi(d.Val())
			if err != nil {
				return d.Errf("error parsing %s: %v", d.Val(), err)
			}
			m.IPv6MaskRaw = val

		default:
			return d.Errf("unrecognized subdirective %s", d.Val())
		}
	}
	return nil

@ -328,45 +326,44 @@ func (QueryFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (m *QueryFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		for d.NextBlock(0) {
			qfa := queryFilterAction{}
			switch d.Val() {
			case "replace":
				if !d.NextArg() {
					return d.ArgErr()
				}

				qfa.Type = replaceAction
				qfa.Parameter = d.Val()

				if !d.NextArg() {
					return d.ArgErr()
				}
				qfa.Value = d.Val()

			case "hash":
				if !d.NextArg() {
					return d.ArgErr()
				}

				qfa.Type = hashAction
				qfa.Parameter = d.Val()

			case "delete":
				if !d.NextArg() {
					return d.ArgErr()
				}

				qfa.Type = deleteAction
				qfa.Parameter = d.Val()

			default:
				return d.Errf("unrecognized subdirective %s", d.Val())
	d.Next() // consume filter name
	for d.NextBlock(0) {
		qfa := queryFilterAction{}
		switch d.Val() {
		case "replace":
			if !d.NextArg() {
				return d.ArgErr()
			}

			m.Actions = append(m.Actions, qfa)
			qfa.Type = replaceAction
			qfa.Parameter = d.Val()

			if !d.NextArg() {
				return d.ArgErr()
			}
			qfa.Value = d.Val()

		case "hash":
			if !d.NextArg() {
				return d.ArgErr()
			}

			qfa.Type = hashAction
			qfa.Parameter = d.Val()

		case "delete":
			if !d.NextArg() {
				return d.ArgErr()
			}

			qfa.Type = deleteAction
			qfa.Parameter = d.Val()

		default:
			return d.Errf("unrecognized subdirective %s", d.Val())
		}

		m.Actions = append(m.Actions, qfa)
	}
	return nil
}

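A hedged example of the query filter's three actions as parsed above, attached to a log field; the field path and parameter names are illustrative.

log {
	format filter {
		fields {
			request>uri query {
				replace sessionid REDACTED
				hash token
				delete utm_source
			}
		}
	}
}
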
@ -460,45 +457,44 @@ func (CookieFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (m *CookieFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		for d.NextBlock(0) {
			cfa := cookieFilterAction{}
			switch d.Val() {
			case "replace":
				if !d.NextArg() {
					return d.ArgErr()
				}

				cfa.Type = replaceAction
				cfa.Name = d.Val()

				if !d.NextArg() {
					return d.ArgErr()
				}
				cfa.Value = d.Val()

			case "hash":
				if !d.NextArg() {
					return d.ArgErr()
				}

				cfa.Type = hashAction
				cfa.Name = d.Val()

			case "delete":
				if !d.NextArg() {
					return d.ArgErr()
				}

				cfa.Type = deleteAction
				cfa.Name = d.Val()

			default:
				return d.Errf("unrecognized subdirective %s", d.Val())
	d.Next() // consume filter name
	for d.NextBlock(0) {
		cfa := cookieFilterAction{}
		switch d.Val() {
		case "replace":
			if !d.NextArg() {
				return d.ArgErr()
			}

			m.Actions = append(m.Actions, cfa)
			cfa.Type = replaceAction
			cfa.Name = d.Val()

			if !d.NextArg() {
				return d.ArgErr()
			}
			cfa.Value = d.Val()

		case "hash":
			if !d.NextArg() {
				return d.ArgErr()
			}

			cfa.Type = hashAction
			cfa.Name = d.Val()

		case "delete":
			if !d.NextArg() {
				return d.ArgErr()
			}

			cfa.Type = deleteAction
			cfa.Name = d.Val()

		default:
			return d.Errf("unrecognized subdirective %s", d.Val())
		}

		m.Actions = append(m.Actions, cfa)
	}
	return nil
}

@ -571,13 +567,12 @@ func (RegexpFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (f *RegexpFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			f.RawRegexp = d.Val()
		}
		if d.NextArg() {
			f.Value = d.Val()
		}
	d.Next() // consume filter name
	if d.NextArg() {
		f.RawRegexp = d.Val()
	}
	if d.NextArg() {
		f.Value = d.Val()
	}
	return nil
}

@ -625,10 +620,9 @@ func (RenameFilter) CaddyModule() caddy.ModuleInfo {

// UnmarshalCaddyfile sets up the module from Caddyfile tokens.
func (f *RenameFilter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if d.NextArg() {
			f.Name = d.Val()
		}
	d.Next() // consume filter name
	if d.NextArg() {
		f.Name = d.Val()
	}
	return nil
}

@ -117,35 +117,34 @@ func (nw NetWriter) OpenWriter() (io.WriteCloser, error) {
// soft_start
// }
func (nw *NetWriter) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		if !d.NextArg() {
			return d.ArgErr()
		}
		nw.Address = d.Val()
		if d.NextArg() {
			return d.ArgErr()
		}
		for nesting := d.Nesting(); d.NextBlock(nesting); {
			switch d.Val() {
			case "dial_timeout":
				if !d.NextArg() {
					return d.ArgErr()
				}
				timeout, err := caddy.ParseDuration(d.Val())
				if err != nil {
					return d.Errf("invalid duration: %s", d.Val())
				}
				if d.NextArg() {
					return d.ArgErr()
				}
				nw.DialTimeout = caddy.Duration(timeout)

			case "soft_start":
				if d.NextArg() {
					return d.ArgErr()
				}
				nw.SoftStart = true
	d.Next() // consume writer name
	if !d.NextArg() {
		return d.ArgErr()
	}
	nw.Address = d.Val()
	if d.NextArg() {
		return d.ArgErr()
	}
	for d.NextBlock(0) {
		switch d.Val() {
		case "dial_timeout":
			if !d.NextArg() {
				return d.ArgErr()
			}
			timeout, err := caddy.ParseDuration(d.Val())
			if err != nil {
				return d.Errf("invalid duration: %s", d.Val())
			}
			if d.NextArg() {
				return d.ArgErr()
			}
			nw.DialTimeout = caddy.Duration(timeout)

		case "soft_start":
			if d.NextArg() {
				return d.ArgErr()
			}
			nw.SoftStart = true
		}
	}
	return nil

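A sketch of the net writer block per the subdirectives above; the address is a placeholder.

log {
	output net localhost:5514 {
		dial_timeout 5s
		soft_start
	}
}
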
@ -78,19 +78,18 @@ func parseCaddyfile(h httpcaddyfile.Helper) (caddyhttp.MiddlewareHandler, error)
// disable_openmetrics
// }
func (m *Metrics) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
	for d.Next() {
		args := d.RemainingArgs()
		if len(args) > 0 {
			return d.ArgErr()
		}
	d.Next() // consume directive name
	args := d.RemainingArgs()
	if len(args) > 0 {
		return d.ArgErr()
	}

	for d.NextBlock(0) {
		switch d.Val() {
		case "disable_openmetrics":
			m.DisableOpenMetrics = true
		default:
			return d.Errf("unrecognized subdirective %q", d.Val())
		}
	for d.NextBlock(0) {
		switch d.Val() {
		case "disable_openmetrics":
			m.DisableOpenMetrics = true
		default:
			return d.Errf("unrecognized subdirective %q", d.Val())
		}
	}
	return nil

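A minimal sketch of the handler's Caddyfile form as parsed above: no arguments, with the one optional subdirective.

example.com {
	metrics {
		disable_openmetrics
	}
}
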