caddyfile: Pass blocks to import for snippets #6130

Draft
wants to merge 12 commits into master
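This change lets an `import` of a snippet carry a block: the block's tokens are spliced into the snippet wherever the `{block}` placeholder appears, and named sub-blocks inside the import block can be referenced individually with `{blocks.<name>}`. A minimal sketch of the intended usage (the `logging` snippet name and the `log`/`output` directives are only illustrative; the adapt tests added below exercise the same mechanism with `header`):

(logging) {
	log {
		{block}
	}
}

example.com {
	import logging {
		output stdout
	}
}

The tokens `output stdout` replace `{block}`, so the site ends up with `log { output stdout }`. If the import carries no block, the placeholder is left as-is, so snippets that already contained `{block}` keep working unchanged.
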
67 changes: 65 additions & 2 deletions caddyconfig/caddyfile/parse.go
@@ -359,9 +359,45 @@ func (p *parser) doImport(nesting int) error {
// set up a replacer for non-variadic args replacement
repl := makeArgsReplacer(args)

// grab all the tokens (if any) from within a block that follows the import
var blockTokens []Token
for currentNesting := p.Nesting(); p.NextBlock(currentNesting); {
blockTokens = append(blockTokens, p.Token())
}
// initialize the block mapping with a capacity hint of 1
blockMapping := make(map[string][]Token, 1)
if len(blockTokens) > 0 {
// use those tokens to create a new dispenser, then use it to parse each named sub-block
bd := NewDispenser(blockTokens)
for bd.Next() {
// see if we can grab a key
var currentMappingKey string
if bd.Val() == "{" {
return p.Err("anonymous blocks are not supported")
}
currentMappingKey = bd.Val()
currentMappingTokens := []Token{}
// read all args until the end of the line or an opening brace
if bd.NextArg() {
currentMappingTokens = append(currentMappingTokens, bd.Token())
for bd.NextArg() {
currentMappingTokens = append(currentMappingTokens, bd.Token())
}
// TODO(elee1766): we don't enter another mapping here because it's annoying to extract the { and } properly.
// maybe someone can do that in the future
} else {
// attempt to enter a block and add tokens to the currentMappingTokens
for mappingNesting := bd.Nesting(); bd.NextBlock(mappingNesting); {
currentMappingTokens = append(currentMappingTokens, bd.Token())
}
}
blockMapping[currentMappingKey] = currentMappingTokens
}
}
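// As an illustration (the names here are hypothetical), an import block such as
//
//	import snippet {
//		foo a
//		bar {
//			baz b
//		}
//	}
//
// produces blockMapping["foo"] = [a] and blockMapping["bar"] = [baz b]:
// a key followed by arguments on the same line maps to those argument tokens,
// while a key followed by a block maps to the tokens inside that block.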

// splice out the import directive and its arguments
// (2 tokens, plus the length of args)
-tokensBefore := p.tokens[:p.cursor-1-len(args)]
+tokensBefore := p.tokens[:p.cursor-1-len(args)-len(blockTokens)]
tokensAfter := p.tokens[p.cursor+1:]
var importedTokens []Token
var nodes []string
@@ -490,6 +526,33 @@ func (p *parser) doImport(nesting int) error {
maybeSnippet = false
}
}
// if it is {block}, we substitute with all tokens in the block
// if it is {blocks.*}, we substitute with the tokens in the mapping for the *
var skip bool
var tokensToAdd []Token
switch {
case token.Text == "{block}":
tokensToAdd = blockTokens
case strings.HasPrefix(token.Text, "{blocks.") && strings.HasSuffix(token.Text, "}"):
// {blocks.foo.bar} will be extracted to key `foo.bar`
blockKey := strings.TrimPrefix(strings.TrimSuffix(token.Text, "}"), "{blocks.")
val, ok := blockMapping[blockKey]
if ok {
tokensToAdd = val
}
default:
skip = true
}
if !skip {
if len(tokensToAdd) == 0 {
// if no content was passed for this placeholder (no block given to import, or no matching key), don't do any replacement
// this allows snippets which contained {block}/{blocks.*} before this change to continue functioning as normal
tokensCopy = append(tokensCopy, token)
} else {
tokensCopy = append(tokensCopy, tokensToAdd...)
}
continue
}
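// For example (mirroring the adapt tests added in this change), given a snippet
//
//	(snippet) {
//		header {
//			{blocks.foo}
//		}
//	}
//
// imported as
//
//	import snippet {
//		foo {
//			foo a
//		}
//	}
//
// the {blocks.foo} placeholder is replaced by the tokens `foo a`,
// yielding `header { foo a }` inside the importing site block.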

if maybeSnippet {
tokensCopy = append(tokensCopy, token)
@@ -511,7 +574,7 @@ func (p *parser) doImport(nesting int) error {
// splice the imported tokens in the place of the import statement
// and rewind cursor so Next() will land on first imported token
p.tokens = append(tokensBefore, append(tokensCopy, tokensAfter...)...)
-p.cursor -= len(args) + 1
+p.cursor -= len(args) + len(blockTokens) + 1

return nil
}
@@ -0,0 +1,58 @@
(snippet) {
header {
{block}
}
}

example.com {
import snippet {
foo bar
}
}
----------
{
"apps": {
"http": {
"servers": {
"srv0": {
"listen": [
":443"
],
"routes": [
{
"match": [
{
"host": [
"example.com"
]
}
],
"handle": [
{
"handler": "subroute",
"routes": [
{
"handle": [
{
"handler": "headers",
"response": {
"set": {
"Foo": [
"bar"
]
}
}
}
]
}
]
}
],
"terminal": true
}
]
}
}
}
}
}
@@ -0,0 +1,56 @@
(snippet) {
{block}
}

example.com {
import snippet {
header foo bar
}
}
----------
{
"apps": {
"http": {
"servers": {
"srv0": {
"listen": [
":443"
],
"routes": [
{
"match": [
{
"host": [
"example.com"
]
}
],
"handle": [
{
"handler": "subroute",
"routes": [
{
"handle": [
{
"handler": "headers",
"response": {
"set": {
"Foo": [
"bar"
]
}
}
}
]
}
]
}
],
"terminal": true
}
]
}
}
}
}
}
@@ -0,0 +1,76 @@
(snippet) {
header {
{blocks.foo}
}
header {
{blocks.bar}
}
}

example.com {
import snippet {
foo {
foo a
}
bar {
bar b
}
}
}
----------
{
"apps": {
"http": {
"servers": {
"srv0": {
"listen": [
":443"
],
"routes": [
{
"match": [
{
"host": [
"example.com"
]
}
],
"handle": [
{
"handler": "subroute",
"routes": [
{
"handle": [
{
"handler": "headers",
"response": {
"set": {
"Foo": [
"a"
]
}
}
},
{
"handler": "headers",
"response": {
"set": {
"Bar": [
"b"
]
}
}
}
]
}
]
}
],
"terminal": true
}
]
}
}
}
}
}
@@ -0,0 +1,82 @@
(snippet) {
header {
{blocks.bar}
}
import sub_snippet {
bar {
{blocks.foo}
}
}
}
(sub_snippet) {
header {
{blocks.bar}
}
}
example.com {
import snippet {
foo {
foo a
}
bar {
bar b
}
}
}
----------
{
"apps": {
"http": {
"servers": {
"srv0": {
"listen": [
":443"
],
"routes": [
{
"match": [
{
"host": [
"example.com"
]
}
],
"handle": [
{
"handler": "subroute",
"routes": [
{
"handle": [
{
"handler": "headers",
"response": {
"set": {
"Bar": [
"b"
]
}
}
},
{
"handler": "headers",
"response": {
"set": {
"Foo": [
"a"
]
}
}
}
]
}
]
}
],
"terminal": true
}
]
}
}
}
}
}