Use vendored modules
Signed-off-by: Izuru Yakumo <yakumo.izuru@chaotic.ninja> git-svn-id: file:///srv/svn/repo/aya/trunk@67 cec141ff-132a-4243-88a5-ce187bd62f94
This commit is contained in:
285
vendor/github.com/eknkc/amber/parser/nodes.go
generated
vendored
Normal file
285
vendor/github.com/eknkc/amber/parser/nodes.go
generated
vendored
Normal file
@@ -0,0 +1,285 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// selfClosingTags lists the HTML void elements: tags rendered without
// a closing tag (checked by Tag.IsSelfClosing).
var selfClosingTags = [...]string{
	"meta",
	"img",
	"link",
	"input",
	"source",
	"area",
	"base",
	"col",
	"br",
	"hr",
}

// doctypes maps doctype shorthands accepted in templates to the full
// declarations they expand to. "5" and "default" both yield the HTML5
// doctype; unknown values are emitted verbatim by Doctype.String.
var doctypes = map[string]string{
	"5":            `<!DOCTYPE html>`,
	"default":      `<!DOCTYPE html>`,
	"xml":          `<?xml version="1.0" encoding="utf-8" ?>`,
	"transitional": `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">`,
	"strict":       `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">`,
	"frameset":     `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">`,
	"1.1":          `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">`,
	"basic":        `<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic 1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">`,
	"mobile":       `<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" "http://www.openmobilealliance.org/tech/DTD/xhtml-mobile12.dtd">`,
}
|
||||
|
||||
// Node is the interface implemented by every AST node produced by the
// parser; it only exposes the node's source position.
type Node interface {
	Pos() SourcePosition
}

// SourcePosition records where a token was found in the template
// source. It is embedded in every concrete node type, which is how
// each node satisfies the Node interface.
type SourcePosition struct {
	LineNum     int    // 1-based line number
	ColNum      int    // 1-based column number
	TokenLength int    // length of the token, in bytes
	Filename    string // template file, empty for in-memory parsers
}

// Pos returns a copy of the position, satisfying Node for every type
// that embeds SourcePosition.
func (s *SourcePosition) Pos() SourcePosition {
	return *s
}
|
||||
|
||||
type Doctype struct {
|
||||
SourcePosition
|
||||
Value string
|
||||
}
|
||||
|
||||
func newDoctype(value string) *Doctype {
|
||||
dt := new(Doctype)
|
||||
dt.Value = value
|
||||
return dt
|
||||
}
|
||||
|
||||
func (d *Doctype) String() string {
|
||||
if defined := doctypes[d.Value]; len(defined) != 0 {
|
||||
return defined
|
||||
}
|
||||
|
||||
return `<!DOCTYPE ` + d.Value + `>`
|
||||
}
|
||||
|
||||
type Comment struct {
|
||||
SourcePosition
|
||||
Value string
|
||||
Block *Block
|
||||
Silent bool
|
||||
}
|
||||
|
||||
func newComment(value string) *Comment {
|
||||
dt := new(Comment)
|
||||
dt.Value = value
|
||||
dt.Block = nil
|
||||
dt.Silent = false
|
||||
return dt
|
||||
}
|
||||
|
||||
type Text struct {
|
||||
SourcePosition
|
||||
Value string
|
||||
Raw bool
|
||||
}
|
||||
|
||||
func newText(value string, raw bool) *Text {
|
||||
dt := new(Text)
|
||||
dt.Value = value
|
||||
dt.Raw = raw
|
||||
return dt
|
||||
}
|
||||
|
||||
type Block struct {
|
||||
SourcePosition
|
||||
Children []Node
|
||||
}
|
||||
|
||||
func newBlock() *Block {
|
||||
block := new(Block)
|
||||
block.Children = make([]Node, 0)
|
||||
return block
|
||||
}
|
||||
|
||||
func (b *Block) push(node Node) {
|
||||
b.Children = append(b.Children, node)
|
||||
}
|
||||
|
||||
func (b *Block) pushFront(node Node) {
|
||||
b.Children = append([]Node{node}, b.Children...)
|
||||
}
|
||||
|
||||
func (b *Block) CanInline() bool {
|
||||
if len(b.Children) == 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
allText := true
|
||||
|
||||
for _, child := range b.Children {
|
||||
if txt, ok := child.(*Text); !ok || txt.Raw {
|
||||
allText = false
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return allText
|
||||
}
|
||||
|
||||
const (
|
||||
NamedBlockDefault = iota
|
||||
NamedBlockAppend
|
||||
NamedBlockPrepend
|
||||
)
|
||||
|
||||
type NamedBlock struct {
|
||||
Block
|
||||
Name string
|
||||
Modifier int
|
||||
}
|
||||
|
||||
func newNamedBlock(name string) *NamedBlock {
|
||||
bb := new(NamedBlock)
|
||||
bb.Name = name
|
||||
bb.Block.Children = make([]Node, 0)
|
||||
bb.Modifier = NamedBlockDefault
|
||||
return bb
|
||||
}
|
||||
|
||||
// Attribute is a single tag attribute. IsRaw attributes hold literal
// content; otherwise Value/Condition are expressions evaluated at
// compile time. Condition, when non-empty, gates the attribute.
type Attribute struct {
	SourcePosition
	Name      string
	Value     string
	IsRaw     bool
	Condition string
}

// Tag is an AST node for an HTML element: its name, attributes, and
// (possibly nil) child block. IsInterpolated marks tags produced from
// interpolation rather than literal source.
type Tag struct {
	SourcePosition
	Block          *Block
	Name           string
	IsInterpolated bool
	Attributes     []Attribute
}
|
||||
|
||||
func newTag(name string) *Tag {
|
||||
tag := new(Tag)
|
||||
tag.Block = nil
|
||||
tag.Name = name
|
||||
tag.Attributes = make([]Attribute, 0)
|
||||
tag.IsInterpolated = false
|
||||
return tag
|
||||
|
||||
}
|
||||
|
||||
func (t *Tag) IsSelfClosing() bool {
|
||||
for _, tag := range selfClosingTags {
|
||||
if tag == t.Name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (t *Tag) IsRawText() bool {
|
||||
return t.Name == "style" || t.Name == "script"
|
||||
}
|
||||
|
||||
type Condition struct {
|
||||
SourcePosition
|
||||
Positive *Block
|
||||
Negative *Block
|
||||
Expression string
|
||||
}
|
||||
|
||||
func newCondition(exp string) *Condition {
|
||||
cond := new(Condition)
|
||||
cond.Expression = exp
|
||||
return cond
|
||||
}
|
||||
|
||||
type Each struct {
|
||||
SourcePosition
|
||||
X string
|
||||
Y string
|
||||
Expression string
|
||||
Block *Block
|
||||
}
|
||||
|
||||
func newEach(exp string) *Each {
|
||||
each := new(Each)
|
||||
each.Expression = exp
|
||||
return each
|
||||
}
|
||||
|
||||
type Assignment struct {
|
||||
SourcePosition
|
||||
X string
|
||||
Expression string
|
||||
}
|
||||
|
||||
func newAssignment(x, expression string) *Assignment {
|
||||
assgn := new(Assignment)
|
||||
assgn.X = x
|
||||
assgn.Expression = expression
|
||||
return assgn
|
||||
}
|
||||
|
||||
type Mixin struct {
|
||||
SourcePosition
|
||||
Block *Block
|
||||
Name string
|
||||
Args []string
|
||||
}
|
||||
|
||||
func newMixin(name, args string) *Mixin {
|
||||
mixin := new(Mixin)
|
||||
mixin.Name = name
|
||||
|
||||
delExp := regexp.MustCompile(`,\s`)
|
||||
mixin.Args = delExp.Split(args, -1)
|
||||
|
||||
for i := 0; i < len(mixin.Args); i++ {
|
||||
mixin.Args[i] = strings.TrimSpace(mixin.Args[i])
|
||||
if mixin.Args[i] == "" {
|
||||
mixin.Args = append(mixin.Args[:i], mixin.Args[i+1:]...)
|
||||
i--
|
||||
}
|
||||
}
|
||||
|
||||
return mixin
|
||||
}
|
||||
|
||||
// MixinCall is an AST node for a mixin invocation (`+name(args)`).
type MixinCall struct {
	SourcePosition
	Name string
	Args []string
}

// newMixinCall creates a MixinCall, splitting the raw argument string
// on comma-plus-whitespace. Quoted arguments are protected from the
// split by temporarily replacing them with a placeholder and restoring
// them afterwards, so a quoted string may itself contain ", ".
func newMixinCall(name, args string) *MixinCall {
	mixinCall := new(MixinCall)
	mixinCall.Name = name

	if args != "" {
		const t = "%s" // placeholder substituted for each quoted argument
		quoteExp := regexp.MustCompile(`"(.*?)"`)
		delExp := regexp.MustCompile(`,\s`)

		// Extract quoted spans, replace them with the placeholder,
		// then split on the delimiter without breaking quoted content.
		quotes := quoteExp.FindAllString(args, -1)
		replaced := quoteExp.ReplaceAllString(args, t)
		mixinCall.Args = delExp.Split(replaced, -1)

		// Restore the quoted originals, in order, wherever an argument
		// is exactly the placeholder.
		qi := 0
		for i, arg := range mixinCall.Args {
			if arg == t {
				mixinCall.Args[i] = quotes[qi]
				qi++
			}
		}
	}

	return mixinCall
}
|
||||
482
vendor/github.com/eknkc/amber/parser/parser.go
generated
vendored
Normal file
482
vendor/github.com/eknkc/amber/parser/parser.go
generated
vendored
Normal file
@@ -0,0 +1,482 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Parser turns a token stream from the scanner into a Block AST.
type Parser struct {
	scanner      *scanner
	filename     string                 // source file, empty for in-memory input
	fs           http.FileSystem        // optional virtual filesystem for imports/extends
	currenttoken *token                 // look-ahead token
	namedBlocks  map[string]*NamedBlock // named blocks seen in this template
	parent       *Parser                // parser of the extended template, if any
	result       *Block                 // memoized Parse result
}
|
||||
|
||||
func newParser(rdr io.Reader) *Parser {
|
||||
p := new(Parser)
|
||||
p.scanner = newScanner(rdr)
|
||||
p.namedBlocks = make(map[string]*NamedBlock)
|
||||
return p
|
||||
}
|
||||
|
||||
func StringParser(input string) (*Parser, error) {
|
||||
return newParser(bytes.NewReader([]byte(input))), nil
|
||||
}
|
||||
|
||||
func ByteParser(input []byte) (*Parser, error) {
|
||||
return newParser(bytes.NewReader(input)), nil
|
||||
}
|
||||
|
||||
// SetFilename sets the filename reported in error positions and used
// to resolve relative import/extends paths.
func (p *Parser) SetFilename(filename string) {
	p.filename = filename
}

// SetVirtualFilesystem sets an http.FileSystem used as a fallback when
// resolving imported or extended templates.
func (p *Parser) SetVirtualFilesystem(fs http.FileSystem) {
	p.fs = fs
}
|
||||
|
||||
func FileParser(filename string) (*Parser, error) {
|
||||
data, err := ioutil.ReadFile(filename)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parser := newParser(bytes.NewReader(data))
|
||||
parser.filename = filename
|
||||
return parser, nil
|
||||
}
|
||||
|
||||
func VirtualFileParser(filename string, fs http.FileSystem) (*Parser, error) {
|
||||
file, err := fs.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
parser := newParser(bytes.NewReader(data))
|
||||
parser.filename = filename
|
||||
parser.fs = fs
|
||||
return parser, nil
|
||||
}
|
||||
|
||||
func (p *Parser) Parse() *Block {
|
||||
if p.result != nil {
|
||||
return p.result
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if rs, ok := r.(string); ok && rs[:len("Amber Error")] == "Amber Error" {
|
||||
panic(r)
|
||||
}
|
||||
|
||||
pos := p.pos()
|
||||
|
||||
if len(pos.Filename) > 0 {
|
||||
panic(fmt.Sprintf("Amber Error in <%s>: %v - Line: %d, Column: %d, Length: %d", pos.Filename, r, pos.LineNum, pos.ColNum, pos.TokenLength))
|
||||
} else {
|
||||
panic(fmt.Sprintf("Amber Error: %v - Line: %d, Column: %d, Length: %d", r, pos.LineNum, pos.ColNum, pos.TokenLength))
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
block := newBlock()
|
||||
p.advance()
|
||||
|
||||
for {
|
||||
if p.currenttoken == nil || p.currenttoken.Kind == tokEOF {
|
||||
break
|
||||
}
|
||||
|
||||
if p.currenttoken.Kind == tokBlank {
|
||||
p.advance()
|
||||
continue
|
||||
}
|
||||
|
||||
block.push(p.parse())
|
||||
}
|
||||
|
||||
if p.parent != nil {
|
||||
p.parent.Parse()
|
||||
|
||||
for _, prev := range p.parent.namedBlocks {
|
||||
ours := p.namedBlocks[prev.Name]
|
||||
|
||||
if ours == nil {
|
||||
// Put a copy of the named block into current context, so that sub-templates can use the block
|
||||
p.namedBlocks[prev.Name] = prev
|
||||
continue
|
||||
}
|
||||
|
||||
top := findTopmostParentWithNamedBlock(p, prev.Name)
|
||||
nb := top.namedBlocks[prev.Name]
|
||||
switch ours.Modifier {
|
||||
case NamedBlockAppend:
|
||||
for i := 0; i < len(ours.Children); i++ {
|
||||
nb.push(ours.Children[i])
|
||||
}
|
||||
case NamedBlockPrepend:
|
||||
for i := len(ours.Children) - 1; i >= 0; i-- {
|
||||
nb.pushFront(ours.Children[i])
|
||||
}
|
||||
default:
|
||||
nb.Children = ours.Children
|
||||
}
|
||||
}
|
||||
|
||||
block = p.parent.result
|
||||
}
|
||||
|
||||
p.result = block
|
||||
return block
|
||||
}
|
||||
|
||||
func (p *Parser) pos() SourcePosition {
|
||||
pos := p.scanner.Pos()
|
||||
pos.Filename = p.filename
|
||||
return pos
|
||||
}
|
||||
|
||||
func (p *Parser) parseRelativeFile(filename string) *Parser {
|
||||
if len(p.filename) == 0 {
|
||||
panic("Unable to import or extend " + filename + " in a non filesystem based parser.")
|
||||
}
|
||||
|
||||
filename = filepath.Join(filepath.Dir(p.filename), filename)
|
||||
|
||||
if strings.IndexRune(filepath.Base(filename), '.') < 0 {
|
||||
filename = filename + ".amber"
|
||||
}
|
||||
|
||||
parser, err := FileParser(filename)
|
||||
if err != nil && p.fs != nil {
|
||||
parser, err = VirtualFileParser(filename, p.fs)
|
||||
}
|
||||
if err != nil {
|
||||
panic("Unable to read " + filename + ", Error: " + string(err.Error()))
|
||||
}
|
||||
|
||||
return parser
|
||||
}
|
||||
|
||||
// parse dispatches on the current token kind to the matching node
// parser and returns the resulting AST node. Panics on a token kind
// that cannot start a node.
func (p *Parser) parse() Node {
	switch p.currenttoken.Kind {
	case tokDoctype:
		return p.parseDoctype()
	case tokComment:
		return p.parseComment()
	case tokText:
		return p.parseText()
	case tokIf:
		return p.parseIf()
	case tokEach:
		return p.parseEach()
	case tokImport:
		return p.parseImport()
	case tokTag:
		return p.parseTag()
	case tokAssignment:
		return p.parseAssignment()
	case tokNamedBlock:
		return p.parseNamedBlock()
	case tokExtends:
		return p.parseExtends()
	case tokIndent:
		return p.parseBlock(nil)
	case tokMixin:
		return p.parseMixin()
	case tokMixinCall:
		return p.parseMixinCall()
	}

	panic(fmt.Sprintf("Unexpected token: %d", p.currenttoken.Kind))
}
|
||||
|
||||
func (p *Parser) expect(typ rune) *token {
|
||||
if p.currenttoken.Kind != typ {
|
||||
panic("Unexpected token!")
|
||||
}
|
||||
curtok := p.currenttoken
|
||||
p.advance()
|
||||
return curtok
|
||||
}
|
||||
|
||||
func (p *Parser) advance() {
|
||||
p.currenttoken = p.scanner.Next()
|
||||
}
|
||||
|
||||
// parseExtends handles an `extends` statement: it parses the parent
// template eagerly and records it on p.parent (merged later by Parse).
// Returns an empty block since extends produces no output itself.
// Only one extends per template is allowed.
func (p *Parser) parseExtends() *Block {
	if p.parent != nil {
		panic("Unable to extend multiple parent templates.")
	}

	tok := p.expect(tokExtends)
	parser := p.parseRelativeFile(tok.Value)
	parser.Parse()
	p.parent = parser
	return newBlock()
}
|
||||
|
||||
// parseBlock parses an indented block of nodes, consuming the leading
// tokIndent and trailing tokOutdent. parent is the node owning the
// block; when it is a *Tag, id/class/attribute tokens found directly
// inside the block are attached to that tag instead of becoming nodes
// (this is how conditional attributes are written).
func (p *Parser) parseBlock(parent Node) *Block {
	p.expect(tokIndent)
	block := newBlock()
	block.SourcePosition = p.pos()

	for {
		if p.currenttoken == nil || p.currenttoken.Kind == tokEOF || p.currenttoken.Kind == tokOutdent {
			break
		}

		if p.currenttoken.Kind == tokBlank {
			p.advance()
			continue
		}

		if p.currenttoken.Kind == tokId ||
			p.currenttoken.Kind == tokClassName ||
			p.currenttoken.Kind == tokAttribute {

			if tag, ok := parent.(*Tag); ok {
				attr := p.expect(p.currenttoken.Kind)
				cond := attr.Data["Condition"]

				// Fold the token into the parent tag's attribute list;
				// id/class are raw, [attr] honors its Mode.
				switch attr.Kind {
				case tokId:
					tag.Attributes = append(tag.Attributes, Attribute{p.pos(), "id", attr.Value, true, cond})
				case tokClassName:
					tag.Attributes = append(tag.Attributes, Attribute{p.pos(), "class", attr.Value, true, cond})
				case tokAttribute:
					tag.Attributes = append(tag.Attributes, Attribute{p.pos(), attr.Value, attr.Data["Content"], attr.Data["Mode"] == "raw", cond})
				}

				continue
			} else {
				panic("Conditional attributes must be placed immediately within a parent tag.")
			}
		}

		block.push(p.parse())
	}

	p.expect(tokOutdent)

	return block
}
|
||||
|
||||
// parseIf parses an if statement with optional `else if` chains and an
// optional `else` branch. The readmore label loops so that both the
// positive block and a following else clause are consumed; `else if`
// recurses, nesting the chained condition inside the Negative block.
func (p *Parser) parseIf() *Condition {
	tok := p.expect(tokIf)
	cnd := newCondition(tok.Value)
	cnd.SourcePosition = p.pos()

readmore:
	switch p.currenttoken.Kind {
	case tokIndent:
		cnd.Positive = p.parseBlock(cnd)
		goto readmore
	case tokElse:
		p.expect(tokElse)
		if p.currenttoken.Kind == tokIf {
			cnd.Negative = newBlock()
			cnd.Negative.push(p.parseIf())
		} else if p.currenttoken.Kind == tokIndent {
			cnd.Negative = p.parseBlock(cnd)
		} else {
			panic("Unexpected token!")
		}
		goto readmore
	}

	return cnd
}
|
||||
|
||||
// parseEach parses an `each $x[, $y] in expr` loop. The scanner stores
// the loop variable names in the token's Data under "X" and "Y"; the
// indented body, when present, becomes the loop's block.
func (p *Parser) parseEach() *Each {
	tok := p.expect(tokEach)
	ech := newEach(tok.Value)
	ech.SourcePosition = p.pos()
	ech.X = tok.Data["X"]
	ech.Y = tok.Data["Y"]

	if p.currenttoken.Kind == tokIndent {
		ech.Block = p.parseBlock(ech)
	}

	return ech
}

// parseImport parses an `import path` statement by fully parsing the
// referenced template and splicing its root block in at this position.
func (p *Parser) parseImport() *Block {
	tok := p.expect(tokImport)
	node := p.parseRelativeFile(tok.Value).Parse()
	node.SourcePosition = p.pos()
	return node
}
|
||||
|
||||
// parseNamedBlock parses a `block [append|prepend] name` statement,
// registers the named block for inheritance merging, and returns its
// content. Append/prepend blocks contribute only during merging, so
// they return an empty block in place; default blocks render in place.
func (p *Parser) parseNamedBlock() *Block {
	tok := p.expect(tokNamedBlock)

	if p.namedBlocks[tok.Value] != nil {
		panic("Multiple definitions of named blocks are not permitted. Block " + tok.Value + " has been re defined.")
	}

	block := newNamedBlock(tok.Value)
	block.SourcePosition = p.pos()

	// "Modifier" comes from the scanner: empty, "append", or "prepend".
	if tok.Data["Modifier"] == "append" {
		block.Modifier = NamedBlockAppend
	} else if tok.Data["Modifier"] == "prepend" {
		block.Modifier = NamedBlockPrepend
	}

	if p.currenttoken.Kind == tokIndent {
		block.Block = *(p.parseBlock(nil))
	}

	p.namedBlocks[block.Name] = block

	if block.Modifier == NamedBlockDefault {
		return &block.Block
	}

	return newBlock()
}
|
||||
|
||||
// parseDoctype parses a doctype declaration token into a Doctype node.
func (p *Parser) parseDoctype() *Doctype {
	tok := p.expect(tokDoctype)
	node := newDoctype(tok.Value)
	node.SourcePosition = p.pos()
	return node
}

// parseComment parses a comment token; silent comments (// -) set
// Silent, and an indented body becomes the comment's block.
func (p *Parser) parseComment() *Comment {
	tok := p.expect(tokComment)
	cmnt := newComment(tok.Value)
	cmnt.SourcePosition = p.pos()
	cmnt.Silent = tok.Data["Mode"] == "silent"

	if p.currenttoken.Kind == tokIndent {
		cmnt.Block = p.parseBlock(cmnt)
	}

	return cmnt
}

// parseText parses a text token into a Text node; "raw" mode text is
// emitted without further processing.
func (p *Parser) parseText() *Text {
	tok := p.expect(tokText)
	node := newText(tok.Value, tok.Data["Mode"] == "raw")
	node.SourcePosition = p.pos()
	return node
}

// parseAssignment parses a `$x = expr` token into an Assignment node;
// the variable name comes from the token's "X" data entry.
func (p *Parser) parseAssignment() *Assignment {
	tok := p.expect(tokAssignment)
	node := newAssignment(tok.Data["X"], tok.Value)
	node.SourcePosition = p.pos()
	return node
}
|
||||
|
||||
// parseTag parses a tag and everything attached to it on the same
// line: chained #id, .class, and [attr] tokens, trailing inline text,
// and an optional indented body. The readmore label loops until no
// attachable token follows. For style/script tags the scanner is
// switched into raw mode before reading the body.
func (p *Parser) parseTag() *Tag {
	tok := p.expect(tokTag)
	tag := newTag(tok.Value)
	tag.SourcePosition = p.pos()

	// Lazily create the tag's block; inline text may arrive before
	// (or instead of) an indented body.
	ensureBlock := func() {
		if tag.Block == nil {
			tag.Block = newBlock()
		}
	}

readmore:
	switch p.currenttoken.Kind {
	case tokIndent:
		if tag.IsRawText() {
			p.scanner.readRaw = true
		}

		block := p.parseBlock(tag)
		if tag.Block == nil {
			tag.Block = block
		} else {
			// Inline text already created the block; merge the body in.
			for _, c := range block.Children {
				tag.Block.push(c)
			}
		}
	case tokId:
		id := p.expect(tokId)
		if len(id.Data["Condition"]) > 0 {
			panic("Conditional attributes must be placed in a block within a tag.")
		}
		tag.Attributes = append(tag.Attributes, Attribute{p.pos(), "id", id.Value, true, ""})
		goto readmore
	case tokClassName:
		cls := p.expect(tokClassName)
		if len(cls.Data["Condition"]) > 0 {
			panic("Conditional attributes must be placed in a block within a tag.")
		}
		tag.Attributes = append(tag.Attributes, Attribute{p.pos(), "class", cls.Value, true, ""})
		goto readmore
	case tokAttribute:
		attr := p.expect(tokAttribute)
		if len(attr.Data["Condition"]) > 0 {
			panic("Conditional attributes must be placed in a block within a tag.")
		}
		tag.Attributes = append(tag.Attributes, Attribute{p.pos(), attr.Value, attr.Data["Content"], attr.Data["Mode"] == "raw", ""})
		goto readmore
	case tokText:
		// Inline (non-piped) text on the tag line becomes the first
		// child of the tag's block.
		if p.currenttoken.Data["Mode"] != "piped" {
			ensureBlock()
			tag.Block.pushFront(p.parseText())
			goto readmore
		}
	}

	return tag
}
|
||||
|
||||
// parseMixin parses a mixin definition; the declared argument list
// comes from the token's "Args" data, and an indented body becomes the
// mixin's block.
func (p *Parser) parseMixin() *Mixin {
	tok := p.expect(tokMixin)
	mixin := newMixin(tok.Value, tok.Data["Args"])
	mixin.SourcePosition = p.pos()

	if p.currenttoken.Kind == tokIndent {
		mixin.Block = p.parseBlock(mixin)
	}

	return mixin
}

// parseMixinCall parses a `+name(args)` mixin invocation.
func (p *Parser) parseMixinCall() *MixinCall {
	tok := p.expect(tokMixinCall)
	mixinCall := newMixinCall(tok.Value, tok.Data["Args"])
	mixinCall.SourcePosition = p.pos()
	return mixinCall
}
|
||||
|
||||
// findTopmostParentWithNamedBlock walks up the extends chain from p
// and returns the highest ancestor whose namedBlocks contains name,
// stopping at the first gap in the chain. Returns nil if p itself does
// not define the block.
func findTopmostParentWithNamedBlock(p *Parser, name string) *Parser {
	top := p

	for {
		if top.namedBlocks[name] == nil {
			return nil
		}
		if top.parent == nil {
			return top
		}
		if top.parent.namedBlocks[name] != nil {
			top = top.parent
		} else {
			return top
		}
	}
}
|
||||
501
vendor/github.com/eknkc/amber/parser/scanner.go
generated
vendored
Normal file
501
vendor/github.com/eknkc/amber/parser/scanner.go
generated
vendored
Normal file
@@ -0,0 +1,501 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"container/list"
|
||||
"fmt"
|
||||
"io"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
// Token kinds emitted by the scanner. Values are negative so they do
// not collide with any literal rune value in the token Kind field.
const (
	tokEOF = -(iota + 1)
	tokDoctype
	tokComment
	tokIndent
	tokOutdent
	tokBlank
	tokId
	tokClassName
	tokTag
	tokText
	tokAttribute
	tokIf
	tokElse
	tokEach
	tokAssignment
	tokImport
	tokNamedBlock
	tokExtends
	tokMixin
	tokMixinCall
)

// Scanner states: at the start of a fresh line, inside a line, or at
// end of input.
const (
	scnNewLine = iota
	scnLine
	scnEOF
)
|
||||
|
||||
// scanner tokenizes template source line by line.
type scanner struct {
	reader      *bufio.Reader
	indentStack *list.List // stack of *regexp.Regexp, one per open indent level
	stash       *list.List // queued *token values (e.g. extra outdents) returned before scanning resumes

	state  int32  // scnNewLine, scnLine, or scnEOF
	buffer string // remainder of the current line, not yet consumed

	line          int // 0-based current line (starts at -1, bumped on each read)
	col           int // 0-based column within the current line
	lastTokenLine int // position of the most recently consumed token
	lastTokenCol  int
	lastTokenSize int

	readRaw bool // when set, the next Next() call reads a raw text block
}

// token is a single lexeme: its kind, raw value, and optional
// kind-specific metadata (e.g. "Mode", "Condition", "Args").
type token struct {
	Kind  rune
	Value string
	Data  map[string]string
}
|
||||
|
||||
func newScanner(r io.Reader) *scanner {
|
||||
s := new(scanner)
|
||||
s.reader = bufio.NewReader(r)
|
||||
s.indentStack = list.New()
|
||||
s.stash = list.New()
|
||||
s.state = scnNewLine
|
||||
s.line = -1
|
||||
s.col = 0
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *scanner) Pos() SourcePosition {
|
||||
return SourcePosition{s.lastTokenLine + 1, s.lastTokenCol + 1, s.lastTokenSize, ""}
|
||||
}
|
||||
|
||||
// Next returns the next token found in the buffer. Stashed tokens are
// drained first; otherwise the scanner dispatches on its state. Within
// a line the scan* helpers are tried in a fixed priority order — the
// first one whose regex matches the buffer wins, with scanText as the
// catch-all.
func (s *scanner) Next() *token {
	// A preceding style/script tag requested raw-text mode.
	if s.readRaw {
		s.readRaw = false
		return s.NextRaw()
	}

	s.ensureBuffer()

	if stashed := s.stash.Front(); stashed != nil {
		tok := stashed.Value.(*token)
		s.stash.Remove(stashed)
		return tok
	}

	switch s.state {
	case scnEOF:
		// Unwind any remaining indentation before the final EOF token.
		if outdent := s.indentStack.Back(); outdent != nil {
			s.indentStack.Remove(outdent)
			return &token{tokOutdent, "", nil}
		}

		return &token{tokEOF, "", nil}
	case scnNewLine:
		s.state = scnLine

		if tok := s.scanIndent(); tok != nil {
			return tok
		}

		// No indent change: re-enter to scan the line content.
		return s.Next()
	case scnLine:
		if tok := s.scanMixin(); tok != nil {
			return tok
		}

		if tok := s.scanMixinCall(); tok != nil {
			return tok
		}

		if tok := s.scanDoctype(); tok != nil {
			return tok
		}

		if tok := s.scanCondition(); tok != nil {
			return tok
		}

		if tok := s.scanEach(); tok != nil {
			return tok
		}

		if tok := s.scanImport(); tok != nil {
			return tok
		}

		if tok := s.scanExtends(); tok != nil {
			return tok
		}

		if tok := s.scanBlock(); tok != nil {
			return tok
		}

		if tok := s.scanAssignment(); tok != nil {
			return tok
		}

		if tok := s.scanTag(); tok != nil {
			return tok
		}

		if tok := s.scanId(); tok != nil {
			return tok
		}

		if tok := s.scanClassName(); tok != nil {
			return tok
		}

		if tok := s.scanAttribute(); tok != nil {
			return tok
		}

		if tok := s.scanComment(); tok != nil {
			return tok
		}

		if tok := s.scanText(); tok != nil {
			return tok
		}
	}

	return nil
}
|
||||
|
||||
func (s *scanner) NextRaw() *token {
|
||||
result := ""
|
||||
level := 0
|
||||
|
||||
for {
|
||||
s.ensureBuffer()
|
||||
|
||||
switch s.state {
|
||||
case scnEOF:
|
||||
return &token{tokText, result, map[string]string{"Mode": "raw"}}
|
||||
case scnNewLine:
|
||||
s.state = scnLine
|
||||
|
||||
if tok := s.scanIndent(); tok != nil {
|
||||
if tok.Kind == tokIndent {
|
||||
level++
|
||||
} else if tok.Kind == tokOutdent {
|
||||
level--
|
||||
} else {
|
||||
result = result + "\n"
|
||||
continue
|
||||
}
|
||||
|
||||
if level < 0 {
|
||||
s.stash.PushBack(&token{tokOutdent, "", nil})
|
||||
|
||||
if len(result) > 0 && result[len(result)-1] == '\n' {
|
||||
result = result[:len(result)-1]
|
||||
}
|
||||
|
||||
return &token{tokText, result, map[string]string{"Mode": "raw"}}
|
||||
}
|
||||
}
|
||||
case scnLine:
|
||||
if len(result) > 0 {
|
||||
result = result + "\n"
|
||||
}
|
||||
for i := 0; i < level; i++ {
|
||||
result += "\t"
|
||||
}
|
||||
result = result + s.buffer
|
||||
s.consume(len(s.buffer))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// rgxIndent matches the leading whitespace of a line.
var rgxIndent = regexp.MustCompile(`^(\s+)`)

// scanIndent compares the current line's leading whitespace against
// the stack of open indent levels (each stored as a regexp matching
// its exact whitespace). It returns tokBlank for an empty line,
// tokIndent when a new deeper level opens, tokOutdent when levels
// close (extra outdents are stashed), or nil when the depth is
// unchanged. Mixing incompatible whitespace panics.
func (s *scanner) scanIndent() *token {
	if len(s.buffer) == 0 {
		return &token{tokBlank, "", nil}
	}

	// Consume one stacked indent unit per matching level; head stops at
	// the first level this line does not reach.
	var head *list.Element
	for head = s.indentStack.Front(); head != nil; head = head.Next() {
		value := head.Value.(*regexp.Regexp)

		if match := value.FindString(s.buffer); len(match) != 0 {
			s.consume(len(match))
		} else {
			break
		}
	}

	newIndent := rgxIndent.FindString(s.buffer)

	// Extra whitespace beyond all open levels: open a new level.
	if len(newIndent) != 0 && head == nil {
		s.indentStack.PushBack(regexp.MustCompile(regexp.QuoteMeta(newIndent)))
		s.consume(len(newIndent))
		return &token{tokIndent, newIndent, nil}
	}

	// Line stops short of some open levels: close them, returning one
	// outdent now and stashing the rest.
	if len(newIndent) == 0 && head != nil {
		for head != nil {
			next := head.Next()
			s.indentStack.Remove(head)
			if next == nil {
				return &token{tokOutdent, "", nil}
			} else {
				s.stash.PushBack(&token{tokOutdent, "", nil})
			}
			head = next
		}
	}

	// Leftover whitespace that matches no open level: inconsistent
	// indentation (e.g. tabs vs spaces).
	if len(newIndent) != 0 && head != nil {
		panic("Mismatching indentation. Please use a coherent indent schema.")
	}

	return nil
}
|
||||
|
||||
// rgxDoctype matches `!!! value` or `doctype value`.
var rgxDoctype = regexp.MustCompile(`^(!!!|doctype)\s*(.*)`)

// scanDoctype scans a doctype declaration; a missing value defaults to
// "html". Returns nil when the buffer does not start with a doctype.
func (s *scanner) scanDoctype() *token {
	if sm := rgxDoctype.FindStringSubmatch(s.buffer); len(sm) != 0 {
		if len(sm[2]) == 0 {
			sm[2] = "html"
		}

		s.consume(len(sm[0]))
		return &token{tokDoctype, sm[2], nil}
	}

	return nil
}

// rgxIf matches `if expr`; rgxElse matches a bare `else`.
var rgxIf = regexp.MustCompile(`^if\s+(.+)$`)
var rgxElse = regexp.MustCompile(`^else\s*`)

// scanCondition scans an `if` (returning its expression) or an `else`
// keyword; returns nil when neither matches.
func (s *scanner) scanCondition() *token {
	if sm := rgxIf.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokIf, sm[1], nil}
	}

	if sm := rgxElse.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokElse, "", nil}
	}

	return nil
}

// rgxEach matches `each $x[, $y] in expr`.
var rgxEach = regexp.MustCompile(`^each\s+(\$[\w0-9\-_]*)(?:\s*,\s*(\$[\w0-9\-_]*))?\s+in\s+(.+)$`)

// scanEach scans an each-loop header; the iterated expression is the
// token value, loop variables go into Data["X"] and Data["Y"].
func (s *scanner) scanEach() *token {
	if sm := rgxEach.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokEach, sm[3], map[string]string{"X": sm[1], "Y": sm[2]}}
	}

	return nil
}

// rgxAssignment matches `$x = expr`.
var rgxAssignment = regexp.MustCompile(`^(\$[\w0-9\-_]*)?\s*=\s*(.+)$`)

// scanAssignment scans a variable assignment; the expression is the
// token value, the variable name goes into Data["X"].
func (s *scanner) scanAssignment() *token {
	if sm := rgxAssignment.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokAssignment, sm[2], map[string]string{"X": sm[1]}}
	}

	return nil
}
|
||||
|
||||
// rgxComment matches `// text` (embedded) or `//- text` (silent).
var rgxComment = regexp.MustCompile(`^\/\/(-)?\s*(.*)$`)

// scanComment scans a comment line; Data["Mode"] is "silent" for //-
// comments (dropped from output) and "embed" otherwise.
func (s *scanner) scanComment() *token {
	if sm := rgxComment.FindStringSubmatch(s.buffer); len(sm) != 0 {
		mode := "embed"
		if len(sm[1]) != 0 {
			mode = "silent"
		}

		s.consume(len(sm[0]))
		return &token{tokComment, sm[2], map[string]string{"Mode": mode}}
	}

	return nil
}

// rgxId matches `#name` with an optional ` ? condition` suffix.
var rgxId = regexp.MustCompile(`^#([\w-]+)(?:\s*\?\s*(.*)$)?`)

// scanId scans an #id shorthand; an optional condition after `?` goes
// into Data["Condition"].
func (s *scanner) scanId() *token {
	if sm := rgxId.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokId, sm[1], map[string]string{"Condition": sm[2]}}
	}

	return nil
}

// rgxClassName matches `.name` with an optional ` ? condition` suffix.
var rgxClassName = regexp.MustCompile(`^\.([\w-]+)(?:\s*\?\s*(.*)$)?`)

// scanClassName scans a .class shorthand; an optional condition after
// `?` goes into Data["Condition"].
func (s *scanner) scanClassName() *token {
	if sm := rgxClassName.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokClassName, sm[1], map[string]string{"Condition": sm[2]}}
	}

	return nil
}
|
||||
|
||||
var rgxAttribute = regexp.MustCompile(`^\[([\w\-:@\.]+)\s*(?:=\s*(\"([^\"\\]*)\"|([^\]]+)))?\](?:\s*\?\s*(.*)$)?`)
|
||||
|
||||
func (s *scanner) scanAttribute() *token {
|
||||
if sm := rgxAttribute.FindStringSubmatch(s.buffer); len(sm) != 0 {
|
||||
s.consume(len(sm[0]))
|
||||
|
||||
if len(sm[3]) != 0 || sm[2] == "" {
|
||||
return &token{tokAttribute, sm[1], map[string]string{"Content": sm[3], "Mode": "raw", "Condition": sm[5]}}
|
||||
}
|
||||
|
||||
return &token{tokAttribute, sm[1], map[string]string{"Content": sm[4], "Mode": "expression", "Condition": sm[5]}}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var rgxImport = regexp.MustCompile(`^import\s+([0-9a-zA-Z_\-\. \/]*)$`)
|
||||
|
||||
func (s *scanner) scanImport() *token {
|
||||
if sm := rgxImport.FindStringSubmatch(s.buffer); len(sm) != 0 {
|
||||
s.consume(len(sm[0]))
|
||||
return &token{tokImport, sm[1], nil}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var rgxExtends = regexp.MustCompile(`^extends\s+([0-9a-zA-Z_\-\. \/]*)$`)
|
||||
|
||||
func (s *scanner) scanExtends() *token {
|
||||
if sm := rgxExtends.FindStringSubmatch(s.buffer); len(sm) != 0 {
|
||||
s.consume(len(sm[0]))
|
||||
return &token{tokExtends, sm[1], nil}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
var rgxBlock = regexp.MustCompile(`^block\s+(?:(append|prepend)\s+)?([0-9a-zA-Z_\-\. \/]*)$`)
|
||||
|
||||
func (s *scanner) scanBlock() *token {
|
||||
if sm := rgxBlock.FindStringSubmatch(s.buffer); len(sm) != 0 {
|
||||
s.consume(len(sm[0]))
|
||||
return &token{tokNamedBlock, sm[2], map[string]string{"Modifier": sm[1]}}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// rgxTag matches an element name at the start of the buffer.
var rgxTag = regexp.MustCompile(`^(\w[-:\w]*)`)

// scanTag scans a tag name; the name is the token value.
func (s *scanner) scanTag() *token {
	if sm := rgxTag.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokTag, sm[1], nil}
	}

	return nil
}

// rgxMixin matches `mixin name` with an optional ($arg, ...) list.
var rgxMixin = regexp.MustCompile(`^mixin ([a-zA-Z_-]+\w*)(\(((\$\w*(,\s)?)*)\))?$`)

// scanMixin scans a mixin definition; the name is the token value and
// the raw argument list (without parens) goes into Data["Args"].
func (s *scanner) scanMixin() *token {
	if sm := rgxMixin.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokMixin, sm[1], map[string]string{"Args": sm[3]}}
	}

	return nil
}

// rgxMixinCall matches `+name` with an optional (args) list.
var rgxMixinCall = regexp.MustCompile(`^\+([A-Za-z_-]+\w*)(\((.+(,\s)?)*\))?$`)

// scanMixinCall scans a mixin invocation; the name is the token value
// and the raw argument list (without parens) goes into Data["Args"].
func (s *scanner) scanMixinCall() *token {
	if sm := rgxMixinCall.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))
		return &token{tokMixinCall, sm[1], map[string]string{"Args": sm[3]}}
	}

	return nil
}

// rgxText matches the rest of the line, with an optional leading pipe.
var rgxText = regexp.MustCompile(`^(\|)? ?(.*)$`)

// scanText is the catch-all scanner: any remaining line content
// becomes a text token. A leading `|` marks Mode "piped", otherwise
// Mode is "inline". This regex always matches a non-empty buffer.
func (s *scanner) scanText() *token {
	if sm := rgxText.FindStringSubmatch(s.buffer); len(sm) != 0 {
		s.consume(len(sm[0]))

		mode := "inline"
		if sm[1] == "|" {
			mode = "piped"
		}

		return &token{tokText, sm[2], map[string]string{"Mode": mode}}
	}

	return nil
}
|
||||
|
||||
// consume moves the position forward and removes the beginning of
// s.buffer. Despite the parameter name, the count is in BYTES: the
// buffer is sliced with s.buffer[runes:] and callers pass len()
// results, which are byte counts. Also records the consumed span as
// the last token's position for Pos().
func (s *scanner) consume(runes int) {
	if len(s.buffer) < runes {
		panic(fmt.Sprintf("Unable to consume %d runes from buffer.", runes))
	}

	s.lastTokenLine = s.line
	s.lastTokenCol = s.col
	s.lastTokenSize = runes

	s.buffer = s.buffer[runes:]
	s.col += runes
}
|
||||
|
||||
// ensureBuffer refills s.buffer with the next source line when it is
// empty. Strips the trailing line ending (both LF and CRLF), advances
// the line counter, and switches state to scnNewLine — or scnEOF when
// the input is exhausted. Non-EOF read errors panic.
func (s *scanner) ensureBuffer() {
	if len(s.buffer) > 0 {
		return
	}

	buf, err := s.reader.ReadString('\n')

	if err != nil && err != io.EOF {
		panic(err)
	} else if err != nil && len(buf) == 0 {
		s.state = scnEOF
	} else {
		// endline "LF only" or "\n" use Unix, Linux, modern MacOS X, FreeBSD, BeOS, RISC OS
		if buf[len(buf)-1] == '\n' {
			buf = buf[:len(buf)-1]
		}
		// endline "CR+LF" or "\r\n" use internet protocols, DEC RT-11, Windows, CP/M, MS-DOS, OS/2, Symbian OS
		if len(buf) > 0 && buf[len(buf)-1] == '\r' {
			buf = buf[:len(buf)-1]
		}

		s.state = scnNewLine
		s.buffer = buf
		s.line += 1
		s.col = 0
	}
}
|
||||
Reference in New Issue
Block a user