switch glide to govendor (#43)
Signed-off-by: Bo-Yi Wu <appleboy.tw@gmail.com>
		
							
								
								
									
vendor/github.com/aymerick/raymond/BENCHMARKS.md: 46 lines (generated, vendored, normal file)
| @@ -0,0 +1,46 @@ | ||||
| # Benchmarks | ||||
|  | ||||
| Hardware: MacBookPro11,1 - Intel Core i5 - 2.6 GHz - 8 GB RAM | ||||
|  | ||||
| With: | ||||
|  | ||||
|     - handlebars.js #8cba84df119c317fcebc49fb285518542ca9c2d0 | ||||
|     - raymond #7bbaaf50ed03c96b56687d7fa6c6e04e02375a98 | ||||
|  | ||||
|  | ||||
| ## handlebars.js (ops/ms) | ||||
|  | ||||
|         arguments          198 ±4 (5) | ||||
|         array-each         568 ±23 (5) | ||||
|         array-mustache     522 ±18 (4) | ||||
|         complex             71 ±7 (3) | ||||
|         data                67 ±2 (3) | ||||
|         depth-1             47 ±2 (3) | ||||
|         depth-2             14 ±1 (2) | ||||
|         object-mustache   1099 ±47 (5) | ||||
|         object             907 ±58 (4) | ||||
|         partial-recursion   46 ±3 (4) | ||||
|         partial             68 ±3 (3) | ||||
|         paths             1650 ±50 (3) | ||||
|         string            2552 ±157 (3) | ||||
|         subexpression      141 ±2 (4) | ||||
|         variables         2671 ±83 (4) | ||||
|  | ||||
|  | ||||
| ## raymond | ||||
|  | ||||
|         BenchmarkArguments          200000     6642 ns/op   151 ops/ms | ||||
|         BenchmarkArrayEach          100000    19584 ns/op    51 ops/ms | ||||
|         BenchmarkArrayMustache      100000    17305 ns/op    58 ops/ms | ||||
|         BenchmarkComplex            30000     50270 ns/op    20 ops/ms | ||||
|         BenchmarkData               50000     25551 ns/op    39 ops/ms | ||||
|         BenchmarkDepth1             100000    20162 ns/op    50 ops/ms | ||||
|         BenchmarkDepth2             30000     47782 ns/op    21 ops/ms | ||||
|         BenchmarkObjectMustache     200000     7668 ns/op   130 ops/ms | ||||
|         BenchmarkObject             200000     8843 ns/op   113 ops/ms | ||||
|         BenchmarkPartialRecursion   50000     23139 ns/op    43 ops/ms | ||||
|         BenchmarkPartial            50000     31015 ns/op    32 ops/ms | ||||
|         BenchmarkPath               200000     8997 ns/op   111 ops/ms | ||||
|         BenchmarkString             1000000    1879 ns/op   532 ops/ms | ||||
|         BenchmarkSubExpression      300000     4935 ns/op   203 ops/ms | ||||
|         BenchmarkVariables          200000     6478 ns/op   154 ops/ms | ||||
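The ops/ms column in the raymond table above is just the ns/op figure converted to operations per millisecond (ops/ms = 1e6 / ns/op); for example, 6642 ns/op works out to roughly 151 ops/ms. A minimal sketch of that conversion (the helper name and sample value are illustrative only):

    package main

    import "fmt"

    // opsPerMs converts a Go benchmark result expressed in ns/op into the
    // "operations per millisecond" unit used in the tables above.
    func opsPerMs(nsPerOp float64) float64 {
    	return 1e6 / nsPerOp // 1 ms = 1,000,000 ns
    }

    func main() {
    	fmt.Printf("%.0f ops/ms\n", opsPerMs(6642)) // prints "151 ops/ms", matching BenchmarkArguments
    }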
							
								
								
									
vendor/github.com/aymerick/raymond/CHANGELOG.md: 33 lines (generated, vendored, normal file)
| @@ -0,0 +1,33 @@ | ||||
| # Raymond Changelog | ||||
|  | ||||
| ### Raymond 2.0.1 _(June 01, 2016)_ | ||||
|  | ||||
| - [BUGFIX] Removes data races [#3](https://github.com/aymerick/raymond/issues/3) - Thanks [@markbates](https://github.com/markbates) | ||||
|  | ||||
| ### Raymond 2.0.0 _(May 01, 2016)_ | ||||
|  | ||||
| - [BUGFIX] Fixes passing of context in helper options [#2](https://github.com/aymerick/raymond/issues/2) - Thanks [@GhostRussia](https://github.com/GhostRussia) | ||||
| - [BREAKING] Renames and unexports constants: | ||||
|  | ||||
|   - `handlebars.DUMP_TPL` | ||||
|   - `lexer.ESCAPED_ESCAPED_OPEN_MUSTACHE` | ||||
|   - `lexer.ESCAPED_OPEN_MUSTACHE` | ||||
|   - `lexer.OPEN_MUSTACHE` | ||||
|   - `lexer.CLOSE_MUSTACHE` | ||||
|   - `lexer.CLOSE_STRIP_MUSTACHE` | ||||
|   - `lexer.CLOSE_UNESCAPED_STRIP_MUSTACHE` | ||||
|   - `lexer.DUMP_TOKEN_POS` | ||||
|   - `lexer.DUMP_ALL_TOKENS_VAL` | ||||
|  | ||||
|  | ||||
| ### Raymond 1.1.0 _(June 15, 2015)_ | ||||
|  | ||||
| - Permits template references with lowercase versions of struct fields. | ||||
| - Adds `ParseFile()` function. | ||||
| - Adds `RegisterPartialFile()`, `RegisterPartialFiles()` and `Clone()` methods on `Template`. | ||||
| - Helpers can now be struct methods. | ||||
| - Ensures safe concurrent access to helpers and partials. | ||||
|  | ||||
| ### Raymond 1.0.0 _(June 09, 2015)_ | ||||
|  | ||||
| - This is the first release. Raymond supports almost all handlebars features. See https://github.com/aymerick/raymond#limitations for a list of differences with the javascript implementation. | ||||
							
								
								
									
vendor/github.com/aymerick/raymond/LICENSE: 22 lines (generated, vendored, normal file)
| @@ -0,0 +1,22 @@ | ||||
| The MIT License (MIT) | ||||
|  | ||||
| Copyright (c) 2015 Aymerick JEHANNE | ||||
|  | ||||
| Permission is hereby granted, free of charge, to any person obtaining a copy | ||||
| of this software and associated documentation files (the "Software"), to deal | ||||
| in the Software without restriction, including without limitation the rights | ||||
| to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||||
| copies of the Software, and to permit persons to whom the Software is | ||||
| furnished to do so, subject to the following conditions: | ||||
|  | ||||
| The above copyright notice and this permission notice shall be included in all | ||||
| copies or substantial portions of the Software. | ||||
|  | ||||
| THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||||
| IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||||
| FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||||
| AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||||
| LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | ||||
| OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | ||||
| SOFTWARE. | ||||
|  | ||||
							
								
								
									
vendor/github.com/aymerick/raymond/README.md: 1417 lines (generated, vendored, normal file). File diff suppressed because it is too large.
											
										
									
								
							
							
								
								
									
vendor/github.com/aymerick/raymond/VERSION: 1 line (generated, vendored, normal file)
| @@ -0,0 +1 @@ | ||||
| 2.0.1 | ||||
							
								
								
									
vendor/github.com/aymerick/raymond/ast/node.go: 785 lines (generated, vendored, normal file)
| @@ -0,0 +1,785 @@ | ||||
| // Package ast provides structures to represent a handlebars Abstract Syntax Tree, and a Visitor interface to visit that tree. | ||||
| package ast | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strconv" | ||||
| ) | ||||
|  | ||||
| // References: | ||||
| //   - https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/ast.js | ||||
| //   - https://github.com/wycats/handlebars.js/blob/master/docs/compiler-api.md | ||||
| //   - https://github.com/golang/go/blob/master/src/text/template/parse/node.go | ||||
|  | ||||
| // Node is an element in the AST. | ||||
| type Node interface { | ||||
| 	// node type | ||||
| 	Type() NodeType | ||||
|  | ||||
| 	// location of node in original input string | ||||
| 	Location() Loc | ||||
|  | ||||
| 	// string representation, used for debugging | ||||
| 	String() string | ||||
|  | ||||
| 	// accepts visitor | ||||
| 	Accept(Visitor) interface{} | ||||
| } | ||||
|  | ||||
| // Visitor is the interface to visit an AST. | ||||
| type Visitor interface { | ||||
| 	VisitProgram(*Program) interface{} | ||||
|  | ||||
| 	// statements | ||||
| 	VisitMustache(*MustacheStatement) interface{} | ||||
| 	VisitBlock(*BlockStatement) interface{} | ||||
| 	VisitPartial(*PartialStatement) interface{} | ||||
| 	VisitContent(*ContentStatement) interface{} | ||||
| 	VisitComment(*CommentStatement) interface{} | ||||
|  | ||||
| 	// expressions | ||||
| 	VisitExpression(*Expression) interface{} | ||||
| 	VisitSubExpression(*SubExpression) interface{} | ||||
| 	VisitPath(*PathExpression) interface{} | ||||
|  | ||||
| 	// literals | ||||
| 	VisitString(*StringLiteral) interface{} | ||||
| 	VisitBoolean(*BooleanLiteral) interface{} | ||||
| 	VisitNumber(*NumberLiteral) interface{} | ||||
|  | ||||
| 	// miscellaneous | ||||
| 	VisitHash(*Hash) interface{} | ||||
| 	VisitHashPair(*HashPair) interface{} | ||||
| } | ||||
|  | ||||
| // NodeType represents an AST Node type. | ||||
| type NodeType int | ||||
|  | ||||
| // Type returns itself, and permits struct includers to satisfy that part of Node interface. | ||||
| func (t NodeType) Type() NodeType { | ||||
| 	return t | ||||
| } | ||||
|  | ||||
| const ( | ||||
| 	// NodeProgram is the program node | ||||
| 	NodeProgram NodeType = iota | ||||
|  | ||||
| 	// NodeMustache is the mustache statement node | ||||
| 	NodeMustache | ||||
|  | ||||
| 	// NodeBlock is the block statement node | ||||
| 	NodeBlock | ||||
|  | ||||
| 	// NodePartial is the partial statement node | ||||
| 	NodePartial | ||||
|  | ||||
| 	// NodeContent is the content statement node | ||||
| 	NodeContent | ||||
|  | ||||
| 	// NodeComment is the comment statement node | ||||
| 	NodeComment | ||||
|  | ||||
| 	// NodeExpression is the expression node | ||||
| 	NodeExpression | ||||
|  | ||||
| 	// NodeSubExpression is the subexpression node | ||||
| 	NodeSubExpression | ||||
|  | ||||
| 	// NodePath is the expression path node | ||||
| 	NodePath | ||||
|  | ||||
| 	// NodeBoolean is the literal boolean node | ||||
| 	NodeBoolean | ||||
|  | ||||
| 	// NodeNumber is the literal number node | ||||
| 	NodeNumber | ||||
|  | ||||
| 	// NodeString is the literal string node | ||||
| 	NodeString | ||||
|  | ||||
| 	// NodeHash is the hash node | ||||
| 	NodeHash | ||||
|  | ||||
| 	// NodeHashPair is the hash pair node | ||||
| 	NodeHashPair | ||||
| ) | ||||
|  | ||||
| // Loc represents the position of a parsed node in source file. | ||||
| type Loc struct { | ||||
| 	Pos  int // Byte position | ||||
| 	Line int // Line number | ||||
| } | ||||
|  | ||||
| // Location returns itself, and permits struct includers to satisfy that part of Node interface. | ||||
| func (l Loc) Location() Loc { | ||||
| 	return l | ||||
| } | ||||
|  | ||||
| // Strip describes node whitespace management. | ||||
| type Strip struct { | ||||
| 	Open  bool | ||||
| 	Close bool | ||||
|  | ||||
| 	OpenStandalone   bool | ||||
| 	CloseStandalone  bool | ||||
| 	InlineStandalone bool | ||||
| } | ||||
|  | ||||
| // NewStrip instantiates a Strip for given open and close mustaches. | ||||
| func NewStrip(openStr, closeStr string) *Strip { | ||||
| 	return &Strip{ | ||||
| 		Open:  (len(openStr) > 2) && openStr[2] == '~', | ||||
| 		Close: (len(closeStr) > 2) && closeStr[len(closeStr)-3] == '~', | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // NewStripForStr instantiates a Strip for given tag. | ||||
| func NewStripForStr(str string) *Strip { | ||||
| 	return &Strip{ | ||||
| 		Open:  (len(str) > 2) && str[2] == '~', | ||||
| 		Close: (len(str) > 2) && str[len(str)-3] == '~', | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (s *Strip) String() string { | ||||
| 	return fmt.Sprintf("Open: %t, Close: %t, OpenStandalone: %t, CloseStandalone: %t, InlineStandalone: %t", s.Open, s.Close, s.OpenStandalone, s.CloseStandalone, s.InlineStandalone) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Program | ||||
| // | ||||
|  | ||||
| // Program represents a program node. | ||||
| type Program struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Body        []Node // [ Statement ... ] | ||||
| 	BlockParams []string | ||||
| 	Chained     bool | ||||
|  | ||||
| 	// whitespace management | ||||
| 	Strip *Strip | ||||
| } | ||||
|  | ||||
| // NewProgram instantiates a new program node. | ||||
| func NewProgram(pos int, line int) *Program { | ||||
| 	return &Program{ | ||||
| 		NodeType: NodeProgram, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *Program) String() string { | ||||
| 	return fmt.Sprintf("Program{Pos: %d}", node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *Program) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitProgram(node) | ||||
| } | ||||
|  | ||||
| // AddStatement adds given statement to program. | ||||
| func (node *Program) AddStatement(statement Node) { | ||||
| 	node.Body = append(node.Body, statement) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Mustache Statement | ||||
| // | ||||
|  | ||||
| // MustacheStatement represents a mustache node. | ||||
| type MustacheStatement struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Unescaped  bool | ||||
| 	Expression *Expression | ||||
|  | ||||
| 	// whitespace management | ||||
| 	Strip *Strip | ||||
| } | ||||
|  | ||||
| // NewMustacheStatement instantiates a new mustache node. | ||||
| func NewMustacheStatement(pos int, line int, unescaped bool) *MustacheStatement { | ||||
| 	return &MustacheStatement{ | ||||
| 		NodeType:  NodeMustache, | ||||
| 		Loc:       Loc{pos, line}, | ||||
| 		Unescaped: unescaped, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *MustacheStatement) String() string { | ||||
| 	return fmt.Sprintf("Mustache{Pos: %d}", node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *MustacheStatement) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitMustache(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Block Statement | ||||
| // | ||||
|  | ||||
| // BlockStatement represents a block node. | ||||
| type BlockStatement struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Expression *Expression | ||||
|  | ||||
| 	Program *Program | ||||
| 	Inverse *Program | ||||
|  | ||||
| 	// whitespace management | ||||
| 	OpenStrip    *Strip | ||||
| 	InverseStrip *Strip | ||||
| 	CloseStrip   *Strip | ||||
| } | ||||
|  | ||||
| // NewBlockStatement instantiates a new block node. | ||||
| func NewBlockStatement(pos int, line int) *BlockStatement { | ||||
| 	return &BlockStatement{ | ||||
| 		NodeType: NodeBlock, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *BlockStatement) String() string { | ||||
| 	return fmt.Sprintf("Block{Pos: %d}", node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *BlockStatement) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitBlock(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Partial Statement | ||||
| // | ||||
|  | ||||
| // PartialStatement represents a partial node. | ||||
| type PartialStatement struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Name   Node   // PathExpression | SubExpression | ||||
| 	Params []Node // [ Expression ... ] | ||||
| 	Hash   *Hash | ||||
|  | ||||
| 	// whitespace management | ||||
| 	Strip  *Strip | ||||
| 	Indent string | ||||
| } | ||||
|  | ||||
| // NewPartialStatement instantiates a new partial node. | ||||
| func NewPartialStatement(pos int, line int) *PartialStatement { | ||||
| 	return &PartialStatement{ | ||||
| 		NodeType: NodePartial, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *PartialStatement) String() string { | ||||
| 	return fmt.Sprintf("Partial{Name:%s, Pos:%d}", node.Name, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *PartialStatement) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitPartial(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Content Statement | ||||
| // | ||||
|  | ||||
| // ContentStatement represents a content node. | ||||
| type ContentStatement struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Value    string | ||||
| 	Original string | ||||
|  | ||||
| 	// whitespace management | ||||
| 	RightStripped bool | ||||
| 	LeftStripped  bool | ||||
| } | ||||
|  | ||||
| // NewContentStatement instantiates a new content node. | ||||
| func NewContentStatement(pos int, line int, val string) *ContentStatement { | ||||
| 	return &ContentStatement{ | ||||
| 		NodeType: NodeContent, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Value:    val, | ||||
| 		Original: val, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *ContentStatement) String() string { | ||||
| 	return fmt.Sprintf("Content{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *ContentStatement) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitContent(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Comment Statement | ||||
| // | ||||
|  | ||||
| // CommentStatement represents a comment node. | ||||
| type CommentStatement struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Value string | ||||
|  | ||||
| 	// whitespace management | ||||
| 	Strip *Strip | ||||
| } | ||||
|  | ||||
| // NewCommentStatement instantiates a new comment node. | ||||
| func NewCommentStatement(pos int, line int, val string) *CommentStatement { | ||||
| 	return &CommentStatement{ | ||||
| 		NodeType: NodeComment, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Value: val, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *CommentStatement) String() string { | ||||
| 	return fmt.Sprintf("Comment{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *CommentStatement) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitComment(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Expression | ||||
| // | ||||
|  | ||||
| // Expression represents an expression node. | ||||
| type Expression struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Path   Node   // PathExpression | StringLiteral | BooleanLiteral | NumberLiteral | ||||
| 	Params []Node // [ Expression ... ] | ||||
| 	Hash   *Hash | ||||
| } | ||||
|  | ||||
| // NewExpression instantiates a new expression node. | ||||
| func NewExpression(pos int, line int) *Expression { | ||||
| 	return &Expression{ | ||||
| 		NodeType: NodeExpression, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *Expression) String() string { | ||||
| 	return fmt.Sprintf("Expr{Path:%s, Pos:%d}", node.Path, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *Expression) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitExpression(node) | ||||
| } | ||||
|  | ||||
| // HelperName returns helper name, or an empty string if this expression can't be a helper. | ||||
| func (node *Expression) HelperName() string { | ||||
| 	path, ok := node.Path.(*PathExpression) | ||||
| 	if !ok { | ||||
| 		return "" | ||||
| 	} | ||||
|  | ||||
| 	if path.Data || (len(path.Parts) != 1) || (path.Depth > 0) || path.Scoped { | ||||
| 		return "" | ||||
| 	} | ||||
|  | ||||
| 	return path.Parts[0] | ||||
| } | ||||
|  | ||||
| // FieldPath returns path expression representing a field path, or nil if this is not a field path. | ||||
| func (node *Expression) FieldPath() *PathExpression { | ||||
| 	path, ok := node.Path.(*PathExpression) | ||||
| 	if !ok { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return path | ||||
| } | ||||
|  | ||||
| // LiteralStr returns the string representation of literal value, with a boolean set to false if this is not a literal. | ||||
| func (node *Expression) LiteralStr() (string, bool) { | ||||
| 	return LiteralStr(node.Path) | ||||
| } | ||||
|  | ||||
| // Canonical returns the canonical form of expression node as a string. | ||||
| func (node *Expression) Canonical() string { | ||||
| 	if str, ok := HelperNameStr(node.Path); ok { | ||||
| 		return str | ||||
| 	} | ||||
|  | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| // HelperNameStr returns the string representation of a helper name, with a boolean set to false if this is not a valid helper name. | ||||
| // | ||||
| // helperName : path | dataName | STRING | NUMBER | BOOLEAN | UNDEFINED | NULL | ||||
| func HelperNameStr(node Node) (string, bool) { | ||||
| 	// PathExpression | ||||
| 	if str, ok := PathExpressionStr(node); ok { | ||||
| 		return str, ok | ||||
| 	} | ||||
|  | ||||
| 	// Literal | ||||
| 	if str, ok := LiteralStr(node); ok { | ||||
| 		return str, ok | ||||
| 	} | ||||
|  | ||||
| 	return "", false | ||||
| } | ||||
|  | ||||
| // PathExpressionStr returns the string representation of path expression value, with a boolean set to false if this is not a path expression. | ||||
| func PathExpressionStr(node Node) (string, bool) { | ||||
| 	if path, ok := node.(*PathExpression); ok { | ||||
| 		result := path.Original | ||||
|  | ||||
| 		// "[foo bar]" => "foo bar" | ||||
| 		if (len(result) >= 2) && (result[0] == '[') && (result[len(result)-1] == ']') { | ||||
| 			result = result[1 : len(result)-1] | ||||
| 		} | ||||
|  | ||||
| 		return result, true | ||||
| 	} | ||||
|  | ||||
| 	return "", false | ||||
| } | ||||
|  | ||||
| // LiteralStr returns the string representation of literal value, with a boolean set to false if this is not a literal. | ||||
| func LiteralStr(node Node) (string, bool) { | ||||
| 	if lit, ok := node.(*StringLiteral); ok { | ||||
| 		return lit.Value, true | ||||
| 	} | ||||
|  | ||||
| 	if lit, ok := node.(*BooleanLiteral); ok { | ||||
| 		return lit.Canonical(), true | ||||
| 	} | ||||
|  | ||||
| 	if lit, ok := node.(*NumberLiteral); ok { | ||||
| 		return lit.Canonical(), true | ||||
| 	} | ||||
|  | ||||
| 	return "", false | ||||
| } | ||||
|  | ||||
| // | ||||
| // SubExpression | ||||
| // | ||||
|  | ||||
| // SubExpression represents a subexpression node. | ||||
| type SubExpression struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Expression *Expression | ||||
| } | ||||
|  | ||||
| // NewSubExpression instantiates a new subexpression node. | ||||
| func NewSubExpression(pos int, line int) *SubExpression { | ||||
| 	return &SubExpression{ | ||||
| 		NodeType: NodeSubExpression, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *SubExpression) String() string { | ||||
| 	return fmt.Sprintf("Sexp{Path:%s, Pos:%d}", node.Expression.Path, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *SubExpression) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitSubExpression(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Path Expression | ||||
| // | ||||
|  | ||||
| // PathExpression represents a path expression node. | ||||
| type PathExpression struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Original string | ||||
| 	Depth    int | ||||
| 	Parts    []string | ||||
| 	Data     bool | ||||
| 	Scoped   bool | ||||
| } | ||||
|  | ||||
| // NewPathExpression instantiates a new path expression node. | ||||
| func NewPathExpression(pos int, line int, data bool) *PathExpression { | ||||
| 	result := &PathExpression{ | ||||
| 		NodeType: NodePath, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Data: data, | ||||
| 	} | ||||
|  | ||||
| 	if data { | ||||
| 		result.Original = "@" | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *PathExpression) String() string { | ||||
| 	return fmt.Sprintf("Path{Original:'%s', Pos:%d}", node.Original, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *PathExpression) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitPath(node) | ||||
| } | ||||
|  | ||||
| // Part adds path part. | ||||
| func (node *PathExpression) Part(part string) { | ||||
| 	node.Original += part | ||||
|  | ||||
| 	switch part { | ||||
| 	case "..": | ||||
| 		node.Depth++ | ||||
| 		node.Scoped = true | ||||
| 	case ".", "this": | ||||
| 		node.Scoped = true | ||||
| 	default: | ||||
| 		node.Parts = append(node.Parts, part) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Sep adds path separator. | ||||
| func (node *PathExpression) Sep(separator string) { | ||||
| 	node.Original += separator | ||||
| } | ||||
|  | ||||
| // IsDataRoot returns true if path expression is @root. | ||||
| func (node *PathExpression) IsDataRoot() bool { | ||||
| 	return node.Data && (node.Parts[0] == "root") | ||||
| } | ||||
|  | ||||
| // | ||||
| // String Literal | ||||
| // | ||||
|  | ||||
| // StringLiteral represents a string node. | ||||
| type StringLiteral struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Value string | ||||
| } | ||||
|  | ||||
| // NewStringLiteral instantiates a new string node. | ||||
| func NewStringLiteral(pos int, line int, val string) *StringLiteral { | ||||
| 	return &StringLiteral{ | ||||
| 		NodeType: NodeString, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Value: val, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *StringLiteral) String() string { | ||||
| 	return fmt.Sprintf("String{Value:'%s', Pos:%d}", node.Value, node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *StringLiteral) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitString(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Boolean Literal | ||||
| // | ||||
|  | ||||
| // BooleanLiteral represents a boolean node. | ||||
| type BooleanLiteral struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Value    bool | ||||
| 	Original string | ||||
| } | ||||
|  | ||||
| // NewBooleanLiteral instantiates a new boolean node. | ||||
| func NewBooleanLiteral(pos int, line int, val bool, original string) *BooleanLiteral { | ||||
| 	return &BooleanLiteral{ | ||||
| 		NodeType: NodeBoolean, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Value:    val, | ||||
| 		Original: original, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *BooleanLiteral) String() string { | ||||
| 	return fmt.Sprintf("Boolean{Value:%s, Pos:%d}", node.Canonical(), node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *BooleanLiteral) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitBoolean(node) | ||||
| } | ||||
|  | ||||
| // Canonical returns the canonical form of boolean node as a string (ie. "true" | "false"). | ||||
| func (node *BooleanLiteral) Canonical() string { | ||||
| 	if node.Value { | ||||
| 		return "true" | ||||
| 	} | ||||
|  | ||||
| 	return "false" | ||||
| } | ||||
|  | ||||
| // | ||||
| // Number Literal | ||||
| // | ||||
|  | ||||
| // NumberLiteral represents a number node. | ||||
| type NumberLiteral struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Value    float64 | ||||
| 	IsInt    bool | ||||
| 	Original string | ||||
| } | ||||
|  | ||||
| // NewNumberLiteral instantiates a new number node. | ||||
| func NewNumberLiteral(pos int, line int, val float64, isInt bool, original string) *NumberLiteral { | ||||
| 	return &NumberLiteral{ | ||||
| 		NodeType: NodeNumber, | ||||
| 		Loc:      Loc{pos, line}, | ||||
|  | ||||
| 		Value:    val, | ||||
| 		IsInt:    isInt, | ||||
| 		Original: original, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *NumberLiteral) String() string { | ||||
| 	return fmt.Sprintf("Number{Value:%s, Pos:%d}", node.Canonical(), node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *NumberLiteral) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitNumber(node) | ||||
| } | ||||
|  | ||||
| // Canonical returns the canonical form of number node as a string (eg: "12", "-1.51"). | ||||
| func (node *NumberLiteral) Canonical() string { | ||||
| 	prec := -1 | ||||
| 	if node.IsInt { | ||||
| 		prec = 0 | ||||
| 	} | ||||
| 	return strconv.FormatFloat(node.Value, 'f', prec, 64) | ||||
| } | ||||
|  | ||||
| // Number returns an integer or a float. | ||||
| func (node *NumberLiteral) Number() interface{} { | ||||
| 	if node.IsInt { | ||||
| 		return int(node.Value) | ||||
| 	} | ||||
|  | ||||
| 	return node.Value | ||||
| } | ||||
|  | ||||
| // | ||||
| // Hash | ||||
| // | ||||
|  | ||||
| // Hash represents a hash node. | ||||
| type Hash struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Pairs []*HashPair | ||||
| } | ||||
|  | ||||
| // NewHash instantiates a new hash node. | ||||
| func NewHash(pos int, line int) *Hash { | ||||
| 	return &Hash{ | ||||
| 		NodeType: NodeHash, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *Hash) String() string { | ||||
| 	result := fmt.Sprintf("Hash{[%d", node.Loc.Pos) | ||||
|  | ||||
| 	for i, p := range node.Pairs { | ||||
| 		if i > 0 { | ||||
| 			result += ", " | ||||
| 		} | ||||
| 		result += p.String() | ||||
| 	} | ||||
|  | ||||
| 	return result + fmt.Sprintf("], Pos:%d}", node.Loc.Pos) | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *Hash) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitHash(node) | ||||
| } | ||||
|  | ||||
| // | ||||
| // HashPair | ||||
| // | ||||
|  | ||||
| // HashPair represents a hash pair node. | ||||
| type HashPair struct { | ||||
| 	NodeType | ||||
| 	Loc | ||||
|  | ||||
| 	Key string | ||||
| 	Val Node // Expression | ||||
| } | ||||
|  | ||||
| // NewHashPair instantiates a new hash pair node. | ||||
| func NewHashPair(pos int, line int) *HashPair { | ||||
| 	return &HashPair{ | ||||
| 		NodeType: NodeHashPair, | ||||
| 		Loc:      Loc{pos, line}, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // String returns a string representation of receiver that can be used for debugging. | ||||
| func (node *HashPair) String() string { | ||||
| 	return node.Key + "=" + node.Val.String() | ||||
| } | ||||
|  | ||||
| // Accept is the receiver entry point for visitors. | ||||
| func (node *HashPair) Accept(visitor Visitor) interface{} { | ||||
| 	return visitor.VisitHashPair(node) | ||||
| } | ||||
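As a quick, illustrative sketch (not part of the vendored file), here is how the PathExpression API above accumulates a path such as `../foo.bar` when Part and Sep are called by hand:

    package main

    import (
    	"fmt"

    	"github.com/aymerick/raymond/ast"
    )

    func main() {
    	// Build "../foo.bar" one token at a time.
    	path := ast.NewPathExpression(0, 1, false)
    	path.Part("..") // increments Depth and marks the path as scoped
    	path.Sep("/")
    	path.Part("foo")
    	path.Sep(".")
    	path.Part("bar")

    	fmt.Println(path.Original) // ../foo.bar
    	fmt.Println(path.Depth)    // 1
    	fmt.Println(path.Parts)    // [foo bar]
    	fmt.Println(path.Scoped)   // true
    }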
							
								
								
									
vendor/github.com/aymerick/raymond/ast/print.go: 279 lines (generated, vendored, normal file)
| @@ -0,0 +1,279 @@ | ||||
| package ast | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // printVisitor implements the Visitor interface to print an AST. | ||||
| type printVisitor struct { | ||||
| 	buf   string | ||||
| 	depth int | ||||
|  | ||||
| 	original bool | ||||
| 	inBlock  bool | ||||
| } | ||||
|  | ||||
| func newPrintVisitor() *printVisitor { | ||||
| 	return &printVisitor{} | ||||
| } | ||||
|  | ||||
| // Print returns a string representation of the given AST, which can be used for debugging purposes. | ||||
| func Print(node Node) string { | ||||
| 	visitor := newPrintVisitor() | ||||
| 	node.Accept(visitor) | ||||
| 	return visitor.output() | ||||
| } | ||||
|  | ||||
| func (v *printVisitor) output() string { | ||||
| 	return v.buf | ||||
| } | ||||
|  | ||||
| func (v *printVisitor) indent() { | ||||
| 	for i := 0; i < v.depth; { | ||||
| 		v.buf += "  " | ||||
| 		i++ | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (v *printVisitor) str(val string) { | ||||
| 	v.buf += val | ||||
| } | ||||
|  | ||||
| func (v *printVisitor) nl() { | ||||
| 	v.str("\n") | ||||
| } | ||||
|  | ||||
| func (v *printVisitor) line(val string) { | ||||
| 	v.indent() | ||||
| 	v.str(val) | ||||
| 	v.nl() | ||||
| } | ||||
|  | ||||
| // | ||||
| // Visitor interface | ||||
| // | ||||
|  | ||||
| // Statements | ||||
|  | ||||
| // VisitProgram implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitProgram(node *Program) interface{} { | ||||
| 	if len(node.BlockParams) > 0 { | ||||
| 		v.line("BLOCK PARAMS: [ " + strings.Join(node.BlockParams, " ") + " ]") | ||||
| 	} | ||||
|  | ||||
| 	for _, n := range node.Body { | ||||
| 		n.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitMustache implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitMustache(node *MustacheStatement) interface{} { | ||||
| 	v.indent() | ||||
| 	v.str("{{ ") | ||||
|  | ||||
| 	node.Expression.Accept(v) | ||||
|  | ||||
| 	v.str(" }}") | ||||
| 	v.nl() | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitBlock implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitBlock(node *BlockStatement) interface{} { | ||||
| 	v.inBlock = true | ||||
|  | ||||
| 	v.line("BLOCK:") | ||||
| 	v.depth++ | ||||
|  | ||||
| 	node.Expression.Accept(v) | ||||
|  | ||||
| 	if node.Program != nil { | ||||
| 		v.line("PROGRAM:") | ||||
| 		v.depth++ | ||||
| 		node.Program.Accept(v) | ||||
| 		v.depth-- | ||||
| 	} | ||||
|  | ||||
| 	if node.Inverse != nil { | ||||
| 		// if node.Program != nil { | ||||
| 		// 	v.depth++ | ||||
| 		// } | ||||
|  | ||||
| 		v.line("{{^}}") | ||||
| 		v.depth++ | ||||
| 		node.Inverse.Accept(v) | ||||
| 		v.depth-- | ||||
|  | ||||
| 		// if node.Program != nil { | ||||
| 		// 	v.depth-- | ||||
| 		// } | ||||
| 	} | ||||
|  | ||||
| 	v.inBlock = false | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitPartial implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitPartial(node *PartialStatement) interface{} { | ||||
| 	v.indent() | ||||
| 	v.str("{{> PARTIAL:") | ||||
|  | ||||
| 	v.original = true | ||||
| 	node.Name.Accept(v) | ||||
| 	v.original = false | ||||
|  | ||||
| 	if len(node.Params) > 0 { | ||||
| 		v.str(" ") | ||||
| 		node.Params[0].Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	// hash | ||||
| 	if node.Hash != nil { | ||||
| 		v.str(" ") | ||||
| 		node.Hash.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	v.str(" }}") | ||||
| 	v.nl() | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitContent implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitContent(node *ContentStatement) interface{} { | ||||
| 	v.line("CONTENT[ '" + node.Value + "' ]") | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitComment implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitComment(node *CommentStatement) interface{} { | ||||
| 	v.line("{{! '" + node.Value + "' }}") | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Expressions | ||||
|  | ||||
| // VisitExpression implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitExpression(node *Expression) interface{} { | ||||
| 	if v.inBlock { | ||||
| 		v.indent() | ||||
| 	} | ||||
|  | ||||
| 	// path | ||||
| 	node.Path.Accept(v) | ||||
|  | ||||
| 	// params | ||||
| 	v.str(" [") | ||||
| 	for i, n := range node.Params { | ||||
| 		if i > 0 { | ||||
| 			v.str(", ") | ||||
| 		} | ||||
| 		n.Accept(v) | ||||
| 	} | ||||
| 	v.str("]") | ||||
|  | ||||
| 	// hash | ||||
| 	if node.Hash != nil { | ||||
| 		v.str(" ") | ||||
| 		node.Hash.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	if v.inBlock { | ||||
| 		v.nl() | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitSubExpression implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitSubExpression(node *SubExpression) interface{} { | ||||
| 	node.Expression.Accept(v) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitPath implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitPath(node *PathExpression) interface{} { | ||||
| 	if v.original { | ||||
| 		v.str(node.Original) | ||||
| 	} else { | ||||
| 		path := strings.Join(node.Parts, "/") | ||||
|  | ||||
| 		result := "" | ||||
| 		if node.Data { | ||||
| 			result += "@" | ||||
| 		} | ||||
|  | ||||
| 		v.str(result + "PATH:" + path) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Literals | ||||
|  | ||||
| // VisitString implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitString(node *StringLiteral) interface{} { | ||||
| 	if v.original { | ||||
| 		v.str(node.Value) | ||||
| 	} else { | ||||
| 		v.str("\"" + node.Value + "\"") | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitBoolean implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitBoolean(node *BooleanLiteral) interface{} { | ||||
| 	if v.original { | ||||
| 		v.str(node.Original) | ||||
| 	} else { | ||||
| 		v.str(fmt.Sprintf("BOOLEAN{%s}", node.Canonical())) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitNumber implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitNumber(node *NumberLiteral) interface{} { | ||||
| 	if v.original { | ||||
| 		v.str(node.Original) | ||||
| 	} else { | ||||
| 		v.str(fmt.Sprintf("NUMBER{%s}", node.Canonical())) | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Miscellaneous | ||||
|  | ||||
| // VisitHash implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitHash(node *Hash) interface{} { | ||||
| 	v.str("HASH{") | ||||
|  | ||||
| 	for i, p := range node.Pairs { | ||||
| 		if i > 0 { | ||||
| 			v.str(", ") | ||||
| 		} | ||||
| 		p.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	v.str("}") | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // VisitHashPair implements corresponding Visitor interface method | ||||
| func (v *printVisitor) VisitHashPair(node *HashPair) interface{} { | ||||
| 	v.str(node.Key + "=") | ||||
| 	node.Val.Accept(v) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
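To make the visitor flow concrete, here is a small hand-rolled sketch (not part of the diff) that builds a one-statement AST with the constructors from node.go and dumps it with Print; the expected output follows from the printVisitor methods above:

    package main

    import (
    	"fmt"

    	"github.com/aymerick/raymond/ast"
    )

    func main() {
    	// Equivalent of the template "{{name}}", assembled by hand.
    	path := ast.NewPathExpression(0, 1, false)
    	path.Part("name")

    	expr := ast.NewExpression(0, 1)
    	expr.Path = path

    	mustache := ast.NewMustacheStatement(0, 1, false)
    	mustache.Expression = expr

    	program := ast.NewProgram(0, 1)
    	program.AddStatement(mustache)

    	fmt.Print(ast.Print(program))
    	// Output:
    	// {{ PATH:name [] }}
    }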
							
								
								
									
vendor/github.com/aymerick/raymond/data_frame.go: 95 lines (generated, vendored, normal file)
| @@ -0,0 +1,95 @@ | ||||
| package raymond | ||||
|  | ||||
| import "reflect" | ||||
|  | ||||
| // DataFrame represents a private data frame. | ||||
| // | ||||
| // Cf. private variables documentation at: http://handlebarsjs.com/block_helpers.html | ||||
| type DataFrame struct { | ||||
| 	parent *DataFrame | ||||
| 	data   map[string]interface{} | ||||
| } | ||||
|  | ||||
| // NewDataFrame instantiates a new private data frame. | ||||
| func NewDataFrame() *DataFrame { | ||||
| 	return &DataFrame{ | ||||
| 		data: make(map[string]interface{}), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Copy instantiates a new private data frame with receiver as parent. | ||||
| func (p *DataFrame) Copy() *DataFrame { | ||||
| 	result := NewDataFrame() | ||||
|  | ||||
| 	for k, v := range p.data { | ||||
| 		result.data[k] = v | ||||
| 	} | ||||
|  | ||||
| 	result.parent = p | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // newIterDataFrame instantiates a new private data frame with receiver as parent and with iteration data set (@index, @key, @first, @last) | ||||
| func (p *DataFrame) newIterDataFrame(length int, i int, key interface{}) *DataFrame { | ||||
| 	result := p.Copy() | ||||
|  | ||||
| 	result.Set("index", i) | ||||
| 	result.Set("key", key) | ||||
| 	result.Set("first", i == 0) | ||||
| 	result.Set("last", i == length-1) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Set sets a data value. | ||||
| func (p *DataFrame) Set(key string, val interface{}) { | ||||
| 	p.data[key] = val | ||||
| } | ||||
|  | ||||
| // Get gets a data value. | ||||
| func (p *DataFrame) Get(key string) interface{} { | ||||
| 	return p.find([]string{key}) | ||||
| } | ||||
|  | ||||
| // find gets a deep data value | ||||
| // | ||||
| // @todo This is NOT consistent with the way we resolve data in template (cf. `evalDataPathExpression()`) ! FIX THAT ! | ||||
| func (p *DataFrame) find(parts []string) interface{} { | ||||
| 	data := p.data | ||||
|  | ||||
| 	for i, part := range parts { | ||||
| 		val := data[part] | ||||
| 		if val == nil { | ||||
| 			return nil | ||||
| 		} | ||||
|  | ||||
| 		if i == len(parts)-1 { | ||||
| 			// found | ||||
| 			return val | ||||
| 		} | ||||
|  | ||||
| 		valValue := reflect.ValueOf(val) | ||||
| 		if valValue.Kind() != reflect.Map { | ||||
| 			// not found | ||||
| 			return nil | ||||
| 		} | ||||
|  | ||||
| 		// continue | ||||
| 		data = mapStringInterface(valValue) | ||||
| 	} | ||||
|  | ||||
| 	// not found | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // mapStringInterface converts any `map` to `map[string]interface{}` | ||||
| func mapStringInterface(value reflect.Value) map[string]interface{} { | ||||
| 	result := make(map[string]interface{}) | ||||
|  | ||||
| 	for _, key := range value.MapKeys() { | ||||
| 		result[strValue(key)] = value.MapIndex(key).Interface() | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
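A short usage sketch for the data frame above (illustrative only): values set on a frame are visible through Get, and Copy snapshots them into a child frame whose own additions do not leak back into the parent:

    package main

    import (
    	"fmt"

    	"github.com/aymerick/raymond"
    )

    func main() {
    	parent := raymond.NewDataFrame()
    	parent.Set("root", "home")

    	// Copy duplicates the parent's values into a new frame.
    	child := parent.Copy()
    	child.Set("index", 0)

    	fmt.Println(child.Get("root"))   // home
    	fmt.Println(child.Get("index"))  // 0
    	fmt.Println(parent.Get("index")) // <nil>
    }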
							
								
								
									
vendor/github.com/aymerick/raymond/escape.go: 65 lines (generated, vendored, normal file)
| @@ -0,0 +1,65 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"bytes" | ||||
| 	"strings" | ||||
| ) | ||||
|  | ||||
| // | ||||
| // That whole file is borrowed from https://github.com/golang/go/tree/master/src/html/escape.go | ||||
| // | ||||
| // With changes: | ||||
| //    &#39; => &apos; | ||||
| //    &#34; => &quot; | ||||
| // | ||||
| // To stay in sync with JS implementation, and make mustache tests pass. | ||||
| // | ||||
|  | ||||
| type writer interface { | ||||
| 	WriteString(string) (int, error) | ||||
| } | ||||
|  | ||||
| const escapedChars = `&'<>"` | ||||
|  | ||||
| func escape(w writer, s string) error { | ||||
| 	i := strings.IndexAny(s, escapedChars) | ||||
| 	for i != -1 { | ||||
| 		if _, err := w.WriteString(s[:i]); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		var esc string | ||||
| 		switch s[i] { | ||||
| 		case '&': | ||||
| 			esc = "&amp;" | ||||
| 		case '\'': | ||||
| 			esc = "&apos;" | ||||
| 		case '<': | ||||
| 			esc = "&lt;" | ||||
| 		case '>': | ||||
| 			esc = "&gt;" | ||||
| 		case '"': | ||||
| 			esc = "&quot;" | ||||
| 		default: | ||||
| 			panic("unrecognized escape character") | ||||
| 		} | ||||
| 		s = s[i+1:] | ||||
| 		if _, err := w.WriteString(esc); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 		i = strings.IndexAny(s, escapedChars) | ||||
| 	} | ||||
| 	_, err := w.WriteString(s) | ||||
| 	return err | ||||
| } | ||||
|  | ||||
| // Escape escapes special HTML characters. | ||||
| // | ||||
| // It can be used by helpers that return a SafeString and that need to escape some content by themselves. | ||||
| func Escape(s string) string { | ||||
| 	if strings.IndexAny(s, escapedChars) == -1 { | ||||
| 		return s | ||||
| 	} | ||||
| 	var buf bytes.Buffer | ||||
| 	escape(&buf, s) | ||||
| 	return buf.String() | ||||
| } | ||||
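A tiny usage sketch for Escape (illustrative; it only exercises the `&`, `<` and `>` replacements, which are unambiguous in this file):

    package main

    import (
    	"fmt"

    	"github.com/aymerick/raymond"
    )

    func main() {
    	// Strings containing none of `&'<>"` are returned unchanged, without allocating a buffer.
    	fmt.Println(raymond.Escape("plain text")) // plain text

    	// Special HTML characters are replaced by their entities.
    	fmt.Println(raymond.Escape("a < b && c > d")) // a &lt; b &amp;&amp; c &gt; d
    }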
							
								
								
									
vendor/github.com/aymerick/raymond/eval.go: 1005 lines (generated, vendored, normal file). File diff suppressed because it is too large.
											
										
									
								
							
							
								
								
									
vendor/github.com/aymerick/raymond/helper.go: 382 lines (generated, vendored, normal file)
| @@ -0,0 +1,382 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"log" | ||||
| 	"reflect" | ||||
| 	"sync" | ||||
| ) | ||||
|  | ||||
| // Options represents the options argument provided to helpers and context functions. | ||||
| type Options struct { | ||||
| 	// evaluation visitor | ||||
| 	eval *evalVisitor | ||||
|  | ||||
| 	// params | ||||
| 	params []interface{} | ||||
| 	hash   map[string]interface{} | ||||
| } | ||||
|  | ||||
| // helpers stores all globally registered helpers | ||||
| var helpers = make(map[string]reflect.Value) | ||||
|  | ||||
| // protects global helpers | ||||
| var helpersMutex sync.RWMutex | ||||
|  | ||||
| func init() { | ||||
| 	// register builtin helpers | ||||
| 	RegisterHelper("if", ifHelper) | ||||
| 	RegisterHelper("unless", unlessHelper) | ||||
| 	RegisterHelper("with", withHelper) | ||||
| 	RegisterHelper("each", eachHelper) | ||||
| 	RegisterHelper("log", logHelper) | ||||
| 	RegisterHelper("lookup", lookupHelper) | ||||
| 	RegisterHelper("equal", equalHelper) | ||||
| } | ||||
|  | ||||
| // RegisterHelper registers a global helper. That helper will be available to all templates. | ||||
| func RegisterHelper(name string, helper interface{}) { | ||||
| 	helpersMutex.Lock() | ||||
| 	defer helpersMutex.Unlock() | ||||
|  | ||||
| 	if helpers[name] != zero { | ||||
| 		panic(fmt.Errorf("Helper already registered: %s", name)) | ||||
| 	} | ||||
|  | ||||
| 	val := reflect.ValueOf(helper) | ||||
| 	ensureValidHelper(name, val) | ||||
|  | ||||
| 	helpers[name] = val | ||||
| } | ||||
|  | ||||
| // RegisterHelpers registers several global helpers. Those helpers will be available to all templates. | ||||
| func RegisterHelpers(helpers map[string]interface{}) { | ||||
| 	for name, helper := range helpers { | ||||
| 		RegisterHelper(name, helper) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // ensureValidHelper panics if given helper is not valid | ||||
| func ensureValidHelper(name string, funcValue reflect.Value) { | ||||
| 	if funcValue.Kind() != reflect.Func { | ||||
| 		panic(fmt.Errorf("Helper must be a function: %s", name)) | ||||
| 	} | ||||
|  | ||||
| 	funcType := funcValue.Type() | ||||
|  | ||||
| 	if funcType.NumOut() != 1 { | ||||
| 		panic(fmt.Errorf("Helper function must return a string or a SafeString: %s", name)) | ||||
| 	} | ||||
|  | ||||
| 	// @todo Check if first returned value is a string, SafeString or interface{} ? | ||||
| } | ||||
|  | ||||
| // findHelper finds a globally registered helper | ||||
| func findHelper(name string) reflect.Value { | ||||
| 	helpersMutex.RLock() | ||||
| 	defer helpersMutex.RUnlock() | ||||
|  | ||||
| 	return helpers[name] | ||||
| } | ||||
|  | ||||
| // newOptions instantiates a new Options | ||||
| func newOptions(eval *evalVisitor, params []interface{}, hash map[string]interface{}) *Options { | ||||
| 	return &Options{ | ||||
| 		eval:   eval, | ||||
| 		params: params, | ||||
| 		hash:   hash, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // newEmptyOptions instantiates a new empty Options | ||||
| func newEmptyOptions(eval *evalVisitor) *Options { | ||||
| 	return &Options{ | ||||
| 		eval: eval, | ||||
| 		hash: make(map[string]interface{}), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // | ||||
| // Context Values | ||||
| // | ||||
|  | ||||
| // Value returns field value from current context. | ||||
| func (options *Options) Value(name string) interface{} { | ||||
| 	value := options.eval.evalField(options.eval.curCtx(), name, false) | ||||
| 	if !value.IsValid() { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return value.Interface() | ||||
| } | ||||
|  | ||||
| // ValueStr returns string representation of field value from current context. | ||||
| func (options *Options) ValueStr(name string) string { | ||||
| 	return Str(options.Value(name)) | ||||
| } | ||||
|  | ||||
| // Ctx returns current evaluation context. | ||||
| func (options *Options) Ctx() interface{} { | ||||
| 	return options.eval.curCtx().Interface() | ||||
| } | ||||
|  | ||||
| // | ||||
| // Hash Arguments | ||||
| // | ||||
|  | ||||
| // HashProp returns hash property. | ||||
| func (options *Options) HashProp(name string) interface{} { | ||||
| 	return options.hash[name] | ||||
| } | ||||
|  | ||||
| // HashStr returns string representation of hash property. | ||||
| func (options *Options) HashStr(name string) string { | ||||
| 	return Str(options.hash[name]) | ||||
| } | ||||
|  | ||||
| // Hash returns entire hash. | ||||
| func (options *Options) Hash() map[string]interface{} { | ||||
| 	return options.hash | ||||
| } | ||||
|  | ||||
| // | ||||
| // Parameters | ||||
| // | ||||
|  | ||||
| // Param returns parameter at given position. | ||||
| func (options *Options) Param(pos int) interface{} { | ||||
| 	if len(options.params) > pos { | ||||
| 		return options.params[pos] | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // ParamStr returns string representation of parameter at given position. | ||||
| func (options *Options) ParamStr(pos int) string { | ||||
| 	return Str(options.Param(pos)) | ||||
| } | ||||
|  | ||||
| // Params returns all parameters. | ||||
| func (options *Options) Params() []interface{} { | ||||
| 	return options.params | ||||
| } | ||||
|  | ||||
| // | ||||
| // Private data | ||||
| // | ||||
|  | ||||
| // Data returns private data value. | ||||
| func (options *Options) Data(name string) interface{} { | ||||
| 	return options.eval.dataFrame.Get(name) | ||||
| } | ||||
|  | ||||
| // DataStr returns string representation of private data value. | ||||
| func (options *Options) DataStr(name string) string { | ||||
| 	return Str(options.eval.dataFrame.Get(name)) | ||||
| } | ||||
|  | ||||
| // DataFrame returns current private data frame. | ||||
| func (options *Options) DataFrame() *DataFrame { | ||||
| 	return options.eval.dataFrame | ||||
| } | ||||
|  | ||||
| // NewDataFrame instantiates a new data frame that is a copy of current evaluation data frame. | ||||
| // | ||||
| // Parent of returned data frame is set to current evaluation data frame. | ||||
| func (options *Options) NewDataFrame() *DataFrame { | ||||
| 	return options.eval.dataFrame.Copy() | ||||
| } | ||||
|  | ||||
| // newIterDataFrame instantiates a new data frame and sets iteration-specific vars | ||||
| func (options *Options) newIterDataFrame(length int, i int, key interface{}) *DataFrame { | ||||
| 	return options.eval.dataFrame.newIterDataFrame(length, i, key) | ||||
| } | ||||
|  | ||||
| // | ||||
| // Evaluation | ||||
| // | ||||
|  | ||||
| // evalBlock evaluates block with given context, private data and iteration key | ||||
| func (options *Options) evalBlock(ctx interface{}, data *DataFrame, key interface{}) string { | ||||
| 	result := "" | ||||
|  | ||||
| 	if block := options.eval.curBlock(); (block != nil) && (block.Program != nil) { | ||||
| 		result = options.eval.evalProgram(block.Program, ctx, data, key) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Fn evaluates block with current evaluation context. | ||||
| func (options *Options) Fn() string { | ||||
| 	return options.evalBlock(nil, nil, nil) | ||||
| } | ||||
|  | ||||
| // FnCtxData evaluates block with given context and private data frame. | ||||
| func (options *Options) FnCtxData(ctx interface{}, data *DataFrame) string { | ||||
| 	return options.evalBlock(ctx, data, nil) | ||||
| } | ||||
|  | ||||
| // FnWith evaluates block with given context. | ||||
| func (options *Options) FnWith(ctx interface{}) string { | ||||
| 	return options.evalBlock(ctx, nil, nil) | ||||
| } | ||||
|  | ||||
| // FnData evaluates block with given private data frame. | ||||
| func (options *Options) FnData(data *DataFrame) string { | ||||
| 	return options.evalBlock(nil, data, nil) | ||||
| } | ||||
|  | ||||
| // Inverse evaluates "else block". | ||||
| func (options *Options) Inverse() string { | ||||
| 	result := "" | ||||
| 	if block := options.eval.curBlock(); (block != nil) && (block.Inverse != nil) { | ||||
| 		result, _ = block.Inverse.Accept(options.eval).(string) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Eval evaluates field for given context. | ||||
| func (options *Options) Eval(ctx interface{}, field string) interface{} { | ||||
| 	if ctx == nil { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	if field == "" { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	val := options.eval.evalField(reflect.ValueOf(ctx), field, false) | ||||
| 	if !val.IsValid() { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	return val.Interface() | ||||
| } | ||||
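|  | ||||
| // Usage sketch (added note, not part of the original library source): a custom | ||||
| // block helper typically reads its params and hash options through Options, | ||||
| // then calls Fn() or Inverse() to render the main or "else" block. Assuming | ||||
| // the package's public RegisterHelper API and SafeString type: | ||||
| // | ||||
| //	RegisterHelper("bold", func(options *Options) SafeString { | ||||
| //		return SafeString("<b>" + options.Fn() + "</b>") | ||||
| //	}) | ||||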
|  | ||||
| // | ||||
| // Misc | ||||
| // | ||||
|  | ||||
| // isIncludableZero returns true if 'includeZero' option is set and first param is the number 0 | ||||
| func (options *Options) isIncludableZero() bool { | ||||
| 	b, ok := options.HashProp("includeZero").(bool) | ||||
| 	if ok && b { | ||||
| 		nb, ok := options.Param(0).(int) | ||||
| 		if ok && nb == 0 { | ||||
| 			return true | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
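|  | ||||
| // For illustration (added note, not in the original source): with the | ||||
| // includeZero hash option, the #if helper treats the number 0 as truthy, e.g. | ||||
| // | ||||
| //	{{#if count includeZero=true}}rendered even when count is 0{{/if}} | ||||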
|  | ||||
| // | ||||
| // Builtin helpers | ||||
| // | ||||
|  | ||||
| // #if block helper | ||||
| func ifHelper(conditional interface{}, options *Options) interface{} { | ||||
| 	if options.isIncludableZero() || IsTrue(conditional) { | ||||
| 		return options.Fn() | ||||
| 	} | ||||
|  | ||||
| 	return options.Inverse() | ||||
| } | ||||
|  | ||||
| // #unless block helper | ||||
| func unlessHelper(conditional interface{}, options *Options) interface{} { | ||||
| 	if options.isIncludableZero() || IsTrue(conditional) { | ||||
| 		return options.Inverse() | ||||
| 	} | ||||
|  | ||||
| 	return options.Fn() | ||||
| } | ||||
|  | ||||
| // #with block helper | ||||
| func withHelper(context interface{}, options *Options) interface{} { | ||||
| 	if IsTrue(context) { | ||||
| 		return options.FnWith(context) | ||||
| 	} | ||||
|  | ||||
| 	return options.Inverse() | ||||
| } | ||||
|  | ||||
| // #each block helper | ||||
| func eachHelper(context interface{}, options *Options) interface{} { | ||||
| 	if !IsTrue(context) { | ||||
| 		return options.Inverse() | ||||
| 	} | ||||
|  | ||||
| 	result := "" | ||||
|  | ||||
| 	val := reflect.ValueOf(context) | ||||
| 	switch val.Kind() { | ||||
| 	case reflect.Array, reflect.Slice: | ||||
| 		for i := 0; i < val.Len(); i++ { | ||||
| 			// computes private data | ||||
| 			data := options.newIterDataFrame(val.Len(), i, nil) | ||||
|  | ||||
| 			// evaluates block | ||||
| 			result += options.evalBlock(val.Index(i).Interface(), data, i) | ||||
| 		} | ||||
| 	case reflect.Map: | ||||
| 		// note: a Go map is not ordered, so the result may vary; this behaviour differs from the JS implementation | ||||
| 		keys := val.MapKeys() | ||||
| 		for i := 0; i < len(keys); i++ { | ||||
| 			key := keys[i].Interface() | ||||
| 			ctx := val.MapIndex(keys[i]).Interface() | ||||
|  | ||||
| 			// computes private data | ||||
| 			data := options.newIterDataFrame(len(keys), i, key) | ||||
|  | ||||
| 			// evaluates block | ||||
| 			result += options.evalBlock(ctx, data, key) | ||||
| 		} | ||||
| 	case reflect.Struct: | ||||
| 		var exportedFields []int | ||||
|  | ||||
| 		// collect exported fields only | ||||
| 		for i := 0; i < val.NumField(); i++ { | ||||
| 			if tField := val.Type().Field(i); tField.PkgPath == "" { | ||||
| 				exportedFields = append(exportedFields, i) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		for i, fieldIndex := range exportedFields { | ||||
| 			key := val.Type().Field(fieldIndex).Name | ||||
| 			ctx := val.Field(fieldIndex).Interface() | ||||
|  | ||||
| 			// computes private data | ||||
| 			data := options.newIterDataFrame(len(exportedFields), i, key) | ||||
|  | ||||
| 			// evaluates block | ||||
| 			result += options.evalBlock(ctx, data, key) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
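|  | ||||
| // Illustrative template (added note, not in the original source): the private | ||||
| // data frames created above expose iteration variables to the block, e.g. | ||||
| // | ||||
| //	{{#each items}}{{@index}}: {{this}}{{/each}} | ||||
| // | ||||
| // For maps and structs, {{@key}} holds the current key or field name. | ||||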
|  | ||||
| // #log helper | ||||
| func logHelper(message string) interface{} { | ||||
| 	log.Print(message) | ||||
| 	return "" | ||||
| } | ||||
|  | ||||
| // #lookup helper | ||||
| func lookupHelper(obj interface{}, field string, options *Options) interface{} { | ||||
| 	return Str(options.Eval(obj, field)) | ||||
| } | ||||
|  | ||||
| // #equal helper | ||||
| // Ref: https://github.com/aymerick/raymond/issues/7 | ||||
| func equalHelper(a interface{}, b interface{}, options *Options) interface{} { | ||||
| 	if Str(a) == Str(b) { | ||||
| 		return options.Fn() | ||||
| 	} | ||||
|  | ||||
| 	return "" | ||||
| } | ||||
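|  | ||||
| // Illustrative usage (added note, not in the original source): the helper | ||||
| // above compares the string representations of its two arguments, e.g. | ||||
| // | ||||
| //	{{#equal user.role "admin"}}Welcome, admin{{/equal}} | ||||
| // | ||||
| // The "equal" name assumes the helper is registered under that name elsewhere | ||||
| // in the package. | ||||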
							
								
								
									
639 vendor/github.com/aymerick/raymond/lexer/lexer.go (generated, vendored, Normal file)
							| @@ -0,0 +1,639 @@ | ||||
| // Package lexer provides a handlebars tokenizer. | ||||
| package lexer | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| 	"strings" | ||||
| 	"unicode" | ||||
| 	"unicode/utf8" | ||||
| ) | ||||
|  | ||||
| // References: | ||||
| //   - https://github.com/wycats/handlebars.js/blob/master/src/handlebars.l | ||||
| //   - https://github.com/golang/go/blob/master/src/text/template/parse/lex.go | ||||
|  | ||||
| const ( | ||||
| 	// Mustaches detection | ||||
| 	escapedEscapedOpenMustache  = "\\\\{{" | ||||
| 	escapedOpenMustache         = "\\{{" | ||||
| 	openMustache                = "{{" | ||||
| 	closeMustache               = "}}" | ||||
| 	closeStripMustache          = "~}}" | ||||
| 	closeUnescapedStripMustache = "}~}}" | ||||
| ) | ||||
|  | ||||
| const eof = -1 | ||||
|  | ||||
| // lexFunc represents a function that returns the next lexer function. | ||||
| type lexFunc func(*Lexer) lexFunc | ||||
|  | ||||
| // Lexer is a lexical analyzer. | ||||
| type Lexer struct { | ||||
| 	input    string     // input to scan | ||||
| 	name     string     // lexer name, used for testing purposes | ||||
| 	tokens   chan Token // channel of scanned tokens | ||||
| 	nextFunc lexFunc    // the next function to execute | ||||
|  | ||||
| 	pos   int // current byte position in input string | ||||
| 	line  int // current line position in input string | ||||
| 	width int // size of last rune scanned from input string | ||||
| 	start int // start position of the token we are scanning | ||||
|  | ||||
| 	// the shameful contextual properties needed because `nextFunc` is not enough | ||||
| 	closeComment *regexp.Regexp // regexp to scan close of current comment | ||||
| 	rawBlock     bool           // are we parsing raw block content? | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	lookheadChars        = `[\s` + regexp.QuoteMeta("=~}/)|") + `]` | ||||
| 	literalLookheadChars = `[\s` + regexp.QuoteMeta("~})") + `]` | ||||
|  | ||||
| 	// characters not allowed in an identifier | ||||
| 	unallowedIDChars = " \n\t!\"#%&'()*+,./;<=>@[\\]^`{|}~" | ||||
|  | ||||
| 	// regular expressions | ||||
| 	rID                  = regexp.MustCompile(`^[^` + regexp.QuoteMeta(unallowedIDChars) + `]+`) | ||||
| 	rDotID               = regexp.MustCompile(`^\.` + lookheadChars) | ||||
| 	rTrue                = regexp.MustCompile(`^true` + literalLookheadChars) | ||||
| 	rFalse               = regexp.MustCompile(`^false` + literalLookheadChars) | ||||
| 	rOpenRaw             = regexp.MustCompile(`^\{\{\{\{`) | ||||
| 	rCloseRaw            = regexp.MustCompile(`^\}\}\}\}`) | ||||
| 	rOpenEndRaw          = regexp.MustCompile(`^\{\{\{\{/`) | ||||
| 	rOpenEndRawLookAhead = regexp.MustCompile(`\{\{\{\{/`) | ||||
| 	rOpenUnescaped       = regexp.MustCompile(`^\{\{~?\{`) | ||||
| 	rCloseUnescaped      = regexp.MustCompile(`^\}~?\}\}`) | ||||
| 	rOpenBlock           = regexp.MustCompile(`^\{\{~?#`) | ||||
| 	rOpenEndBlock        = regexp.MustCompile(`^\{\{~?/`) | ||||
| 	rOpenPartial         = regexp.MustCompile(`^\{\{~?>`) | ||||
| 	// {{^}} or {{else}} | ||||
| 	rInverse          = regexp.MustCompile(`^(\{\{~?\^\s*~?\}\}|\{\{~?\s*else\s*~?\}\})`) | ||||
| 	rOpenInverse      = regexp.MustCompile(`^\{\{~?\^`) | ||||
| 	rOpenInverseChain = regexp.MustCompile(`^\{\{~?\s*else`) | ||||
| 	// {{ or {{& | ||||
| 	rOpen            = regexp.MustCompile(`^\{\{~?&?`) | ||||
| 	rClose           = regexp.MustCompile(`^~?\}\}`) | ||||
| 	rOpenBlockParams = regexp.MustCompile(`^as\s+\|`) | ||||
| 	// {{!--  ... --}} | ||||
| 	rOpenCommentDash  = regexp.MustCompile(`^\{\{~?!--\s*`) | ||||
| 	rCloseCommentDash = regexp.MustCompile(`^\s*--~?\}\}`) | ||||
| 	// {{! ... }} | ||||
| 	rOpenComment  = regexp.MustCompile(`^\{\{~?!\s*`) | ||||
| 	rCloseComment = regexp.MustCompile(`^\s*~?\}\}`) | ||||
| ) | ||||
|  | ||||
| // Scan scans given input. | ||||
| // | ||||
| // Tokens can then be fetched sequentially using the NextToken() function on the returned lexer. | ||||
| func Scan(input string) *Lexer { | ||||
| 	return scanWithName(input, "") | ||||
| } | ||||
|  | ||||
| // scanWithName scans given input, with a name used for testing | ||||
| // | ||||
| // Tokens can then be fetched sequentially using the NextToken() function on the returned lexer. | ||||
| func scanWithName(input string, name string) *Lexer { | ||||
| 	result := &Lexer{ | ||||
| 		input:  input, | ||||
| 		name:   name, | ||||
| 		tokens: make(chan Token), | ||||
| 		line:   1, | ||||
| 	} | ||||
|  | ||||
| 	go result.run() | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Collect scans and collects all tokens. | ||||
| // | ||||
| // This should be used for debugging purposes only. Use the Scan() and lexer.NextToken() functions instead. | ||||
| func Collect(input string) []Token { | ||||
| 	var result []Token | ||||
|  | ||||
| 	l := Scan(input) | ||||
| 	for { | ||||
| 		token := l.NextToken() | ||||
| 		result = append(result, token) | ||||
|  | ||||
| 		if token.Kind == TokenEOF || token.Kind == TokenError { | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
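|  | ||||
| // Usage sketch (added note, not part of the original source), using only the | ||||
| // functions defined in this file: | ||||
| // | ||||
| //	l := Scan("{{title}} rocks") | ||||
| //	for { | ||||
| //		tok := l.NextToken() | ||||
| //		fmt.Println(tok) | ||||
| //		if tok.Kind == TokenEOF || tok.Kind == TokenError { | ||||
| //			break | ||||
| //		} | ||||
| //	} | ||||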
|  | ||||
| // NextToken returns the next scanned token. | ||||
| func (l *Lexer) NextToken() Token { | ||||
| 	result := <-l.tokens | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // run starts lexical analysis | ||||
| func (l *Lexer) run() { | ||||
| 	for l.nextFunc = lexContent; l.nextFunc != nil; { | ||||
| 		l.nextFunc = l.nextFunc(l) | ||||
| 	} | ||||
| } | ||||
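|  | ||||
| // Added explanatory note: run implements the classic "state functions" lexing | ||||
| // pattern borrowed from text/template (see references above): each lexFunc | ||||
| // scans one construct, emits tokens on the channel, and returns the function | ||||
| // that scans what comes next, until nil ends the loop. | ||||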
|  | ||||
| // next returns the next character from input, or eof if there is nothing left to scan | ||||
| func (l *Lexer) next() rune { | ||||
| 	if l.pos >= len(l.input) { | ||||
| 		l.width = 0 | ||||
| 		return eof | ||||
| 	} | ||||
|  | ||||
| 	r, w := utf8.DecodeRuneInString(l.input[l.pos:]) | ||||
| 	l.width = w | ||||
| 	l.pos += l.width | ||||
|  | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| func (l *Lexer) produce(kind TokenKind, val string) { | ||||
| 	l.tokens <- Token{kind, val, l.start, l.line} | ||||
|  | ||||
| 	// scanning a new token | ||||
| 	l.start = l.pos | ||||
|  | ||||
| 	// update line number | ||||
| 	l.line += strings.Count(val, "\n") | ||||
| } | ||||
|  | ||||
| // emit emits a new scanned token | ||||
| func (l *Lexer) emit(kind TokenKind) { | ||||
| 	l.produce(kind, l.input[l.start:l.pos]) | ||||
| } | ||||
|  | ||||
| // emitContent emits scanned content | ||||
| func (l *Lexer) emitContent() { | ||||
| 	if l.pos > l.start { | ||||
| 		l.emit(TokenContent) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // emitString emits a scanned string | ||||
| func (l *Lexer) emitString(delimiter rune) { | ||||
| 	str := l.input[l.start:l.pos] | ||||
|  | ||||
| 	// replace escaped delimiters | ||||
| 	str = strings.Replace(str, "\\"+string(delimiter), string(delimiter), -1) | ||||
|  | ||||
| 	l.produce(TokenString, str) | ||||
| } | ||||
|  | ||||
| // peek returns but does not consume the next character in the input | ||||
| func (l *Lexer) peek() rune { | ||||
| 	r := l.next() | ||||
| 	l.backup() | ||||
| 	return r | ||||
| } | ||||
|  | ||||
| // backup steps back one character | ||||
| // | ||||
| // WARNING: Can only be called once per call of next | ||||
| func (l *Lexer) backup() { | ||||
| 	l.pos -= l.width | ||||
| } | ||||
|  | ||||
| // ignore skips all characters that have been scanned up to current position | ||||
| func (l *Lexer) ignore() { | ||||
| 	l.start = l.pos | ||||
| } | ||||
|  | ||||
| // accept scans the next character if it is included in given string | ||||
| func (l *Lexer) accept(valid string) bool { | ||||
| 	if strings.IndexRune(valid, l.next()) >= 0 { | ||||
| 		return true | ||||
| 	} | ||||
|  | ||||
| 	l.backup() | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // acceptRun scans all following characters that are part of given string | ||||
| func (l *Lexer) acceptRun(valid string) { | ||||
| 	for strings.IndexRune(valid, l.next()) >= 0 { | ||||
| 	} | ||||
|  | ||||
| 	l.backup() | ||||
| } | ||||
|  | ||||
| // errorf emits an error token | ||||
| func (l *Lexer) errorf(format string, args ...interface{}) lexFunc { | ||||
| 	l.tokens <- Token{TokenError, fmt.Sprintf(format, args...), l.start, l.line} | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // isString returns true if content at current scanning position starts with given string | ||||
| func (l *Lexer) isString(str string) bool { | ||||
| 	return strings.HasPrefix(l.input[l.pos:], str) | ||||
| } | ||||
|  | ||||
| // findRegexp returns the first string from current scanning position that matches given regular expression | ||||
| func (l *Lexer) findRegexp(r *regexp.Regexp) string { | ||||
| 	return r.FindString(l.input[l.pos:]) | ||||
| } | ||||
|  | ||||
| // indexRegexp returns the index of the first string from current scanning position that matches given regular expression | ||||
| // | ||||
| // It returns -1 if not found | ||||
| func (l *Lexer) indexRegexp(r *regexp.Regexp) int { | ||||
| 	loc := r.FindStringIndex(l.input[l.pos:]) | ||||
| 	if loc == nil { | ||||
| 		return -1 | ||||
| 	} | ||||
| 	return loc[0] | ||||
| } | ||||
|  | ||||
| // lexContent scans content (ie: not between mustaches) | ||||
| func lexContent(l *Lexer) lexFunc { | ||||
| 	var next lexFunc | ||||
|  | ||||
| 	if l.rawBlock { | ||||
| 		if i := l.indexRegexp(rOpenEndRawLookAhead); i != -1 { | ||||
| 			// {{{{/ | ||||
| 			l.rawBlock = false | ||||
| 			l.pos += i | ||||
|  | ||||
| 			next = lexOpenMustache | ||||
| 		} else { | ||||
| 			return l.errorf("Unclosed raw block") | ||||
| 		} | ||||
| 	} else if l.isString(escapedEscapedOpenMustache) { | ||||
| 		// \\{{ | ||||
|  | ||||
| 		// emit content with only one escaped escape | ||||
| 		l.next() | ||||
| 		l.emitContent() | ||||
|  | ||||
| 		// ignore second escaped escape | ||||
| 		l.next() | ||||
| 		l.ignore() | ||||
|  | ||||
| 		next = lexContent | ||||
| 	} else if l.isString(escapedOpenMustache) { | ||||
| 		// \{{ | ||||
| 		next = lexEscapedOpenMustache | ||||
| 	} else if str := l.findRegexp(rOpenCommentDash); str != "" { | ||||
| 		// {{!-- | ||||
| 		l.closeComment = rCloseCommentDash | ||||
|  | ||||
| 		next = lexComment | ||||
| 	} else if str := l.findRegexp(rOpenComment); str != "" { | ||||
| 		// {{! | ||||
| 		l.closeComment = rCloseComment | ||||
|  | ||||
| 		next = lexComment | ||||
| 	} else if l.isString(openMustache) { | ||||
| 		// {{ | ||||
| 		next = lexOpenMustache | ||||
| 	} | ||||
|  | ||||
| 	if next != nil { | ||||
| 		// emit scanned content | ||||
| 		l.emitContent() | ||||
|  | ||||
| 		// scan next token | ||||
| 		return next | ||||
| 	} | ||||
|  | ||||
| 	// scan next rune | ||||
| 	if l.next() == eof { | ||||
| 		// emit scanned content | ||||
| 		l.emitContent() | ||||
|  | ||||
| 		// this is over | ||||
| 		l.emit(TokenEOF) | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	// continue content scanning | ||||
| 	return lexContent | ||||
| } | ||||
|  | ||||
| // lexEscapedOpenMustache scans \{{ | ||||
| func lexEscapedOpenMustache(l *Lexer) lexFunc { | ||||
| 	// ignore escape character | ||||
| 	l.next() | ||||
| 	l.ignore() | ||||
|  | ||||
| 	// scan mustaches | ||||
| 	for l.peek() == '{' { | ||||
| 		l.next() | ||||
| 	} | ||||
|  | ||||
| 	return lexContent | ||||
| } | ||||
|  | ||||
| // lexOpenMustache scans {{ | ||||
| func lexOpenMustache(l *Lexer) lexFunc { | ||||
| 	var str string | ||||
| 	var tok TokenKind | ||||
|  | ||||
| 	nextFunc := lexExpression | ||||
|  | ||||
| 	if str = l.findRegexp(rOpenEndRaw); str != "" { | ||||
| 		tok = TokenOpenEndRawBlock | ||||
| 	} else if str = l.findRegexp(rOpenRaw); str != "" { | ||||
| 		tok = TokenOpenRawBlock | ||||
| 		l.rawBlock = true | ||||
| 	} else if str = l.findRegexp(rOpenUnescaped); str != "" { | ||||
| 		tok = TokenOpenUnescaped | ||||
| 	} else if str = l.findRegexp(rOpenBlock); str != "" { | ||||
| 		tok = TokenOpenBlock | ||||
| 	} else if str = l.findRegexp(rOpenEndBlock); str != "" { | ||||
| 		tok = TokenOpenEndBlock | ||||
| 	} else if str = l.findRegexp(rOpenPartial); str != "" { | ||||
| 		tok = TokenOpenPartial | ||||
| 	} else if str = l.findRegexp(rInverse); str != "" { | ||||
| 		tok = TokenInverse | ||||
| 		nextFunc = lexContent | ||||
| 	} else if str = l.findRegexp(rOpenInverse); str != "" { | ||||
| 		tok = TokenOpenInverse | ||||
| 	} else if str = l.findRegexp(rOpenInverseChain); str != "" { | ||||
| 		tok = TokenOpenInverseChain | ||||
| 	} else if str = l.findRegexp(rOpen); str != "" { | ||||
| 		tok = TokenOpen | ||||
| 	} else { | ||||
| 		// this is rotten | ||||
| 		panic("Current pos MUST be an opening mustache") | ||||
| 	} | ||||
|  | ||||
| 	l.pos += len(str) | ||||
| 	l.emit(tok) | ||||
|  | ||||
| 	return nextFunc | ||||
| } | ||||
|  | ||||
| // lexCloseMustache scans }} or ~}} | ||||
| func lexCloseMustache(l *Lexer) lexFunc { | ||||
| 	var str string | ||||
| 	var tok TokenKind | ||||
|  | ||||
| 	if str = l.findRegexp(rCloseRaw); str != "" { | ||||
| 		// }}}} | ||||
| 		tok = TokenCloseRawBlock | ||||
| 	} else if str = l.findRegexp(rCloseUnescaped); str != "" { | ||||
| 		// }}} | ||||
| 		tok = TokenCloseUnescaped | ||||
| 	} else if str = l.findRegexp(rClose); str != "" { | ||||
| 		// }} | ||||
| 		tok = TokenClose | ||||
| 	} else { | ||||
| 		// this is rotten | ||||
| 		panic("Current pos MUST be a closing mustache") | ||||
| 	} | ||||
|  | ||||
| 	l.pos += len(str) | ||||
| 	l.emit(tok) | ||||
|  | ||||
| 	return lexContent | ||||
| } | ||||
|  | ||||
| // lexExpression scans inside mustaches | ||||
| func lexExpression(l *Lexer) lexFunc { | ||||
| 	// search close mustache delimiter | ||||
| 	if l.isString(closeMustache) || l.isString(closeStripMustache) || l.isString(closeUnescapedStripMustache) { | ||||
| 		return lexCloseMustache | ||||
| 	} | ||||
|  | ||||
| 	// search some patterns before advancing scanning position | ||||
|  | ||||
| 	// "as |" | ||||
| 	if str := l.findRegexp(rOpenBlockParams); str != "" { | ||||
| 		l.pos += len(str) | ||||
| 		l.emit(TokenOpenBlockParams) | ||||
| 		return lexExpression | ||||
| 	} | ||||
|  | ||||
| 	// .. | ||||
| 	if l.isString("..") { | ||||
| 		l.pos += len("..") | ||||
| 		l.emit(TokenID) | ||||
| 		return lexExpression | ||||
| 	} | ||||
|  | ||||
| 	// . | ||||
| 	if str := l.findRegexp(rDotID); str != "" { | ||||
| 		l.pos += len(".") | ||||
| 		l.emit(TokenID) | ||||
| 		return lexExpression | ||||
| 	} | ||||
|  | ||||
| 	// true | ||||
| 	if str := l.findRegexp(rTrue); str != "" { | ||||
| 		l.pos += len("true") | ||||
| 		l.emit(TokenBoolean) | ||||
| 		return lexExpression | ||||
| 	} | ||||
|  | ||||
| 	// false | ||||
| 	if str := l.findRegexp(rFalse); str != "" { | ||||
| 		l.pos += len("false") | ||||
| 		l.emit(TokenBoolean) | ||||
| 		return lexExpression | ||||
| 	} | ||||
|  | ||||
| 	// let's scan next character | ||||
| 	switch r := l.next(); { | ||||
| 	case r == eof: | ||||
| 		return l.errorf("Unclosed expression") | ||||
| 	case isIgnorable(r): | ||||
| 		return lexIgnorable | ||||
| 	case r == '(': | ||||
| 		l.emit(TokenOpenSexpr) | ||||
| 	case r == ')': | ||||
| 		l.emit(TokenCloseSexpr) | ||||
| 	case r == '=': | ||||
| 		l.emit(TokenEquals) | ||||
| 	case r == '@': | ||||
| 		l.emit(TokenData) | ||||
| 	case r == '"' || r == '\'': | ||||
| 		l.backup() | ||||
| 		return lexString | ||||
| 	case r == '/' || r == '.': | ||||
| 		l.emit(TokenSep) | ||||
| 	case r == '|': | ||||
| 		l.emit(TokenCloseBlockParams) | ||||
| 	case r == '+' || r == '-' || (r >= '0' && r <= '9'): | ||||
| 		l.backup() | ||||
| 		return lexNumber | ||||
| 	case r == '[': | ||||
| 		return lexPathLiteral | ||||
| 	case strings.IndexRune(unallowedIDChars, r) < 0: | ||||
| 		l.backup() | ||||
| 		return lexIdentifier | ||||
| 	default: | ||||
| 		return l.errorf("Unexpected character in expression: '%c'", r) | ||||
| 	} | ||||
|  | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // lexComment scans {{!-- or {{! | ||||
| func lexComment(l *Lexer) lexFunc { | ||||
| 	if str := l.findRegexp(l.closeComment); str != "" { | ||||
| 		l.pos += len(str) | ||||
| 		l.emit(TokenComment) | ||||
|  | ||||
| 		return lexContent | ||||
| 	} | ||||
|  | ||||
| 	if r := l.next(); r == eof { | ||||
| 		return l.errorf("Unclosed comment") | ||||
| 	} | ||||
|  | ||||
| 	return lexComment | ||||
| } | ||||
|  | ||||
| // lexIgnorable scans all following ignorable characters | ||||
| func lexIgnorable(l *Lexer) lexFunc { | ||||
| 	for isIgnorable(l.peek()) { | ||||
| 		l.next() | ||||
| 	} | ||||
| 	l.ignore() | ||||
|  | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // lexString scans a string | ||||
| func lexString(l *Lexer) lexFunc { | ||||
| 	// get string delimiter | ||||
| 	delim := l.next() | ||||
| 	var prev rune | ||||
|  | ||||
| 	// ignore delimiter | ||||
| 	l.ignore() | ||||
|  | ||||
| 	for { | ||||
| 		r := l.next() | ||||
| 		if r == eof || r == '\n' { | ||||
| 			return l.errorf("Unterminated string") | ||||
| 		} | ||||
|  | ||||
| 		if (r == delim) && (prev != '\\') { | ||||
| 			break | ||||
| 		} | ||||
|  | ||||
| 		prev = r | ||||
| 	} | ||||
|  | ||||
| 	// remove end delimiter | ||||
| 	l.backup() | ||||
|  | ||||
| 	// emit string | ||||
| 	l.emitString(delim) | ||||
|  | ||||
| 	// skip end delimiter | ||||
| 	l.next() | ||||
| 	l.ignore() | ||||
|  | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // lexNumber scans a number: decimal, octal, hex, float, or imaginary. This | ||||
| // isn't a perfect number scanner - for instance it accepts "." and "0x0.2" | ||||
| // and "089" - but when it's wrong the input is invalid and the parser (via | ||||
| // strconv) will notice. | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go | ||||
| func lexNumber(l *Lexer) lexFunc { | ||||
| 	if !l.scanNumber() { | ||||
| 		return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) | ||||
| 	} | ||||
| 	if sign := l.peek(); sign == '+' || sign == '-' { | ||||
| 		// Complex: 1+2i. No spaces, must end in 'i'. | ||||
| 		if !l.scanNumber() || l.input[l.pos-1] != 'i' { | ||||
| 			return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) | ||||
| 		} | ||||
| 		l.emit(TokenNumber) | ||||
| 	} else { | ||||
| 		l.emit(TokenNumber) | ||||
| 	} | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // scanNumber scans a number | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go | ||||
| func (l *Lexer) scanNumber() bool { | ||||
| 	// Optional leading sign. | ||||
| 	l.accept("+-") | ||||
|  | ||||
| 	// Is it hex? | ||||
| 	digits := "0123456789" | ||||
|  | ||||
| 	if l.accept("0") && l.accept("xX") { | ||||
| 		digits = "0123456789abcdefABCDEF" | ||||
| 	} | ||||
|  | ||||
| 	l.acceptRun(digits) | ||||
|  | ||||
| 	if l.accept(".") { | ||||
| 		l.acceptRun(digits) | ||||
| 	} | ||||
|  | ||||
| 	if l.accept("eE") { | ||||
| 		l.accept("+-") | ||||
| 		l.acceptRun("0123456789") | ||||
| 	} | ||||
|  | ||||
| 	// Is it imaginary? | ||||
| 	l.accept("i") | ||||
|  | ||||
| 	// Next thing mustn't be alphanumeric. | ||||
| 	if isAlphaNumeric(l.peek()) { | ||||
| 		l.next() | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	return true | ||||
| } | ||||
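|  | ||||
| // For example (added note): this accepts inputs such as "42", "-3.5", "0x1A" | ||||
| // and "2i"; as noted above it also lets through oddities like "0x0.2", which | ||||
| // are rejected later when the value is converted with strconv. | ||||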
|  | ||||
| // lexIdentifier scans an ID | ||||
| func lexIdentifier(l *Lexer) lexFunc { | ||||
| 	str := l.findRegexp(rID) | ||||
| 	if len(str) == 0 { | ||||
| 		// this is rotten | ||||
| 		panic("Identifier expected") | ||||
| 	} | ||||
|  | ||||
| 	l.pos += len(str) | ||||
| 	l.emit(TokenID) | ||||
|  | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // lexPathLiteral scans an [ID] | ||||
| func lexPathLiteral(l *Lexer) lexFunc { | ||||
| 	for { | ||||
| 		r := l.next() | ||||
| 		if r == eof || r == '\n' { | ||||
| 			return l.errorf("Unterminated path literal") | ||||
| 		} | ||||
|  | ||||
| 		if r == ']' { | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	l.emit(TokenID) | ||||
|  | ||||
| 	return lexExpression | ||||
| } | ||||
|  | ||||
| // isIgnorable returns true if given character is ignorable (ie. whitespace or line feed) | ||||
| func isIgnorable(r rune) bool { | ||||
| 	return r == ' ' || r == '\t' || r == '\n' | ||||
| } | ||||
|  | ||||
| // isAlphaNumeric reports whether r is an alphabetic, digit, or underscore. | ||||
| // | ||||
| // NOTE borrowed from https://github.com/golang/go/tree/master/src/text/template/parse/lex.go | ||||
| func isAlphaNumeric(r rune) bool { | ||||
| 	return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) | ||||
| } | ||||
							
								
								
									
183 vendor/github.com/aymerick/raymond/lexer/token.go (generated, vendored, Normal file)
							| @@ -0,0 +1,183 @@ | ||||
| package lexer | ||||
|  | ||||
| import "fmt" | ||||
|  | ||||
| const ( | ||||
| 	// TokenError represents an error | ||||
| 	TokenError TokenKind = iota | ||||
|  | ||||
| 	// TokenEOF represents an End Of File | ||||
| 	TokenEOF | ||||
|  | ||||
| 	// | ||||
| 	// Mustache delimiters | ||||
| 	// | ||||
|  | ||||
| 	// TokenOpen is the OPEN token | ||||
| 	TokenOpen | ||||
|  | ||||
| 	// TokenClose is the CLOSE token | ||||
| 	TokenClose | ||||
|  | ||||
| 	// TokenOpenRawBlock is the OPEN_RAW_BLOCK token | ||||
| 	TokenOpenRawBlock | ||||
|  | ||||
| 	// TokenCloseRawBlock is the CLOSE_RAW_BLOCK token | ||||
| 	TokenCloseRawBlock | ||||
|  | ||||
| 	// TokenOpenEndRawBlock is the END_RAW_BLOCK token | ||||
| 	TokenOpenEndRawBlock | ||||
|  | ||||
| 	// TokenOpenUnescaped is the OPEN_UNESCAPED token | ||||
| 	TokenOpenUnescaped | ||||
|  | ||||
| 	// TokenCloseUnescaped is the CLOSE_UNESCAPED token | ||||
| 	TokenCloseUnescaped | ||||
|  | ||||
| 	// TokenOpenBlock is the OPEN_BLOCK token | ||||
| 	TokenOpenBlock | ||||
|  | ||||
| 	// TokenOpenEndBlock is the OPEN_ENDBLOCK token | ||||
| 	TokenOpenEndBlock | ||||
|  | ||||
| 	// TokenInverse is the INVERSE token | ||||
| 	TokenInverse | ||||
|  | ||||
| 	// TokenOpenInverse is the OPEN_INVERSE token | ||||
| 	TokenOpenInverse | ||||
|  | ||||
| 	// TokenOpenInverseChain is the OPEN_INVERSE_CHAIN token | ||||
| 	TokenOpenInverseChain | ||||
|  | ||||
| 	// TokenOpenPartial is the OPEN_PARTIAL token | ||||
| 	TokenOpenPartial | ||||
|  | ||||
| 	// TokenComment is the COMMENT token | ||||
| 	TokenComment | ||||
|  | ||||
| 	// | ||||
| 	// Inside mustaches | ||||
| 	// | ||||
|  | ||||
| 	// TokenOpenSexpr is the OPEN_SEXPR token | ||||
| 	TokenOpenSexpr | ||||
|  | ||||
| 	// TokenCloseSexpr is the CLOSE_SEXPR token | ||||
| 	TokenCloseSexpr | ||||
|  | ||||
| 	// TokenEquals is the EQUALS token | ||||
| 	TokenEquals | ||||
|  | ||||
| 	// TokenData is the DATA token | ||||
| 	TokenData | ||||
|  | ||||
| 	// TokenSep is the SEP token | ||||
| 	TokenSep | ||||
|  | ||||
| 	// TokenOpenBlockParams is the OPEN_BLOCK_PARAMS token | ||||
| 	TokenOpenBlockParams | ||||
|  | ||||
| 	// TokenCloseBlockParams is the CLOSE_BLOCK_PARAMS token | ||||
| 	TokenCloseBlockParams | ||||
|  | ||||
| 	// | ||||
| 	// Tokens with content | ||||
| 	// | ||||
|  | ||||
| 	// TokenContent is the CONTENT token | ||||
| 	TokenContent | ||||
|  | ||||
| 	// TokenID is the ID token | ||||
| 	TokenID | ||||
|  | ||||
| 	// TokenString is the STRING token | ||||
| 	TokenString | ||||
|  | ||||
| 	// TokenNumber is the NUMBER token | ||||
| 	TokenNumber | ||||
|  | ||||
| 	// TokenBoolean is the BOOLEAN token | ||||
| 	TokenBoolean | ||||
| ) | ||||
|  | ||||
| const ( | ||||
| 	// Option to generate token position in its string representation | ||||
| 	dumpTokenPos = false | ||||
|  | ||||
| 	// Option to generate values for all token kinds for their string representations | ||||
| 	dumpAllTokensVal = true | ||||
| ) | ||||
|  | ||||
| // TokenKind represents a Token type. | ||||
| type TokenKind int | ||||
|  | ||||
| // Token represents a scanned token. | ||||
| type Token struct { | ||||
| 	Kind TokenKind // Token kind | ||||
| 	Val  string    // Token value | ||||
|  | ||||
| 	Pos  int // Byte position in input string | ||||
| 	Line int // Line number in input string | ||||
| } | ||||
|  | ||||
| // tokenName maps a token kind to the name used in its string representation | ||||
| var tokenName = map[TokenKind]string{ | ||||
| 	TokenError:            "Error", | ||||
| 	TokenEOF:              "EOF", | ||||
| 	TokenContent:          "Content", | ||||
| 	TokenComment:          "Comment", | ||||
| 	TokenOpen:             "Open", | ||||
| 	TokenClose:            "Close", | ||||
| 	TokenOpenUnescaped:    "OpenUnescaped", | ||||
| 	TokenCloseUnescaped:   "CloseUnescaped", | ||||
| 	TokenOpenBlock:        "OpenBlock", | ||||
| 	TokenOpenEndBlock:     "OpenEndBlock", | ||||
| 	TokenOpenRawBlock:     "OpenRawBlock", | ||||
| 	TokenCloseRawBlock:    "CloseRawBlock", | ||||
| 	TokenOpenEndRawBlock:  "OpenEndRawBlock", | ||||
| 	TokenOpenBlockParams:  "OpenBlockParams", | ||||
| 	TokenCloseBlockParams: "CloseBlockParams", | ||||
| 	TokenInverse:          "Inverse", | ||||
| 	TokenOpenInverse:      "OpenInverse", | ||||
| 	TokenOpenInverseChain: "OpenInverseChain", | ||||
| 	TokenOpenPartial:      "OpenPartial", | ||||
| 	TokenOpenSexpr:        "OpenSexpr", | ||||
| 	TokenCloseSexpr:       "CloseSexpr", | ||||
| 	TokenID:               "ID", | ||||
| 	TokenEquals:           "Equals", | ||||
| 	TokenString:           "String", | ||||
| 	TokenNumber:           "Number", | ||||
| 	TokenBoolean:          "Boolean", | ||||
| 	TokenData:             "Data", | ||||
| 	TokenSep:              "Sep", | ||||
| } | ||||
|  | ||||
| // String returns the token kind string representation for debugging. | ||||
| func (k TokenKind) String() string { | ||||
| 	s := tokenName[k] | ||||
| 	if s == "" { | ||||
| 		return fmt.Sprintf("Token-%d", int(k)) | ||||
| 	} | ||||
| 	return s | ||||
| } | ||||
|  | ||||
| // String returns the token string representation for debugging. | ||||
| func (t Token) String() string { | ||||
| 	result := "" | ||||
|  | ||||
| 	if dumpTokenPos { | ||||
| 		result += fmt.Sprintf("%d:", t.Pos) | ||||
| 	} | ||||
|  | ||||
| 	result += fmt.Sprintf("%s", t.Kind) | ||||
|  | ||||
| 	if (dumpAllTokensVal || (t.Kind >= TokenContent)) && len(t.Val) > 0 { | ||||
| 		if len(t.Val) > 100 { | ||||
| 			result += fmt.Sprintf("{%.20q...}", t.Val) | ||||
| 		} else { | ||||
| 			result += fmt.Sprintf("{%q}", t.Val) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
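|  | ||||
| // For illustration (added note; exact output depends on the dump options | ||||
| // above): with dumpAllTokensVal enabled, the tokens scanned from "{{foo}}" | ||||
| // print as: | ||||
| // | ||||
| //	Open{"{{"} ID{"foo"} Close{"}}"} EOF | ||||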
							
								
								
									
846 vendor/github.com/aymerick/raymond/parser/parser.go (generated, vendored, Normal file)
							| @@ -0,0 +1,846 @@ | ||||
| // Package parser provides a handlebars syntax analyser. It consumes the tokens provided by the lexer to build an AST. | ||||
| package parser | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"regexp" | ||||
| 	"runtime" | ||||
| 	"strconv" | ||||
|  | ||||
| 	"github.com/aymerick/raymond/ast" | ||||
| 	"github.com/aymerick/raymond/lexer" | ||||
| ) | ||||
|  | ||||
| // References: | ||||
| //   - https://github.com/wycats/handlebars.js/blob/master/src/handlebars.yy | ||||
| //   - https://github.com/golang/go/blob/master/src/text/template/parse/parse.go | ||||
|  | ||||
| // parser is a syntax analyzer. | ||||
| type parser struct { | ||||
| 	// Lexer | ||||
| 	lex *lexer.Lexer | ||||
|  | ||||
| 	// Root node | ||||
| 	root ast.Node | ||||
|  | ||||
| 	// Tokens parsed but not consumed yet | ||||
| 	tokens []*lexer.Token | ||||
|  | ||||
| 	// All tokens have been retrieved from lexer | ||||
| 	lexOver bool | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	rOpenComment  = regexp.MustCompile(`^\{\{~?!-?-?`) | ||||
| 	rCloseComment = regexp.MustCompile(`-?-?~?\}\}$`) | ||||
| 	rOpenAmp      = regexp.MustCompile(`^\{\{~?&`) | ||||
| ) | ||||
|  | ||||
| // new instantiates a new parser | ||||
| func new(input string) *parser { | ||||
| 	return &parser{ | ||||
| 		lex: lexer.Scan(input), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Parse analyzes given input and returns the AST root node. | ||||
| func Parse(input string) (result *ast.Program, err error) { | ||||
| 	// recover error | ||||
| 	defer errRecover(&err) | ||||
|  | ||||
| 	parser := new(input) | ||||
|  | ||||
| 	// parse | ||||
| 	result = parser.parseProgram() | ||||
|  | ||||
| 	// check last token | ||||
| 	token := parser.shift() | ||||
| 	if token.Kind != lexer.TokenEOF { | ||||
| 		// Parsing ended before EOF | ||||
| 		errToken(token, "Syntax error") | ||||
| 	} | ||||
|  | ||||
| 	// fix whitespaces | ||||
| 	processWhitespaces(result) | ||||
|  | ||||
| 	// named returned values | ||||
| 	return | ||||
| } | ||||
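|  | ||||
| // Usage sketch (added note, not part of the original source): | ||||
| // | ||||
| //	program, err := parser.Parse("{{#each items}}{{this}}{{/each}}") | ||||
| //	if err != nil { | ||||
| //		// syntax error, with line number and offending token in the message | ||||
| //	} | ||||
| //	_ = program // *ast.Program root node, ready for evaluation | ||||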
|  | ||||
| // errRecover recovers parsing panic | ||||
| func errRecover(errp *error) { | ||||
| 	e := recover() | ||||
| 	if e != nil { | ||||
| 		switch err := e.(type) { | ||||
| 		case runtime.Error: | ||||
| 			panic(e) | ||||
| 		case error: | ||||
| 			*errp = err | ||||
| 		default: | ||||
| 			panic(e) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // errPanic panics | ||||
| func errPanic(err error, line int) { | ||||
| 	panic(fmt.Errorf("Parse error on line %d:\n%s", line, err)) | ||||
| } | ||||
|  | ||||
| // errNode panics with given node info | ||||
| func errNode(node ast.Node, msg string) { | ||||
| 	errPanic(fmt.Errorf("%s\nNode: %s", msg, node), node.Location().Line) | ||||
| } | ||||
|  | ||||
| // errToken panics with given token info | ||||
| func errToken(tok *lexer.Token, msg string) { | ||||
| 	errPanic(fmt.Errorf("%s\nToken: %s", msg, tok), tok.Line) | ||||
| } | ||||
|  | ||||
| // errExpected panics because of an unexpected token kind | ||||
| func errExpected(expect lexer.TokenKind, tok *lexer.Token) { | ||||
| 	errPanic(fmt.Errorf("Expecting %s, got: '%s'", expect, tok), tok.Line) | ||||
| } | ||||
|  | ||||
| // program : statement* | ||||
| func (p *parser) parseProgram() *ast.Program { | ||||
| 	result := ast.NewProgram(p.next().Pos, p.next().Line) | ||||
|  | ||||
| 	for p.isStatement() { | ||||
| 		result.AddStatement(p.parseStatement()) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // statement : mustache | block | rawBlock | partial | content | COMMENT | ||||
| func (p *parser) parseStatement() ast.Node { | ||||
| 	var result ast.Node | ||||
|  | ||||
| 	tok := p.next() | ||||
|  | ||||
| 	switch tok.Kind { | ||||
| 	case lexer.TokenOpen, lexer.TokenOpenUnescaped: | ||||
| 		// mustache | ||||
| 		result = p.parseMustache() | ||||
| 	case lexer.TokenOpenBlock: | ||||
| 		// block | ||||
| 		result = p.parseBlock() | ||||
| 	case lexer.TokenOpenInverse: | ||||
| 		// block | ||||
| 		result = p.parseInverse() | ||||
| 	case lexer.TokenOpenRawBlock: | ||||
| 		// rawBlock | ||||
| 		result = p.parseRawBlock() | ||||
| 	case lexer.TokenOpenPartial: | ||||
| 		// partial | ||||
| 		result = p.parsePartial() | ||||
| 	case lexer.TokenContent: | ||||
| 		// content | ||||
| 		result = p.parseContent() | ||||
| 	case lexer.TokenComment: | ||||
| 		// COMMENT | ||||
| 		result = p.parseComment() | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // isStatement returns true if next token starts a statement | ||||
| func (p *parser) isStatement() bool { | ||||
| 	if !p.have(1) { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	switch p.next().Kind { | ||||
| 	case lexer.TokenOpen, lexer.TokenOpenUnescaped, lexer.TokenOpenBlock, | ||||
| 		lexer.TokenOpenInverse, lexer.TokenOpenRawBlock, lexer.TokenOpenPartial, | ||||
| 		lexer.TokenContent, lexer.TokenComment: | ||||
| 		return true | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // content : CONTENT | ||||
| func (p *parser) parseContent() *ast.ContentStatement { | ||||
| 	// CONTENT | ||||
| 	tok := p.shift() | ||||
| 	if tok.Kind != lexer.TokenContent { | ||||
| 		// @todo This check can be removed if content is optional in a raw block | ||||
| 		errExpected(lexer.TokenContent, tok) | ||||
| 	} | ||||
|  | ||||
| 	return ast.NewContentStatement(tok.Pos, tok.Line, tok.Val) | ||||
| } | ||||
|  | ||||
| // COMMENT | ||||
| func (p *parser) parseComment() *ast.CommentStatement { | ||||
| 	// COMMENT | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	value := rOpenComment.ReplaceAllString(tok.Val, "") | ||||
| 	value = rCloseComment.ReplaceAllString(value, "") | ||||
|  | ||||
| 	result := ast.NewCommentStatement(tok.Pos, tok.Line, value) | ||||
| 	result.Strip = ast.NewStripForStr(tok.Val) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // param* hash? | ||||
| func (p *parser) parseExpressionParamsHash() ([]ast.Node, *ast.Hash) { | ||||
| 	var params []ast.Node | ||||
| 	var hash *ast.Hash | ||||
|  | ||||
| 	// params* | ||||
| 	if p.isParam() { | ||||
| 		params = p.parseParams() | ||||
| 	} | ||||
|  | ||||
| 	// hash? | ||||
| 	if p.isHashSegment() { | ||||
| 		hash = p.parseHash() | ||||
| 	} | ||||
|  | ||||
| 	return params, hash | ||||
| } | ||||
|  | ||||
| // helperName param* hash? | ||||
| func (p *parser) parseExpression(tok *lexer.Token) *ast.Expression { | ||||
| 	result := ast.NewExpression(tok.Pos, tok.Line) | ||||
|  | ||||
| 	// helperName | ||||
| 	result.Path = p.parseHelperName() | ||||
|  | ||||
| 	// param* hash? | ||||
| 	result.Params, result.Hash = p.parseExpressionParamsHash() | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // rawBlock : openRawBlock content endRawBlock | ||||
| // openRawBlock : OPEN_RAW_BLOCK helperName param* hash? CLOSE_RAW_BLOCK | ||||
| // endRawBlock : OPEN_END_RAW_BLOCK helperName CLOSE_RAW_BLOCK | ||||
| func (p *parser) parseRawBlock() *ast.BlockStatement { | ||||
| 	// OPEN_RAW_BLOCK | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	result := ast.NewBlockStatement(tok.Pos, tok.Line) | ||||
|  | ||||
| 	// helperName param* hash? | ||||
| 	result.Expression = p.parseExpression(tok) | ||||
|  | ||||
| 	openName := result.Expression.Canonical() | ||||
|  | ||||
| 	// CLOSE_RAW_BLOCK | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenCloseRawBlock { | ||||
| 		errExpected(lexer.TokenCloseRawBlock, tok) | ||||
| 	} | ||||
|  | ||||
| 	// content | ||||
| 	// @todo Is content mandatory in a raw block? | ||||
| 	content := p.parseContent() | ||||
|  | ||||
| 	program := ast.NewProgram(tok.Pos, tok.Line) | ||||
| 	program.AddStatement(content) | ||||
|  | ||||
| 	result.Program = program | ||||
|  | ||||
| 	// OPEN_END_RAW_BLOCK | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenOpenEndRawBlock { | ||||
| 		// should never happen as it is caught by lexer | ||||
| 		errExpected(lexer.TokenOpenEndRawBlock, tok) | ||||
| 	} | ||||
|  | ||||
| 	// helperName | ||||
| 	endID := p.parseHelperName() | ||||
|  | ||||
| 	closeName, ok := ast.HelperNameStr(endID) | ||||
| 	if !ok { | ||||
| 		errNode(endID, "Erroneous closing expression") | ||||
| 	} | ||||
|  | ||||
| 	if openName != closeName { | ||||
| 		errNode(endID, fmt.Sprintf("%s doesn't match %s", openName, closeName)) | ||||
| 	} | ||||
|  | ||||
| 	// CLOSE_RAW_BLOCK | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenCloseRawBlock { | ||||
| 		errExpected(lexer.TokenCloseRawBlock, tok) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // block : openBlock program inverseChain? closeBlock | ||||
| func (p *parser) parseBlock() *ast.BlockStatement { | ||||
| 	// openBlock | ||||
| 	result, blockParams := p.parseOpenBlock() | ||||
|  | ||||
| 	// program | ||||
| 	program := p.parseProgram() | ||||
| 	program.BlockParams = blockParams | ||||
| 	result.Program = program | ||||
|  | ||||
| 	// inverseChain? | ||||
| 	if p.isInverseChain() { | ||||
| 		result.Inverse = p.parseInverseChain() | ||||
| 	} | ||||
|  | ||||
| 	// closeBlock | ||||
| 	p.parseCloseBlock(result) | ||||
|  | ||||
| 	setBlockInverseStrip(result) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // setBlockInverseStrip is called when parsing `block` (openBlock | openInverse) and `inverseChain` | ||||
| // | ||||
| // TODO: This was totally cargo culted! CHECK THAT! | ||||
| // | ||||
| // cf. prepareBlock() in: | ||||
| //   https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/helper.js | ||||
| func setBlockInverseStrip(block *ast.BlockStatement) { | ||||
| 	if block.Inverse == nil { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if block.Inverse.Chained { | ||||
| 		b, _ := block.Inverse.Body[0].(*ast.BlockStatement) | ||||
| 		b.CloseStrip = block.CloseStrip | ||||
| 	} | ||||
|  | ||||
| 	block.InverseStrip = block.Inverse.Strip | ||||
| } | ||||
|  | ||||
| // block : openInverse program inverseAndProgram? closeBlock | ||||
| func (p *parser) parseInverse() *ast.BlockStatement { | ||||
| 	// openInverse | ||||
| 	result, blockParams := p.parseOpenBlock() | ||||
|  | ||||
| 	// program | ||||
| 	program := p.parseProgram() | ||||
|  | ||||
| 	program.BlockParams = blockParams | ||||
| 	result.Inverse = program | ||||
|  | ||||
| 	// inverseAndProgram? | ||||
| 	if p.isInverse() { | ||||
| 		result.Program = p.parseInverseAndProgram() | ||||
| 	} | ||||
|  | ||||
| 	// closeBlock | ||||
| 	p.parseCloseBlock(result) | ||||
|  | ||||
| 	setBlockInverseStrip(result) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // helperName param* hash? blockParams? | ||||
| func (p *parser) parseOpenBlockExpression(tok *lexer.Token) (*ast.BlockStatement, []string) { | ||||
| 	var blockParams []string | ||||
|  | ||||
| 	result := ast.NewBlockStatement(tok.Pos, tok.Line) | ||||
|  | ||||
| 	// helperName param* hash? | ||||
| 	result.Expression = p.parseExpression(tok) | ||||
|  | ||||
| 	// blockParams? | ||||
| 	if p.isBlockParams() { | ||||
| 		blockParams = p.parseBlockParams() | ||||
| 	} | ||||
|  | ||||
| 	// named returned values | ||||
| 	return result, blockParams | ||||
| } | ||||
|  | ||||
| // inverseChain : openInverseChain program inverseChain? | ||||
| //              | inverseAndProgram | ||||
| func (p *parser) parseInverseChain() *ast.Program { | ||||
| 	if p.isInverse() { | ||||
| 		// inverseAndProgram | ||||
| 		return p.parseInverseAndProgram() | ||||
| 	} | ||||
|  | ||||
| 	result := ast.NewProgram(p.next().Pos, p.next().Line) | ||||
|  | ||||
| 	// openInverseChain | ||||
| 	block, blockParams := p.parseOpenBlock() | ||||
|  | ||||
| 	// program | ||||
| 	program := p.parseProgram() | ||||
|  | ||||
| 	program.BlockParams = blockParams | ||||
| 	block.Program = program | ||||
|  | ||||
| 	// inverseChain? | ||||
| 	if p.isInverseChain() { | ||||
| 		block.Inverse = p.parseInverseChain() | ||||
| 	} | ||||
|  | ||||
| 	setBlockInverseStrip(block) | ||||
|  | ||||
| 	result.Chained = true | ||||
| 	result.AddStatement(block) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Returns true if current token starts an inverse chain | ||||
| func (p *parser) isInverseChain() bool { | ||||
| 	return p.isOpenInverseChain() || p.isInverse() | ||||
| } | ||||
|  | ||||
| // inverseAndProgram : INVERSE program | ||||
| func (p *parser) parseInverseAndProgram() *ast.Program { | ||||
| 	// INVERSE | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	// program | ||||
| 	result := p.parseProgram() | ||||
| 	result.Strip = ast.NewStripForStr(tok.Val) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // openBlock : OPEN_BLOCK helperName param* hash? blockParams? CLOSE | ||||
| // openInverse : OPEN_INVERSE helperName param* hash? blockParams? CLOSE | ||||
| // openInverseChain: OPEN_INVERSE_CHAIN helperName param* hash? blockParams? CLOSE | ||||
| func (p *parser) parseOpenBlock() (*ast.BlockStatement, []string) { | ||||
| 	// OPEN_BLOCK | OPEN_INVERSE | OPEN_INVERSE_CHAIN | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	// helperName param* hash? blockParams? | ||||
| 	result, blockParams := p.parseOpenBlockExpression(tok) | ||||
|  | ||||
| 	// CLOSE | ||||
| 	tokClose := p.shift() | ||||
| 	if tokClose.Kind != lexer.TokenClose { | ||||
| 		errExpected(lexer.TokenClose, tokClose) | ||||
| 	} | ||||
|  | ||||
| 	result.OpenStrip = ast.NewStrip(tok.Val, tokClose.Val) | ||||
|  | ||||
| 	// named returned values | ||||
| 	return result, blockParams | ||||
| } | ||||
|  | ||||
| // closeBlock : OPEN_ENDBLOCK helperName CLOSE | ||||
| func (p *parser) parseCloseBlock(block *ast.BlockStatement) { | ||||
| 	// OPEN_ENDBLOCK | ||||
| 	tok := p.shift() | ||||
| 	if tok.Kind != lexer.TokenOpenEndBlock { | ||||
| 		errExpected(lexer.TokenOpenEndBlock, tok) | ||||
| 	} | ||||
|  | ||||
| 	// helperName | ||||
| 	endID := p.parseHelperName() | ||||
|  | ||||
| 	closeName, ok := ast.HelperNameStr(endID) | ||||
| 	if !ok { | ||||
| 		errNode(endID, "Erroneous closing expression") | ||||
| 	} | ||||
|  | ||||
| 	openName := block.Expression.Canonical() | ||||
| 	if openName != closeName { | ||||
| 		errNode(endID, fmt.Sprintf("%s doesn't match %s", openName, closeName)) | ||||
| 	} | ||||
|  | ||||
| 	// CLOSE | ||||
| 	tokClose := p.shift() | ||||
| 	if tokClose.Kind != lexer.TokenClose { | ||||
| 		errExpected(lexer.TokenClose, tokClose) | ||||
| 	} | ||||
|  | ||||
| 	block.CloseStrip = ast.NewStrip(tok.Val, tokClose.Val) | ||||
| } | ||||
|  | ||||
| // mustache : OPEN helperName param* hash? CLOSE | ||||
| //          | OPEN_UNESCAPED helperName param* hash? CLOSE_UNESCAPED | ||||
| func (p *parser) parseMustache() *ast.MustacheStatement { | ||||
| 	// OPEN | OPEN_UNESCAPED | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	closeToken := lexer.TokenClose | ||||
| 	if tok.Kind == lexer.TokenOpenUnescaped { | ||||
| 		closeToken = lexer.TokenCloseUnescaped | ||||
| 	} | ||||
|  | ||||
| 	unescaped := false | ||||
| 	if (tok.Kind == lexer.TokenOpenUnescaped) || (rOpenAmp.MatchString(tok.Val)) { | ||||
| 		unescaped = true | ||||
| 	} | ||||
|  | ||||
| 	result := ast.NewMustacheStatement(tok.Pos, tok.Line, unescaped) | ||||
|  | ||||
| 	// helperName param* hash? | ||||
| 	result.Expression = p.parseExpression(tok) | ||||
|  | ||||
| 	// CLOSE | CLOSE_UNESCAPED | ||||
| 	tokClose := p.shift() | ||||
| 	if tokClose.Kind != closeToken { | ||||
| 		errExpected(closeToken, tokClose) | ||||
| 	} | ||||
|  | ||||
| 	result.Strip = ast.NewStrip(tok.Val, tokClose.Val) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // partial : OPEN_PARTIAL partialName param* hash? CLOSE | ||||
| func (p *parser) parsePartial() *ast.PartialStatement { | ||||
| 	// OPEN_PARTIAL | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	result := ast.NewPartialStatement(tok.Pos, tok.Line) | ||||
|  | ||||
| 	// partialName | ||||
| 	result.Name = p.parsePartialName() | ||||
|  | ||||
| 	// param* hash? | ||||
| 	result.Params, result.Hash = p.parseExpressionParamsHash() | ||||
|  | ||||
| 	// CLOSE | ||||
| 	tokClose := p.shift() | ||||
| 	if tokClose.Kind != lexer.TokenClose { | ||||
| 		errExpected(lexer.TokenClose, tokClose) | ||||
| 	} | ||||
|  | ||||
| 	result.Strip = ast.NewStrip(tok.Val, tokClose.Val) | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // helperName | sexpr | ||||
| func (p *parser) parseHelperNameOrSexpr() ast.Node { | ||||
| 	if p.isSexpr() { | ||||
| 		// sexpr | ||||
| 		return p.parseSexpr() | ||||
| 	} | ||||
|  | ||||
| 	// helperName | ||||
| 	return p.parseHelperName() | ||||
| } | ||||
|  | ||||
| // param : helperName | sexpr | ||||
| func (p *parser) parseParam() ast.Node { | ||||
| 	return p.parseHelperNameOrSexpr() | ||||
| } | ||||
|  | ||||
| // Returns true if next tokens represent a `param` | ||||
| func (p *parser) isParam() bool { | ||||
| 	return (p.isSexpr() || p.isHelperName()) && !p.isHashSegment() | ||||
| } | ||||
|  | ||||
| // param* | ||||
| func (p *parser) parseParams() []ast.Node { | ||||
| 	var result []ast.Node | ||||
|  | ||||
| 	for p.isParam() { | ||||
| 		result = append(result, p.parseParam()) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // sexpr : OPEN_SEXPR helperName param* hash? CLOSE_SEXPR | ||||
| func (p *parser) parseSexpr() *ast.SubExpression { | ||||
| 	// OPEN_SEXPR | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	result := ast.NewSubExpression(tok.Pos, tok.Line) | ||||
|  | ||||
| 	// helperName param* hash? | ||||
| 	result.Expression = p.parseExpression(tok) | ||||
|  | ||||
| 	// CLOSE_SEXPR | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenCloseSexpr { | ||||
| 		errExpected(lexer.TokenCloseSexpr, tok) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // hash : hashSegment+ | ||||
| func (p *parser) parseHash() *ast.Hash { | ||||
| 	var pairs []*ast.HashPair | ||||
|  | ||||
| 	for p.isHashSegment() { | ||||
| 		pairs = append(pairs, p.parseHashSegment()) | ||||
| 	} | ||||
|  | ||||
| 	firstLoc := pairs[0].Location() | ||||
|  | ||||
| 	result := ast.NewHash(firstLoc.Pos, firstLoc.Line) | ||||
| 	result.Pairs = pairs | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // returns true if next tokens represent a `hashSegment` | ||||
| func (p *parser) isHashSegment() bool { | ||||
| 	return p.have(2) && (p.next().Kind == lexer.TokenID) && (p.nextAt(1).Kind == lexer.TokenEquals) | ||||
| } | ||||
|  | ||||
| // hashSegment : ID EQUALS param | ||||
| func (p *parser) parseHashSegment() *ast.HashPair { | ||||
| 	// ID | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	// EQUALS | ||||
| 	p.shift() | ||||
|  | ||||
| 	// param | ||||
| 	param := p.parseParam() | ||||
|  | ||||
| 	result := ast.NewHashPair(tok.Pos, tok.Line) | ||||
| 	result.Key = tok.Val | ||||
| 	result.Val = param | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // blockParams : OPEN_BLOCK_PARAMS ID+ CLOSE_BLOCK_PARAMS | ||||
| func (p *parser) parseBlockParams() []string { | ||||
| 	var result []string | ||||
|  | ||||
| 	// OPEN_BLOCK_PARAMS | ||||
| 	tok := p.shift() | ||||
|  | ||||
| 	// ID+ | ||||
| 	for p.isID() { | ||||
| 		result = append(result, p.shift().Val) | ||||
| 	} | ||||
|  | ||||
| 	if len(result) == 0 { | ||||
| 		errExpected(lexer.TokenID, p.next()) | ||||
| 	} | ||||
|  | ||||
| 	// CLOSE_BLOCK_PARAMS | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenCloseBlockParams { | ||||
| 		errExpected(lexer.TokenCloseBlockParams, tok) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // helperName : path | dataName | STRING | NUMBER | BOOLEAN | UNDEFINED | NULL | ||||
| func (p *parser) parseHelperName() ast.Node { | ||||
| 	var result ast.Node | ||||
|  | ||||
| 	tok := p.next() | ||||
|  | ||||
| 	switch tok.Kind { | ||||
| 	case lexer.TokenBoolean: | ||||
| 		// BOOLEAN | ||||
| 		p.shift() | ||||
| 		result = ast.NewBooleanLiteral(tok.Pos, tok.Line, (tok.Val == "true"), tok.Val) | ||||
| 	case lexer.TokenNumber: | ||||
| 		// NUMBER | ||||
| 		p.shift() | ||||
|  | ||||
| 		val, isInt := parseNumber(tok) | ||||
| 		result = ast.NewNumberLiteral(tok.Pos, tok.Line, val, isInt, tok.Val) | ||||
| 	case lexer.TokenString: | ||||
| 		// STRING | ||||
| 		p.shift() | ||||
| 		result = ast.NewStringLiteral(tok.Pos, tok.Line, tok.Val) | ||||
| 	case lexer.TokenData: | ||||
| 		// dataName | ||||
| 		result = p.parseDataName() | ||||
| 	default: | ||||
| 		// path | ||||
| 		result = p.parsePath(false) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // parseNumber parses a number | ||||
| func parseNumber(tok *lexer.Token) (result float64, isInt bool) { | ||||
| 	var valInt int | ||||
| 	var err error | ||||
|  | ||||
| 	valInt, err = strconv.Atoi(tok.Val) | ||||
| 	if err == nil { | ||||
| 		isInt = true | ||||
|  | ||||
| 		result = float64(valInt) | ||||
| 	} else { | ||||
| 		isInt = false | ||||
|  | ||||
| 		result, err = strconv.ParseFloat(tok.Val, 64) | ||||
| 		if err != nil { | ||||
| 			errToken(tok, fmt.Sprintf("Failed to parse number: %s", tok.Val)) | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	// named returned values | ||||
| 	return | ||||
| } | ||||
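|  | ||||
| // For example (added note): "42" yields (42, true) and "3.14" yields | ||||
| // (3.14, false); a token that is neither a valid integer nor a valid float | ||||
| // panics via errToken. | ||||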
|  | ||||
| // Returns true if next tokens represent a `helperName` | ||||
| func (p *parser) isHelperName() bool { | ||||
| 	switch p.next().Kind { | ||||
| 	case lexer.TokenBoolean, lexer.TokenNumber, lexer.TokenString, lexer.TokenData, lexer.TokenID: | ||||
| 		return true | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // partialName : helperName | sexpr | ||||
| func (p *parser) parsePartialName() ast.Node { | ||||
| 	return p.parseHelperNameOrSexpr() | ||||
| } | ||||
|  | ||||
| // dataName : DATA pathSegments | ||||
| func (p *parser) parseDataName() *ast.PathExpression { | ||||
| 	// DATA | ||||
| 	p.shift() | ||||
|  | ||||
| 	// pathSegments | ||||
| 	return p.parsePath(true) | ||||
| } | ||||
|  | ||||
| // path : pathSegments | ||||
| // pathSegments : pathSegments SEP ID | ||||
| //              | ID | ||||
| func (p *parser) parsePath(data bool) *ast.PathExpression { | ||||
| 	var tok *lexer.Token | ||||
|  | ||||
| 	// ID | ||||
| 	tok = p.shift() | ||||
| 	if tok.Kind != lexer.TokenID { | ||||
| 		errExpected(lexer.TokenID, tok) | ||||
| 	} | ||||
|  | ||||
| 	result := ast.NewPathExpression(tok.Pos, tok.Line, data) | ||||
| 	result.Part(tok.Val) | ||||
|  | ||||
| 	for p.isPathSep() { | ||||
| 		// SEP | ||||
| 		tok = p.shift() | ||||
| 		result.Sep(tok.Val) | ||||
|  | ||||
| 		// ID | ||||
| 		tok = p.shift() | ||||
| 		if tok.Kind != lexer.TokenID { | ||||
| 			errExpected(lexer.TokenID, tok) | ||||
| 		} | ||||
|  | ||||
| 		result.Part(tok.Val) | ||||
|  | ||||
| 		if len(result.Parts) > 0 { | ||||
| 			switch tok.Val { | ||||
| 			case "..", ".", "this": | ||||
| 				errToken(tok, "Invalid path: "+result.Original) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // Ensures there is a token to parse at given index | ||||
| func (p *parser) ensure(index int) { | ||||
| 	if p.lexOver { | ||||
| 		// nothing more to grab | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	nb := index + 1 | ||||
|  | ||||
| 	for len(p.tokens) < nb { | ||||
| 		// fetch next token | ||||
| 		tok := p.lex.NextToken() | ||||
|  | ||||
| 		// queue it | ||||
| 		p.tokens = append(p.tokens, &tok) | ||||
|  | ||||
| 		if (tok.Kind == lexer.TokenEOF) || (tok.Kind == lexer.TokenError) { | ||||
| 			p.lexOver = true | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // have returns true if there are at least the given number of tokens left to consume | ||||
| func (p *parser) have(nb int) bool { | ||||
| 	p.ensure(nb - 1) | ||||
|  | ||||
| 	return len(p.tokens) >= nb | ||||
| } | ||||
|  | ||||
| // nextAt returns the token at given index, without consuming it | ||||
| func (p *parser) nextAt(index int) *lexer.Token { | ||||
| 	p.ensure(index) | ||||
|  | ||||
| 	return p.tokens[index] | ||||
| } | ||||
|  | ||||
| // next returns next token without consuming it | ||||
| func (p *parser) next() *lexer.Token { | ||||
| 	return p.nextAt(0) | ||||
| } | ||||
|  | ||||
| // shift returns the next token and removes it from the tokens buffer | ||||
| // | ||||
| // Panics if next token is `TokenError` | ||||
| func (p *parser) shift() *lexer.Token { | ||||
| 	var result *lexer.Token | ||||
|  | ||||
| 	p.ensure(0) | ||||
|  | ||||
| 	result, p.tokens = p.tokens[0], p.tokens[1:] | ||||
|  | ||||
| 	// check error token | ||||
| 	if result.Kind == lexer.TokenError { | ||||
| 		errToken(result, "Lexer error") | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // isToken returns true if next token is of given type | ||||
| func (p *parser) isToken(kind lexer.TokenKind) bool { | ||||
| 	return p.have(1) && p.next().Kind == kind | ||||
| } | ||||
|  | ||||
| // isSexpr returns true if next token starts a sexpr | ||||
| func (p *parser) isSexpr() bool { | ||||
| 	return p.isToken(lexer.TokenOpenSexpr) | ||||
| } | ||||
|  | ||||
| // isPathSep returns true if next token is a path separator | ||||
| func (p *parser) isPathSep() bool { | ||||
| 	return p.isToken(lexer.TokenSep) | ||||
| } | ||||
|  | ||||
| // isID returns true if next token is an ID | ||||
| func (p *parser) isID() bool { | ||||
| 	return p.isToken(lexer.TokenID) | ||||
| } | ||||
|  | ||||
| // isBlockParams returns true if next token starts a block params | ||||
| func (p *parser) isBlockParams() bool { | ||||
| 	return p.isToken(lexer.TokenOpenBlockParams) | ||||
| } | ||||
|  | ||||
| // isInverse returns true if next token starts an INVERSE sequence | ||||
| func (p *parser) isInverse() bool { | ||||
| 	return p.isToken(lexer.TokenInverse) | ||||
| } | ||||
|  | ||||
| // isOpenInverseChain returns true if next token is OPEN_INVERSE_CHAIN | ||||
| func (p *parser) isOpenInverseChain() bool { | ||||
| 	return p.isToken(lexer.TokenOpenInverseChain) | ||||
| } | ||||
360  vendor/github.com/aymerick/raymond/parser/whitespace.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,360 @@ | ||||
| package parser | ||||
|  | ||||
| import ( | ||||
| 	"regexp" | ||||
|  | ||||
| 	"github.com/aymerick/raymond/ast" | ||||
| ) | ||||
|  | ||||
| // whitespaceVisitor walks through the AST to perform whitespace control | ||||
| // | ||||
| // The logic was shamelessly borrowed from: | ||||
| //   https://github.com/wycats/handlebars.js/blob/master/lib/handlebars/compiler/whitespace-control.js | ||||
| type whitespaceVisitor struct { | ||||
| 	isRootSeen bool | ||||
| } | ||||
|  | ||||
| var ( | ||||
| 	rTrimLeft         = regexp.MustCompile(`^[ \t]*\r?\n?`) | ||||
| 	rTrimLeftMultiple = regexp.MustCompile(`^\s+`) | ||||
|  | ||||
| 	rTrimRight         = regexp.MustCompile(`[ \t]+$`) | ||||
| 	rTrimRightMultiple = regexp.MustCompile(`\s+$`) | ||||
|  | ||||
| 	rPrevWhitespace      = regexp.MustCompile(`\r?\n\s*?$`) | ||||
| 	rPrevWhitespaceStart = regexp.MustCompile(`(^|\r?\n)\s*?$`) | ||||
|  | ||||
| 	rNextWhitespace    = regexp.MustCompile(`^\s*?\r?\n`) | ||||
| 	rNextWhitespaceEnd = regexp.MustCompile(`^\s*?(\r?\n|$)`) | ||||
|  | ||||
| 	rPartialIndent = regexp.MustCompile(`([ \t]+$)`) | ||||
| ) | ||||
|  | ||||
| // newWhitespaceVisitor instantiates a new whitespaceVisitor | ||||
| func newWhitespaceVisitor() *whitespaceVisitor { | ||||
| 	return &whitespaceVisitor{} | ||||
| } | ||||
|  | ||||
| // processWhitespaces performs whitespace control on the given AST | ||||
| // | ||||
| // WARNING: It must be called only once on a given AST. | ||||
| func processWhitespaces(node ast.Node) { | ||||
| 	node.Accept(newWhitespaceVisitor()) | ||||
| } | ||||
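|  | ||||
| // For illustration: this pass is what makes `{{~#if ok}}` strip the whitespace | ||||
| // (including the preceding newline) before the tag and `{{/if~}}` strip the | ||||
| // whitespace after it, and what removes block tags that stand alone on their | ||||
| // own line together with that line. | ||||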
|  | ||||
| func omitRightFirst(body []ast.Node, multiple bool) { | ||||
| 	omitRight(body, -1, multiple) | ||||
| } | ||||
|  | ||||
| func omitRight(body []ast.Node, i int, multiple bool) { | ||||
| 	if i+1 >= len(body) { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	current := body[i+1] | ||||
|  | ||||
| 	node, ok := current.(*ast.ContentStatement) | ||||
| 	if !ok { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	if !multiple && node.RightStripped { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	original := node.Value | ||||
|  | ||||
| 	r := rTrimLeft | ||||
| 	if multiple { | ||||
| 		r = rTrimLeftMultiple | ||||
| 	} | ||||
|  | ||||
| 	node.Value = r.ReplaceAllString(node.Value, "") | ||||
|  | ||||
| 	node.RightStripped = (original != node.Value) | ||||
| } | ||||
|  | ||||
| func omitLeftLast(body []ast.Node, multiple bool) { | ||||
| 	omitLeft(body, len(body), multiple) | ||||
| } | ||||
|  | ||||
| func omitLeft(body []ast.Node, i int, multiple bool) bool { | ||||
| 	if i-1 < 0 { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	current := body[i-1] | ||||
|  | ||||
| 	node, ok := current.(*ast.ContentStatement) | ||||
| 	if !ok { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	if !multiple && node.LeftStripped { | ||||
| 		return false | ||||
| 	} | ||||
|  | ||||
| 	original := node.Value | ||||
|  | ||||
| 	r := rTrimRight | ||||
| 	if multiple { | ||||
| 		r = rTrimRightMultiple | ||||
| 	} | ||||
|  | ||||
| 	node.Value = r.ReplaceAllString(node.Value, "") | ||||
|  | ||||
| 	node.LeftStripped = (original != node.Value) | ||||
|  | ||||
| 	return node.LeftStripped | ||||
| } | ||||
|  | ||||
| func isPrevWhitespace(body []ast.Node) bool { | ||||
| 	return isPrevWhitespaceProgram(body, len(body), false) | ||||
| } | ||||
|  | ||||
| func isPrevWhitespaceProgram(body []ast.Node, i int, isRoot bool) bool { | ||||
| 	if i < 1 { | ||||
| 		return isRoot | ||||
| 	} | ||||
|  | ||||
| 	prev := body[i-1] | ||||
|  | ||||
| 	if node, ok := prev.(*ast.ContentStatement); ok { | ||||
| 		if (node.Value == "") && node.RightStripped { | ||||
| 			// already stripped, so it may be an empty string not caught by the regexp | ||||
| 			return true | ||||
| 		} | ||||
|  | ||||
| 		r := rPrevWhitespaceStart | ||||
| 		if (i > 1) || !isRoot { | ||||
| 			r = rPrevWhitespace | ||||
| 		} | ||||
|  | ||||
| 		return r.MatchString(node.Value) | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| func isNextWhitespace(body []ast.Node) bool { | ||||
| 	return isNextWhitespaceProgram(body, -1, false) | ||||
| } | ||||
|  | ||||
| func isNextWhitespaceProgram(body []ast.Node, i int, isRoot bool) bool { | ||||
| 	if i+1 >= len(body) { | ||||
| 		return isRoot | ||||
| 	} | ||||
|  | ||||
| 	next := body[i+1] | ||||
|  | ||||
| 	if node, ok := next.(*ast.ContentStatement); ok { | ||||
| 		if (node.Value == "") && node.LeftStripped { | ||||
| 			// already stripped, so it may be an empty string not caught by the regexp | ||||
| 			return true | ||||
| 		} | ||||
|  | ||||
| 		r := rNextWhitespaceEnd | ||||
| 		if (i+2 > len(body)) || !isRoot { | ||||
| 			r = rNextWhitespace | ||||
| 		} | ||||
|  | ||||
| 		return r.MatchString(node.Value) | ||||
| 	} | ||||
|  | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // | ||||
| // Visitor interface | ||||
| // | ||||
|  | ||||
| func (v *whitespaceVisitor) VisitProgram(program *ast.Program) interface{} { | ||||
| 	isRoot := !v.isRootSeen | ||||
| 	v.isRootSeen = true | ||||
|  | ||||
| 	body := program.Body | ||||
| 	for i, current := range body { | ||||
| 		strip, _ := current.Accept(v).(*ast.Strip) | ||||
| 		if strip == nil { | ||||
| 			continue | ||||
| 		} | ||||
|  | ||||
| 		_isPrevWhitespace := isPrevWhitespaceProgram(body, i, isRoot) | ||||
| 		_isNextWhitespace := isNextWhitespaceProgram(body, i, isRoot) | ||||
|  | ||||
| 		openStandalone := strip.OpenStandalone && _isPrevWhitespace | ||||
| 		closeStandalone := strip.CloseStandalone && _isNextWhitespace | ||||
| 		inlineStandalone := strip.InlineStandalone && _isPrevWhitespace && _isNextWhitespace | ||||
|  | ||||
| 		if strip.Close { | ||||
| 			omitRight(body, i, true) | ||||
| 		} | ||||
|  | ||||
| 		if strip.Open && (i > 0) { | ||||
| 			omitLeft(body, i, true) | ||||
| 		} | ||||
|  | ||||
| 		if inlineStandalone { | ||||
| 			omitRight(body, i, false) | ||||
|  | ||||
| 			if omitLeft(body, i, false) { | ||||
| 				// If we are on a standalone node, save the indent info for partials | ||||
| 				if partial, ok := current.(*ast.PartialStatement); ok { | ||||
| 					// Pull out the whitespace from the final line | ||||
| 					if i > 0 { | ||||
| 						if prevContent, ok := body[i-1].(*ast.ContentStatement); ok { | ||||
| 							partial.Indent = rPartialIndent.FindString(prevContent.Original) | ||||
| 						} | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if b, ok := current.(*ast.BlockStatement); ok { | ||||
| 			if openStandalone { | ||||
| 				prog := b.Program | ||||
| 				if prog == nil { | ||||
| 					prog = b.Inverse | ||||
| 				} | ||||
|  | ||||
| 				omitRightFirst(prog.Body, false) | ||||
|  | ||||
| 				// Strip out the previous content node if it's whitespace only | ||||
| 				omitLeft(body, i, false) | ||||
| 			} | ||||
|  | ||||
| 			if closeStandalone { | ||||
| 				prog := b.Inverse | ||||
| 				if prog == nil { | ||||
| 					prog = b.Program | ||||
| 				} | ||||
|  | ||||
| 				// Always strip the next node | ||||
| 				omitRight(body, i, false) | ||||
|  | ||||
| 				omitLeftLast(prog.Body, false) | ||||
| 			} | ||||
|  | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| func (v *whitespaceVisitor) VisitBlock(block *ast.BlockStatement) interface{} { | ||||
| 	if block.Program != nil { | ||||
| 		block.Program.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	if block.Inverse != nil { | ||||
| 		block.Inverse.Accept(v) | ||||
| 	} | ||||
|  | ||||
| 	program := block.Program | ||||
| 	inverse := block.Inverse | ||||
|  | ||||
| 	if program == nil { | ||||
| 		program = inverse | ||||
| 		inverse = nil | ||||
| 	} | ||||
|  | ||||
| 	firstInverse := inverse | ||||
| 	lastInverse := inverse | ||||
|  | ||||
| 	if (inverse != nil) && inverse.Chained { | ||||
| 		b, _ := inverse.Body[0].(*ast.BlockStatement) | ||||
| 		firstInverse = b.Program | ||||
|  | ||||
| 		for lastInverse.Chained { | ||||
| 			b, _ := lastInverse.Body[len(lastInverse.Body)-1].(*ast.BlockStatement) | ||||
| 			lastInverse = b.Program | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	closeProg := firstInverse | ||||
| 	if closeProg == nil { | ||||
| 		closeProg = program | ||||
| 	} | ||||
|  | ||||
| 	strip := &ast.Strip{ | ||||
| 		Open:  (block.OpenStrip != nil) && block.OpenStrip.Open, | ||||
| 		Close: (block.CloseStrip != nil) && block.CloseStrip.Close, | ||||
|  | ||||
| 		OpenStandalone:  isNextWhitespace(program.Body), | ||||
| 		CloseStandalone: isPrevWhitespace(closeProg.Body), | ||||
| 	} | ||||
|  | ||||
| 	if (block.OpenStrip != nil) && block.OpenStrip.Close { | ||||
| 		omitRightFirst(program.Body, true) | ||||
| 	} | ||||
|  | ||||
| 	if inverse != nil { | ||||
| 		if block.InverseStrip != nil { | ||||
| 			inverseStrip := block.InverseStrip | ||||
|  | ||||
| 			if inverseStrip.Open { | ||||
| 				omitLeftLast(program.Body, true) | ||||
| 			} | ||||
|  | ||||
| 			if inverseStrip.Close { | ||||
| 				omitRightFirst(firstInverse.Body, true) | ||||
| 			} | ||||
| 		} | ||||
|  | ||||
| 		if (block.CloseStrip != nil) && block.CloseStrip.Open { | ||||
| 			omitLeftLast(lastInverse.Body, true) | ||||
| 		} | ||||
|  | ||||
| 		// Find standalone else statements | ||||
| 		if isPrevWhitespace(program.Body) && isNextWhitespace(firstInverse.Body) { | ||||
| 			omitLeftLast(program.Body, false) | ||||
|  | ||||
| 			omitRightFirst(firstInverse.Body, false) | ||||
| 		} | ||||
| 	} else if (block.CloseStrip != nil) && block.CloseStrip.Open { | ||||
| 		omitLeftLast(program.Body, true) | ||||
| 	} | ||||
|  | ||||
| 	return strip | ||||
| } | ||||
|  | ||||
| func (v *whitespaceVisitor) VisitMustache(mustache *ast.MustacheStatement) interface{} { | ||||
| 	return mustache.Strip | ||||
| } | ||||
|  | ||||
| func _inlineStandalone(strip *ast.Strip) interface{} { | ||||
| 	return &ast.Strip{ | ||||
| 		Open:             strip.Open, | ||||
| 		Close:            strip.Close, | ||||
| 		InlineStandalone: true, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (v *whitespaceVisitor) VisitPartial(node *ast.PartialStatement) interface{} { | ||||
| 	strip := node.Strip | ||||
| 	if strip == nil { | ||||
| 		strip = &ast.Strip{} | ||||
| 	} | ||||
|  | ||||
| 	return _inlineStandalone(strip) | ||||
| } | ||||
|  | ||||
| func (v *whitespaceVisitor) VisitComment(node *ast.CommentStatement) interface{} { | ||||
| 	strip := node.Strip | ||||
| 	if strip == nil { | ||||
| 		strip = &ast.Strip{} | ||||
| 	} | ||||
|  | ||||
| 	return _inlineStandalone(strip) | ||||
| } | ||||
|  | ||||
| // NOOP | ||||
| func (v *whitespaceVisitor) VisitContent(node *ast.ContentStatement) interface{}    { return nil } | ||||
| func (v *whitespaceVisitor) VisitExpression(node *ast.Expression) interface{}       { return nil } | ||||
| func (v *whitespaceVisitor) VisitSubExpression(node *ast.SubExpression) interface{} { return nil } | ||||
| func (v *whitespaceVisitor) VisitPath(node *ast.PathExpression) interface{}         { return nil } | ||||
| func (v *whitespaceVisitor) VisitString(node *ast.StringLiteral) interface{}        { return nil } | ||||
| func (v *whitespaceVisitor) VisitBoolean(node *ast.BooleanLiteral) interface{}      { return nil } | ||||
| func (v *whitespaceVisitor) VisitNumber(node *ast.NumberLiteral) interface{}        { return nil } | ||||
| func (v *whitespaceVisitor) VisitHash(node *ast.Hash) interface{}                   { return nil } | ||||
| func (v *whitespaceVisitor) VisitHashPair(node *ast.HashPair) interface{}           { return nil } | ||||
85  vendor/github.com/aymerick/raymond/partial.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,85 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"sync" | ||||
| ) | ||||
|  | ||||
| // partial represents a partial template | ||||
| type partial struct { | ||||
| 	name   string | ||||
| 	source string | ||||
| 	tpl    *Template | ||||
| } | ||||
|  | ||||
| // partials stores all global partials | ||||
| var partials map[string]*partial | ||||
|  | ||||
| // protects global partials | ||||
| var partialsMutex sync.RWMutex | ||||
|  | ||||
| func init() { | ||||
| 	partials = make(map[string]*partial) | ||||
| } | ||||
|  | ||||
| // newPartial instantiates a new partial | ||||
| func newPartial(name string, source string, tpl *Template) *partial { | ||||
| 	return &partial{ | ||||
| 		name:   name, | ||||
| 		source: source, | ||||
| 		tpl:    tpl, | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // RegisterPartial registers a global partial. That partial will be available to all templates. | ||||
| func RegisterPartial(name string, source string) { | ||||
| 	partialsMutex.Lock() | ||||
| 	defer partialsMutex.Unlock() | ||||
|  | ||||
| 	if partials[name] != nil { | ||||
| 		panic(fmt.Errorf("Partial already registered: %s", name)) | ||||
| 	} | ||||
|  | ||||
| 	partials[name] = newPartial(name, source, nil) | ||||
| } | ||||
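|  | ||||
| // Usage sketch (illustrative; partial name, template and context are made up): | ||||
| // | ||||
| //	raymond.RegisterPartial("userName", "{{firstName}} {{lastName}}") | ||||
| // | ||||
| //	tpl := raymond.MustParse("Hello {{> userName}}!") | ||||
| //	out := tpl.MustExec(map[string]string{"firstName": "Ada", "lastName": "Lovelace"}) | ||||
| //	// out == "Hello Ada Lovelace!" | ||||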
|  | ||||
| // RegisterPartials registers several global partials. Those partials will be available to all templates. | ||||
| func RegisterPartials(partials map[string]string) { | ||||
| 	for name, p := range partials { | ||||
| 		RegisterPartial(name, p) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // RegisterPartialTemplate registers a global partial with given parsed template. That partial will be available to all templates. | ||||
| func RegisterPartialTemplate(name string, tpl *Template) { | ||||
| 	partialsMutex.Lock() | ||||
| 	defer partialsMutex.Unlock() | ||||
|  | ||||
| 	if partials[name] != nil { | ||||
| 		panic(fmt.Errorf("Partial already registered: %s", name)) | ||||
| 	} | ||||
|  | ||||
| 	partials[name] = newPartial(name, "", tpl) | ||||
| } | ||||
|  | ||||
| // findPartial finds a registered global partial | ||||
| func findPartial(name string) *partial { | ||||
| 	partialsMutex.RLock() | ||||
| 	defer partialsMutex.RUnlock() | ||||
|  | ||||
| 	return partials[name] | ||||
| } | ||||
|  | ||||
| // template returns parsed partial template | ||||
| func (p *partial) template() (*Template, error) { | ||||
| 	if p.tpl == nil { | ||||
| 		var err error | ||||
|  | ||||
| 		p.tpl, err = Parse(p.source) | ||||
| 		if err != nil { | ||||
| 			return nil, err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return p.tpl, nil | ||||
| } | ||||
28  vendor/github.com/aymerick/raymond/raymond.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,28 @@ | ||||
| // Package raymond provides handlebars evaluation | ||||
| package raymond | ||||
|  | ||||
| // Render parses a template and evaluates it with given context | ||||
| // | ||||
| // Note that this function call is not optimal as your template is parsed every time you call it. You should use the Parse() function instead. | ||||
| func Render(source string, ctx interface{}) (string, error) { | ||||
| 	// parse template | ||||
| 	tpl, err := Parse(source) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	// renders template | ||||
| 	str, err := tpl.Exec(ctx) | ||||
| 	if err != nil { | ||||
| 		return "", err | ||||
| 	} | ||||
|  | ||||
| 	return str, nil | ||||
| } | ||||
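|  | ||||
| // Usage sketch (illustrative; template source and context are made up): | ||||
| // | ||||
| //	out, err := raymond.Render("Hello {{name}}!", map[string]string{"name": "World"}) | ||||
| //	if err != nil { | ||||
| //		// handle parse or evaluation error | ||||
| //	} | ||||
| //	// out == "Hello World!" | ||||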
|  | ||||
| // MustRender parses a template and evaluates it with given context. It panics on error. | ||||
| // | ||||
| // Note that this function call is not optimal as your template is parsed every time you call it. You should use the Parse() function instead. | ||||
| func MustRender(source string, ctx interface{}) string { | ||||
| 	return MustParse(source).MustExec(ctx) | ||||
| } | ||||
BIN  vendor/github.com/aymerick/raymond/raymond.png  (generated, vendored, Normal file)
Binary file not shown. | After Width: | Height: | Size: 13 KiB |
84  vendor/github.com/aymerick/raymond/string.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,84 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"reflect" | ||||
| 	"strconv" | ||||
| ) | ||||
|  | ||||
| // SafeString represents a string that must not be escaped. | ||||
| // | ||||
| // A SafeString can be returned by helpers to disable escaping. | ||||
| type SafeString string | ||||
|  | ||||
| // isSafeString returns true if argument is a SafeString | ||||
| func isSafeString(value interface{}) bool { | ||||
| 	if _, ok := value.(SafeString); ok { | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
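|  | ||||
| // Usage sketch (illustrative; the helper name and markup are made up): a helper | ||||
| // may return a SafeString so that its output is not HTML-escaped: | ||||
| // | ||||
| //	tpl.RegisterHelper("bold", func(text string) raymond.SafeString { | ||||
| //		return raymond.SafeString("<strong>" + text + "</strong>") | ||||
| //	}) | ||||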
|  | ||||
| // Str returns string representation of any basic type value. | ||||
| func Str(value interface{}) string { | ||||
| 	return strValue(reflect.ValueOf(value)) | ||||
| } | ||||
|  | ||||
| // strValue returns string representation of a reflect.Value | ||||
| func strValue(value reflect.Value) string { | ||||
| 	result := "" | ||||
|  | ||||
| 	ival, ok := printableValue(value) | ||||
| 	if !ok { | ||||
| 		panic(fmt.Errorf("Can't print value: %q", value)) | ||||
| 	} | ||||
|  | ||||
| 	val := reflect.ValueOf(ival) | ||||
|  | ||||
| 	switch val.Kind() { | ||||
| 	case reflect.Array, reflect.Slice: | ||||
| 		for i := 0; i < val.Len(); i++ { | ||||
| 			result += strValue(val.Index(i)) | ||||
| 		} | ||||
| 	case reflect.Bool: | ||||
| 		result = "false" | ||||
| 		if val.Bool() { | ||||
| 			result = "true" | ||||
| 		} | ||||
| 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 		result = fmt.Sprintf("%d", ival) | ||||
| 	case reflect.Float32, reflect.Float64: | ||||
| 		result = strconv.FormatFloat(val.Float(), 'f', -1, 64) | ||||
| 	case reflect.Invalid: | ||||
| 		result = "" | ||||
| 	default: | ||||
| 		result = fmt.Sprintf("%s", ival) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // printableValue returns the, possibly indirected, interface value inside v that | ||||
| // is best for a call to formatted printer. | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go | ||||
| func printableValue(v reflect.Value) (interface{}, bool) { | ||||
| 	if v.Kind() == reflect.Ptr { | ||||
| 		v, _ = indirect(v) // fmt.Fprint handles nil. | ||||
| 	} | ||||
| 	if !v.IsValid() { | ||||
| 		return "", true | ||||
| 	} | ||||
|  | ||||
| 	if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) { | ||||
| 		if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) { | ||||
| 			v = v.Addr() | ||||
| 		} else { | ||||
| 			switch v.Kind() { | ||||
| 			case reflect.Chan, reflect.Func: | ||||
| 				return nil, false | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	return v.Interface(), true | ||||
| } | ||||
248  vendor/github.com/aymerick/raymond/template.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,248 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"fmt" | ||||
| 	"io/ioutil" | ||||
| 	"reflect" | ||||
| 	"runtime" | ||||
| 	"sync" | ||||
|  | ||||
| 	"github.com/aymerick/raymond/ast" | ||||
| 	"github.com/aymerick/raymond/parser" | ||||
| ) | ||||
|  | ||||
| // Template represents a handlebars template. | ||||
| type Template struct { | ||||
| 	source   string | ||||
| 	program  *ast.Program | ||||
| 	helpers  map[string]reflect.Value | ||||
| 	partials map[string]*partial | ||||
| 	mutex    sync.RWMutex // protects helpers and partials | ||||
| } | ||||
|  | ||||
| // newTemplate instantiates a new template without parsing it | ||||
| func newTemplate(source string) *Template { | ||||
| 	return &Template{ | ||||
| 		source:   source, | ||||
| 		helpers:  make(map[string]reflect.Value), | ||||
| 		partials: make(map[string]*partial), | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // Parse instantiates a template by parsing given source. | ||||
| func Parse(source string) (*Template, error) { | ||||
| 	tpl := newTemplate(source) | ||||
|  | ||||
| 	// parse template | ||||
| 	if err := tpl.parse(); err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return tpl, nil | ||||
| } | ||||
|  | ||||
| // MustParse instantiates a template by parsing given source. It panics on error. | ||||
| func MustParse(source string) *Template { | ||||
| 	result, err := Parse(source) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // ParseFile reads given file and returns parsed template. | ||||
| func ParseFile(filePath string) (*Template, error) { | ||||
| 	b, err := ioutil.ReadFile(filePath) | ||||
| 	if err != nil { | ||||
| 		return nil, err | ||||
| 	} | ||||
|  | ||||
| 	return Parse(string(b)) | ||||
| } | ||||
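|  | ||||
| // Usage sketch (illustrative; template source and context are made up): | ||||
| // | ||||
| //	tpl, err := raymond.Parse("{{title}} by {{author}}") | ||||
| //	if err != nil { | ||||
| //		// handle parse error | ||||
| //	} | ||||
| //	out, err := tpl.Exec(map[string]string{"title": "Moby Dick", "author": "Melville"}) | ||||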
|  | ||||
| // parse parses the template | ||||
| // | ||||
| // It can be called several times; the parsing will only be done once. | ||||
| func (tpl *Template) parse() error { | ||||
| 	if tpl.program == nil { | ||||
| 		var err error | ||||
|  | ||||
| 		tpl.program, err = parser.Parse(tpl.source) | ||||
| 		if err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // Clone returns a copy of that template. | ||||
| func (tpl *Template) Clone() *Template { | ||||
| 	result := newTemplate(tpl.source) | ||||
|  | ||||
| 	result.program = tpl.program | ||||
|  | ||||
| 	tpl.mutex.RLock() | ||||
| 	defer tpl.mutex.RUnlock() | ||||
|  | ||||
| 	for name, helper := range tpl.helpers { | ||||
| 		result.RegisterHelper(name, helper.Interface()) | ||||
| 	} | ||||
|  | ||||
| 	for name, partial := range tpl.partials { | ||||
| 		result.addPartial(name, partial.source, partial.tpl) | ||||
| 	} | ||||
|  | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| func (tpl *Template) findHelper(name string) reflect.Value { | ||||
| 	tpl.mutex.RLock() | ||||
| 	defer tpl.mutex.RUnlock() | ||||
|  | ||||
| 	return tpl.helpers[name] | ||||
| } | ||||
|  | ||||
| // RegisterHelper registers a helper for that template. | ||||
| func (tpl *Template) RegisterHelper(name string, helper interface{}) { | ||||
| 	tpl.mutex.Lock() | ||||
| 	defer tpl.mutex.Unlock() | ||||
|  | ||||
| 	if tpl.helpers[name] != zero { | ||||
| 		panic(fmt.Sprintf("Helper %s already registered", name)) | ||||
| 	} | ||||
|  | ||||
| 	val := reflect.ValueOf(helper) | ||||
| 	ensureValidHelper(name, val) | ||||
|  | ||||
| 	tpl.helpers[name] = val | ||||
| } | ||||
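|  | ||||
| // Usage sketch (illustrative; the helper name, template and context are made up): | ||||
| // | ||||
| //	tpl := raymond.MustParse("{{shout greeting}}")
 | ||||
| //	tpl.RegisterHelper("shout", func(s string) string { | ||||
| //		return strings.ToUpper(s) | ||||
| //	}) | ||||
| //	out := tpl.MustExec(map[string]string{"greeting": "hello"}) // "HELLO" | ||||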
|  | ||||
| // RegisterHelpers registers several helpers for that template. | ||||
| func (tpl *Template) RegisterHelpers(helpers map[string]interface{}) { | ||||
| 	for name, helper := range helpers { | ||||
| 		tpl.RegisterHelper(name, helper) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| func (tpl *Template) addPartial(name string, source string, template *Template) { | ||||
| 	tpl.mutex.Lock() | ||||
| 	defer tpl.mutex.Unlock() | ||||
|  | ||||
| 	if tpl.partials[name] != nil { | ||||
| 		panic(fmt.Sprintf("Partial %s already registered", name)) | ||||
| 	} | ||||
|  | ||||
| 	tpl.partials[name] = newPartial(name, source, template) | ||||
| } | ||||
|  | ||||
| func (tpl *Template) findPartial(name string) *partial { | ||||
| 	tpl.mutex.RLock() | ||||
| 	defer tpl.mutex.RUnlock() | ||||
|  | ||||
| 	return tpl.partials[name] | ||||
| } | ||||
|  | ||||
| // RegisterPartial registers a partial for that template. | ||||
| func (tpl *Template) RegisterPartial(name string, source string) { | ||||
| 	tpl.addPartial(name, source, nil) | ||||
| } | ||||
|  | ||||
| // RegisterPartials registers several partials for that template. | ||||
| func (tpl *Template) RegisterPartials(partials map[string]string) { | ||||
| 	for name, partial := range partials { | ||||
| 		tpl.RegisterPartial(name, partial) | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // RegisterPartialFile reads given file and registers its content as a partial with given name. | ||||
| func (tpl *Template) RegisterPartialFile(filePath string, name string) error { | ||||
| 	b, err := ioutil.ReadFile(filePath) | ||||
| 	if err != nil { | ||||
| 		return err | ||||
| 	} | ||||
|  | ||||
| 	tpl.RegisterPartial(name, string(b)) | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // RegisterPartialFiles reads several files and registers them as partials; the filename base is used as the partial name. | ||||
| func (tpl *Template) RegisterPartialFiles(filePaths ...string) error { | ||||
| 	if len(filePaths) == 0 { | ||||
| 		return nil | ||||
| 	} | ||||
|  | ||||
| 	for _, filePath := range filePaths { | ||||
| 		name := fileBase(filePath) | ||||
|  | ||||
| 		if err := tpl.RegisterPartialFile(filePath, name); err != nil { | ||||
| 			return err | ||||
| 		} | ||||
| 	} | ||||
|  | ||||
| 	return nil | ||||
| } | ||||
|  | ||||
| // RegisterPartialTemplate registers an already parsed partial for that template. | ||||
| func (tpl *Template) RegisterPartialTemplate(name string, template *Template) { | ||||
| 	tpl.addPartial(name, "", template) | ||||
| } | ||||
|  | ||||
| // Exec evaluates template with given context. | ||||
| func (tpl *Template) Exec(ctx interface{}) (result string, err error) { | ||||
| 	return tpl.ExecWith(ctx, nil) | ||||
| } | ||||
|  | ||||
| // MustExec evaluates template with given context. It panics on error. | ||||
| func (tpl *Template) MustExec(ctx interface{}) string { | ||||
| 	result, err := tpl.Exec(ctx) | ||||
| 	if err != nil { | ||||
| 		panic(err) | ||||
| 	} | ||||
| 	return result | ||||
| } | ||||
|  | ||||
| // ExecWith evaluates template with given context and private data frame. | ||||
| func (tpl *Template) ExecWith(ctx interface{}, privData *DataFrame) (result string, err error) { | ||||
| 	defer errRecover(&err) | ||||
|  | ||||
| 	// parses template if necessary | ||||
| 	err = tpl.parse() | ||||
| 	if err != nil { | ||||
| 		return | ||||
| 	} | ||||
|  | ||||
| 	// setup visitor | ||||
| 	v := newEvalVisitor(tpl, ctx, privData) | ||||
|  | ||||
| 	// visit AST | ||||
| 	result, _ = tpl.program.Accept(v).(string) | ||||
|  | ||||
| 	// named return values | ||||
| 	return | ||||
| } | ||||
|  | ||||
| // errRecover recovers evaluation panic | ||||
| func errRecover(errp *error) { | ||||
| 	e := recover() | ||||
| 	if e != nil { | ||||
| 		switch err := e.(type) { | ||||
| 		case runtime.Error: | ||||
| 			panic(e) | ||||
| 		case error: | ||||
| 			*errp = err | ||||
| 		default: | ||||
| 			panic(e) | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
|  | ||||
| // PrintAST returns string representation of parsed template. | ||||
| func (tpl *Template) PrintAST() string { | ||||
| 	if err := tpl.parse(); err != nil { | ||||
| 		return fmt.Sprintf("PARSER ERROR: %s", err) | ||||
| 	} | ||||
|  | ||||
| 	return ast.Print(tpl.program) | ||||
| } | ||||
85  vendor/github.com/aymerick/raymond/utils.go  (generated, vendored, Normal file)
							| @@ -0,0 +1,85 @@ | ||||
| package raymond | ||||
|  | ||||
| import ( | ||||
| 	"path" | ||||
| 	"reflect" | ||||
| ) | ||||
|  | ||||
| // indirect returns the item at the end of indirection, and a bool to indicate if it's nil. | ||||
| // We indirect through pointers and empty interfaces (only) because | ||||
| // non-empty interfaces have methods we might need. | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go | ||||
| func indirect(v reflect.Value) (rv reflect.Value, isNil bool) { | ||||
| 	for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() { | ||||
| 		if v.IsNil() { | ||||
| 			return v, true | ||||
| 		} | ||||
| 		if v.Kind() == reflect.Interface && v.NumMethod() > 0 { | ||||
| 			break | ||||
| 		} | ||||
| 	} | ||||
| 	return v, false | ||||
| } | ||||
|  | ||||
| // IsTrue returns true if obj is a truthy value. | ||||
| func IsTrue(obj interface{}) bool { | ||||
| 	truth, ok := isTrueValue(reflect.ValueOf(obj)) | ||||
| 	if !ok { | ||||
| 		return false | ||||
| 	} | ||||
| 	return truth | ||||
| } | ||||
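|  | ||||
| // For illustration, following isTrueValue below: IsTrue(""), IsTrue(0) and | ||||
| // IsTrue(nil) are false, while IsTrue("x"), IsTrue([]int{1}) and any struct | ||||
| // value are true. | ||||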
|  | ||||
| // isTrueValue reports whether the value is 'true', in the sense of not the zero of its type, | ||||
| // and whether the value has a meaningful truth value | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go | ||||
| func isTrueValue(val reflect.Value) (truth, ok bool) { | ||||
| 	if !val.IsValid() { | ||||
| 		// Something like var x interface{}, never set. It's a form of nil. | ||||
| 		return false, true | ||||
| 	} | ||||
| 	switch val.Kind() { | ||||
| 	case reflect.Array, reflect.Map, reflect.Slice, reflect.String: | ||||
| 		truth = val.Len() > 0 | ||||
| 	case reflect.Bool: | ||||
| 		truth = val.Bool() | ||||
| 	case reflect.Complex64, reflect.Complex128: | ||||
| 		truth = val.Complex() != 0 | ||||
| 	case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface: | ||||
| 		truth = !val.IsNil() | ||||
| 	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: | ||||
| 		truth = val.Int() != 0 | ||||
| 	case reflect.Float32, reflect.Float64: | ||||
| 		truth = val.Float() != 0 | ||||
| 	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: | ||||
| 		truth = val.Uint() != 0 | ||||
| 	case reflect.Struct: | ||||
| 		truth = true // Struct values are always true. | ||||
| 	default: | ||||
| 		return | ||||
| 	} | ||||
| 	return truth, true | ||||
| } | ||||
|  | ||||
| // canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero. | ||||
| // | ||||
| // NOTE: borrowed from https://github.com/golang/go/tree/master/src/text/template/exec.go | ||||
| func canBeNil(typ reflect.Type) bool { | ||||
| 	switch typ.Kind() { | ||||
| 	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: | ||||
| 		return true | ||||
| 	} | ||||
| 	return false | ||||
| } | ||||
|  | ||||
| // fileBase returns base file name | ||||
| // | ||||
| // example: /foo/bar/baz.png => baz | ||||
| func fileBase(filePath string) string { | ||||
| 	fileName := path.Base(filePath) | ||||
| 	fileExt := path.Ext(filePath) | ||||
|  | ||||
| 	return fileName[:len(fileName)-len(fileExt)] | ||||
| } | ||||