26 files changed, 1579 insertions, 228 deletions
diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json index dcf0a60cb..2858ef0c9 100644 --- a/Godeps/Godeps.json +++ b/Godeps/Godeps.json @@ -84,7 +84,7 @@ }, { "ImportPath": "github.com/robertkrimen/otto", - "Rev": "c21072f61b64b51ea58138ccacf0a85d54b9f07c" + "Rev": "53221230c215611a90762720c9042ac782ef74ee" }, { "ImportPath": "github.com/syndtr/goleveldb/leveldb", diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/comments.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/comments.go new file mode 100644 index 000000000..227e34ecb --- /dev/null +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/comments.go @@ -0,0 +1,92 @@ +package ast + +import ( + "fmt" + "github.com/robertkrimen/otto/file" +) + +// CommentPosition determines where the comment is in a given context +type CommentPosition int + +const ( + _ CommentPosition = iota + LEADING // Before the pertinent expression + TRAILING // After the pertinent expression + KEY // After a key or keyword + COLON // After a colon in a field declaration + FINAL // Final comments in a block, not belonging to a specific expression or the comment after a trailing , in an array or object literal + TBD +) + +// Comment contains the data of the comment +type Comment struct { + Begin file.Idx + Text string + Position CommentPosition +} + +// String returns a stringified version of the position +func (cp CommentPosition) String() string { + switch cp { + case LEADING: + return "Leading" + case TRAILING: + return "Trailing" + case KEY: + return "Key" + case COLON: + return "Colon" + case FINAL: + return "Final" + default: + return "???" + } +} + +// String returns a stringified version of the comment +func (c Comment) String() string { + return fmt.Sprintf("Comment: %v", c.Text) +} + +// CommentMap is the data structure where all found comments are stored +type CommentMap map[Node][]*Comment + +// AddComment adds a single comment to the map +func (cm CommentMap) AddComment(node Node, comment *Comment) { + list := cm[node] + list = append(list, comment) + + cm[node] = list +} + +// AddComments adds a slice of comments, given a node and an updated position +func (cm CommentMap) AddComments(node Node, comments []*Comment, position CommentPosition) { + for _, comment := range comments { + comment.Position = position + cm.AddComment(node, comment) + } +} + +// Size returns the size of the map +func (cm CommentMap) Size() int { + size := 0 + for _, comments := range cm { + size += len(comments) + } + + return size +} + +// MoveComments moves comments with a given position from a node to another +func (cm CommentMap) MoveComments(from, to Node, position CommentPosition) { + for i, c := range cm[from] { + if c.Position == position { + cm.AddComment(to, c) + + // Remove the comment from the "from" slice + cm[from][i] = cm[from][len(cm[from])-1] + cm[from][len(cm[from])-1] = nil + cm[from] = cm[from][:len(cm[from])-1] + } + } +} diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/node.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/node.go index eb46f8601..8a651dc2f 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/node.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/ast/node.go @@ -86,6 +86,11 @@ type ( Identifier Identifier } + EmptyExpression struct { + Begin file.Idx + End file.Idx + } + FunctionLiteral struct { Function file.Idx Name *Identifier @@ -185,6 +190,7 @@ func (*BracketExpression) _expressionNode() {} func (*CallExpression) _expressionNode() {} func 
(*ConditionalExpression) _expressionNode() {} func (*DotExpression) _expressionNode() {} +func (*EmptyExpression) _expressionNode() {} func (*FunctionLiteral) _expressionNode() {} func (*Identifier) _expressionNode() {} func (*NewExpression) _expressionNode() {} @@ -399,6 +405,7 @@ func (self *BracketExpression) Idx0() file.Idx { return self.Left.Idx0() } func (self *CallExpression) Idx0() file.Idx { return self.Callee.Idx0() } func (self *ConditionalExpression) Idx0() file.Idx { return self.Test.Idx0() } func (self *DotExpression) Idx0() file.Idx { return self.Left.Idx0() } +func (self *EmptyExpression) Idx0() file.Idx { return self.Begin } func (self *FunctionLiteral) Idx0() file.Idx { return self.Function } func (self *Identifier) Idx0() file.Idx { return self.Idx } func (self *NewExpression) Idx0() file.Idx { return self.New } @@ -447,6 +454,7 @@ func (self *BracketExpression) Idx1() file.Idx { return self.RightBracket + func (self *CallExpression) Idx1() file.Idx { return self.RightParenthesis + 1 } func (self *ConditionalExpression) Idx1() file.Idx { return self.Test.Idx1() } func (self *DotExpression) Idx1() file.Idx { return self.Identifier.Idx1() } +func (self *EmptyExpression) Idx1() file.Idx { return self.End } func (self *FunctionLiteral) Idx1() file.Idx { return self.Body.Idx1() } func (self *Identifier) Idx1() file.Idx { return file.Idx(int(self.Idx) + len(self.Name)) } func (self *NewExpression) Idx1() file.Idx { return self.RightParenthesis + 1 } diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/builtin_math.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/builtin_math.go index a9f4a55c1..7ce90c339 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/builtin_math.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/builtin_math.go @@ -117,7 +117,13 @@ func builtinMath_pow(call FunctionCall) Value { } func builtinMath_random(call FunctionCall) Value { - return toValue_float64(rand.Float64()) + var v float64 + if call.runtime.random != nil { + v = call.runtime.random() + } else { + v = rand.Float64() + } + return toValue_float64(v) } func builtinMath_round(call FunctionCall) Value { diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/cmpl_parse.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/cmpl_parse.go index e758a5230..f1e002d39 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/cmpl_parse.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/cmpl_parse.go @@ -82,6 +82,9 @@ func (cmpl *_compiler) parseExpression(in ast.Expression) _nodeExpression { identifier: in.Identifier.Name, } + case *ast.EmptyExpression: + return nil + case *ast.FunctionLiteral: name := "" if in.Name != nil { diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/otto.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/otto.go index 2ec033cbc..613533082 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/otto.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/otto.go @@ -363,6 +363,10 @@ func (self Otto) SetDebuggerHandler(fn func(vm *Otto)) { self.runtime.debugger = fn } +func (self Otto) SetRandomSource(fn func() float64) { + self.runtime.random = fn +} + // Context is a structure that contains information about the current execution // context. 
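For illustration only (this snippet is not part of the change set): the new SetRandomSource hook, together with the builtinMath_random fallback above, lets an embedder back Math.random() with its own float64 source. A minimal sketch, assuming the vendored otto at this revision:

    package main

    import (
        "fmt"
        "math/rand"

        "github.com/robertkrimen/otto"
    )

    func main() {
        vm := otto.New()

        // Back Math.random() with a seeded generator so script runs are
        // reproducible. When no source is set, builtinMath_random falls
        // back to rand.Float64().
        vm.SetRandomSource(rand.New(rand.NewSource(42)).Float64)

        value, _ := vm.Run(`Math.random()`)
        fmt.Println(value) // same value on every run
    }

jsre.runEventLoop below wires this up the other way around, seeding a math/rand generator from crypto/rand before handing its Float64 method to the VM.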
type Context struct { diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/expression.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/expression.go index 8baf22f7c..a23a7279a 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/expression.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/expression.go @@ -12,10 +12,14 @@ func (self *_parser) parseIdentifier() *ast.Identifier { literal := self.literal idx := self.idx self.next() - return &ast.Identifier{ + comments := self.findComments(false) + exp := &ast.Identifier{ Name: literal, Idx: idx, } + + self.commentMap.AddComments(exp, comments, ast.TRAILING) + return exp } func (self *_parser) parsePrimaryExpression() ast.Expression { @@ -196,11 +200,20 @@ func (self *_parser) parseVariableDeclarationList(var_ file.Idx) []ast.Expressio var list []ast.Expression for { - list = append(list, self.parseVariableDeclaration(&declarationList)) + comments := self.findComments(false) + + decl := self.parseVariableDeclaration(&declarationList) + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(decl, comments, ast.LEADING) + self.commentMap.AddComments(decl, self.findComments(false), ast.TRAILING) + } + + list = append(list, decl) if self.token != token.COMMA { break } self.next() + } self.scope.declare(&ast.VariableDeclaration{ @@ -211,10 +224,13 @@ func (self *_parser) parseVariableDeclarationList(var_ file.Idx) []ast.Expressio return list } -func (self *_parser) parseObjectPropertyKey() (string, string) { +func (self *_parser) parseObjectPropertyKey() (string, string, []*ast.Comment) { idx, tkn, literal := self.idx, self.token, self.literal value := "" self.next() + + comments := self.findComments(false) + switch tkn { case token.IDENTIFIER: value = literal @@ -238,15 +254,14 @@ func (self *_parser) parseObjectPropertyKey() (string, string) { value = literal } } - return literal, value + return literal, value, comments } func (self *_parser) parseObjectProperty() ast.Property { - - literal, value := self.parseObjectPropertyKey() + literal, value, comments := self.parseObjectPropertyKey() if literal == "get" && self.token != token.COLON { idx := self.idx - _, value := self.parseObjectPropertyKey() + _, value, _ := self.parseObjectPropertyKey() parameterList := self.parseFunctionParameterList() node := &ast.FunctionLiteral{ @@ -261,7 +276,7 @@ func (self *_parser) parseObjectProperty() ast.Property { } } else if literal == "set" && self.token != token.COLON { idx := self.idx - _, value := self.parseObjectPropertyKey() + _, value, _ := self.parseObjectPropertyKey() parameterList := self.parseFunctionParameterList() node := &ast.FunctionLiteral{ @@ -277,63 +292,128 @@ func (self *_parser) parseObjectProperty() ast.Property { } self.expect(token.COLON) + comments2 := self.findComments(false) - return ast.Property{ + exp := ast.Property{ Key: value, Kind: "value", Value: self.parseAssignmentExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Value, comments, ast.KEY) + self.commentMap.AddComments(exp.Value, comments2, ast.COLON) + } + return exp } func (self *_parser) parseObjectLiteral() ast.Expression { var value []ast.Property idx0 := self.expect(token.LEFT_BRACE) + + var comments2 []*ast.Comment for self.token != token.RIGHT_BRACE && self.token != token.EOF { + + // Leading comments for object literal + comments := self.findComments(false) property := self.parseObjectProperty() + if self.mode&StoreComments != 0 { + 
self.commentMap.AddComments(property.Value, comments, ast.LEADING) + self.commentMap.AddComments(property.Value, comments2, ast.LEADING) + } value = append(value, property) if self.token == token.COMMA { self.next() + + // Find leading comments after trailing comma + comments2 = self.findComments(false) continue } } idx1 := self.expect(token.RIGHT_BRACE) - return &ast.ObjectLiteral{ + exp := &ast.ObjectLiteral{ LeftBrace: idx0, RightBrace: idx1, Value: value, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, comments2, ast.FINAL) + } + self.consumeComments(exp, ast.FINAL) + + return exp } func (self *_parser) parseArrayLiteral() ast.Expression { - idx0 := self.expect(token.LEFT_BRACKET) + var comments2 []*ast.Comment + var comments []*ast.Comment var value []ast.Expression for self.token != token.RIGHT_BRACKET && self.token != token.EOF { + // Find leading comments for both empty and non-empty expressions + comments = self.findComments(false) + if self.token == token.COMMA { self.next() - value = append(value, nil) + + // This kind of comment requires a special empty expression node. + empty := &ast.EmptyExpression{self.idx, self.idx} + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(empty, comments, ast.LEADING) + self.commentMap.AddComments(empty, comments2, ast.LEADING) + } + + value = append(value, empty) + + // This comment belongs to the following expression, or trailing + comments2 = self.findComments(false) + continue } - value = append(value, self.parseAssignmentExpression()) + + exp := self.parseAssignmentExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, comments, ast.LEADING) + self.commentMap.AddComments(exp, comments2, ast.LEADING) + } + + value = append(value, exp) if self.token != token.RIGHT_BRACKET { self.expect(token.COMMA) } + + // This comment belongs to the following expression, or trailing + comments2 = self.findComments(false) } idx1 := self.expect(token.RIGHT_BRACKET) - return &ast.ArrayLiteral{ + array := &ast.ArrayLiteral{ LeftBracket: idx0, RightBracket: idx1, Value: value, } + + // This is where comments after a possible trailing comma are added + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(array, comments2, ast.FINAL) + } + + return array } func (self *_parser) parseArgumentList() (argumentList []ast.Expression, idx0, idx1 file.Idx) { idx0 = self.expect(token.LEFT_PARENTHESIS) if self.token != token.RIGHT_PARENTHESIS { for { - argumentList = append(argumentList, self.parseAssignmentExpression()) + comments := self.findComments(false) + exp := self.parseAssignmentExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, comments, ast.LEADING) + } + argumentList = append(argumentList, exp) if self.token != token.COMMA { break } @@ -346,12 +426,17 @@ func (self *_parser) parseArgumentList() (argumentList []ast.Expression, idx0, i func (self *_parser) parseCallExpression(left ast.Expression) ast.Expression { argumentList, idx0, idx1 := self.parseArgumentList() - return &ast.CallExpression{ + exp := &ast.CallExpression{ Callee: left, LeftParenthesis: idx0, ArgumentList: argumentList, RightParenthesis: idx1, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, self.findComments(false), ast.TRAILING) + } + return exp } func (self *_parser) parseDotMember(left ast.Expression) ast.Expression { @@ -402,6 +487,11 @@ func (self *_parser) parseNewExpression() ast.Expression { node.LeftParenthesis = idx0 node.RightParenthesis = idx1 
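For readers new to the comment machinery being threaded through the parser in this change: the ast.CommentMap added in comments.go above is a plain map from AST node to comment list, with each comment carrying a position. A self-contained sketch of that API (illustrative only, not part of the change):

    package main

    import (
        "fmt"

        "github.com/robertkrimen/otto/ast"
    )

    func main() {
        cm := ast.CommentMap{}
        node := &ast.Identifier{Name: "x", Idx: 1}

        // File a comment against the node as a LEADING comment.
        cm.AddComments(node, []*ast.Comment{{Begin: 1, Text: " note"}}, ast.LEADING)

        fmt.Println(cm.Size())            // 1
        fmt.Println(cm[node][0].Position) // Leading
    }

The object- and array-literal parsing around this point files comments the same way, choosing KEY, COLON, LEADING or FINAL depending on whether the comment sits after a key, after a colon, before a value, or after a trailing comma.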
} + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, self.findComments(false), ast.TRAILING) + } + return node } @@ -414,6 +504,10 @@ func (self *_parser) parseLeftHandSideExpression() ast.Expression { left = self.parsePrimaryExpression() } + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left, self.findComments(false), ast.TRAILING) + } + for { if self.token == token.PERIOD { left = self.parseDotMember(left) @@ -442,6 +536,10 @@ func (self *_parser) parseLeftHandSideExpressionAllowCall() ast.Expression { left = self.parsePrimaryExpression() } + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left, self.findComments(false), ast.TRAILING) + } + for { if self.token == token.PERIOD { left = self.parseDotMember(left) @@ -476,12 +574,18 @@ func (self *_parser) parsePostfixExpression() ast.Expression { self.nextStatement() return &ast.BadExpression{From: idx, To: self.idx} } - return &ast.UnaryExpression{ + exp := &ast.UnaryExpression{ Operator: tkn, Idx: idx, Operand: operand, Postfix: true, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, self.findComments(false), ast.TRAILING) + } + + return exp } return operand @@ -496,16 +600,30 @@ func (self *_parser) parseUnaryExpression() ast.Expression { tkn := self.token idx := self.idx self.next() - return &ast.UnaryExpression{ + + comments := self.findComments(false) + + exp := &ast.UnaryExpression{ Operator: tkn, Idx: idx, Operand: self.parseUnaryExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Operand, comments, ast.LEADING) + } + return exp case token.INCREMENT, token.DECREMENT: tkn := self.token idx := self.idx self.next() + + comments := self.findComments(false) + operand := self.parseUnaryExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(operand, comments, ast.LEADING) + } switch operand.(type) { case *ast.Identifier, *ast.DotExpression, *ast.BracketExpression: default: @@ -531,11 +649,18 @@ func (self *_parser) parseMultiplicativeExpression() ast.Expression { self.token == token.REMAINDER { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -548,11 +673,18 @@ func (self *_parser) parseAdditiveExpression() ast.Expression { for self.token == token.PLUS || self.token == token.MINUS { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -566,11 +698,18 @@ func (self *_parser) parseShiftExpression() ast.Expression { self.token == token.UNSIGNED_SHIFT_RIGHT { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -590,31 +729,55 @@ func (self *_parser) parseRelationalExpression() ast.Expression { case token.LESS, token.LESS_OR_EQUAL, token.GREATER, token.GREATER_OR_EQUAL: tkn := self.token self.next() - return &ast.BinaryExpression{ + + comments := self.findComments(false) + + exp := 
&ast.BinaryExpression{ Operator: tkn, Left: left, Right: self.parseRelationalExpression(), Comparison: true, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Right, comments, ast.LEADING) + } + return exp case token.INSTANCEOF: tkn := self.token self.next() - return &ast.BinaryExpression{ + + comments := self.findComments(false) + + exp := &ast.BinaryExpression{ Operator: tkn, Left: left, Right: self.parseRelationalExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Right, comments, ast.LEADING) + } + return exp case token.IN: if !allowIn { return left } tkn := self.token self.next() - return &ast.BinaryExpression{ + + comments := self.findComments(false) + + exp := &ast.BinaryExpression{ Operator: tkn, Left: left, Right: self.parseRelationalExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Right, comments, ast.LEADING) + } + return exp } return left @@ -628,12 +791,19 @@ func (self *_parser) parseEqualityExpression() ast.Expression { self.token == token.STRICT_EQUAL || self.token == token.STRICT_NOT_EQUAL { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), Comparison: true, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -646,11 +816,18 @@ func (self *_parser) parseBitwiseAndExpression() ast.Expression { for self.token == token.AND { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -663,11 +840,18 @@ func (self *_parser) parseBitwiseExclusiveOrExpression() ast.Expression { for self.token == token.EXCLUSIVE_OR { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -680,11 +864,18 @@ func (self *_parser) parseBitwiseOrExpression() ast.Expression { for self.token == token.OR { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -697,11 +888,18 @@ func (self *_parser) parseLogicalAndExpression() ast.Expression { for self.token == token.LOGICAL_AND { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -714,11 +912,18 @@ func (self *_parser) parseLogicalOrExpression() ast.Expression { for self.token == token.LOGICAL_OR { tkn := self.token self.next() + + comments := self.findComments(false) + left = &ast.BinaryExpression{ Operator: tkn, Left: left, Right: next(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left.(*ast.BinaryExpression).Right, comments, ast.LEADING) + } } return left @@ -729,13 +934,29 @@ func (self *_parser) 
parseConditionlExpression() ast.Expression { if self.token == token.QUESTION_MARK { self.next() + + // Comments before the consequence + comments1 := self.findComments(false) + consequent := self.parseAssignmentExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(consequent, comments1, ast.LEADING) + } + self.expect(token.COLON) - return &ast.ConditionalExpression{ + + // Comments before the alternate + comments2 := self.findComments(false) + exp := &ast.ConditionalExpression{ Test: left, Consequent: consequent, Alternate: self.parseAssignmentExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Alternate, comments2, ast.LEADING) + } + return exp } return left @@ -783,17 +1004,30 @@ func (self *_parser) parseAssignmentExpression() ast.Expression { self.nextStatement() return &ast.BadExpression{From: idx, To: self.idx} } - return &ast.AssignExpression{ + + comments := self.findComments(false) + + exp := &ast.AssignExpression{ Left: left, Operator: operator, Right: self.parseAssignmentExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp.Right, comments, ast.LEADING) + } + + return exp } return left } func (self *_parser) parseExpression() ast.Expression { + + comments := self.findComments(false) + statementComments := self.fetchComments() + next := self.parseAssignmentExpression left := next() @@ -811,5 +1045,10 @@ func (self *_parser) parseExpression() ast.Expression { } } + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left, comments, ast.LEADING) + self.commentMap.AddComments(left, statementComments, ast.LEADING) + } + return left } diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/lexer.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/lexer.go index bc3e74f77..a510c76d2 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/lexer.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/lexer.go @@ -120,6 +120,7 @@ func isLineTerminator(chr rune) bool { func (self *_parser) scan() (tkn token.Token, literal string, idx file.Idx) { self.implicitSemicolon = false + self.skippedLineBreak = false for { self.skipWhiteSpace() @@ -238,9 +239,20 @@ func (self *_parser) scan() (tkn token.Token, literal string, idx file.Idx) { tkn = self.switch2(token.MULTIPLY, token.MULTIPLY_ASSIGN) case '/': if self.chr == '/' { + if self.mode&StoreComments != 0 { + runes := self.readSingleLineComment() + literal = string(runes) + tkn = token.COMMENT + return + } self.skipSingleLineComment() continue } else if self.chr == '*' { + if self.mode&StoreComments != 0 { + literal = string(self.readMultiLineComment()) + tkn = token.COMMENT + return + } self.skipMultiLineComment() continue } else { @@ -411,6 +423,39 @@ func (self *_RegExp_parser) read() { } } +func (self *_parser) readSingleLineComment() (result []rune) { + for self.chr != -1 { + self.read() + if isLineTerminator(self.chr) { + return + } + result = append(result, self.chr) + } + + // Get rid of the trailing -1 + result = result[:len(result)-1] + + return +} + +func (self *_parser) readMultiLineComment() (result []rune) { + self.read() + for self.chr >= 0 { + chr := self.chr + self.read() + if chr == '*' && self.chr == '/' { + self.read() + return + } + + result = append(result, chr) + } + + self.errorUnexpected(0, self.chr) + + return +} + func (self *_parser) skipSingleLineComment() { for self.chr != -1 { self.read() @@ -442,6 +487,7 @@ func (self *_parser) skipWhiteSpace() { continue 
case '\r': if self._peek() == '\n' { + self.skippedLineBreak = true self.read() } fallthrough @@ -449,6 +495,7 @@ func (self *_parser) skipWhiteSpace() { if self.insertSemicolon { return } + self.skippedLineBreak = true self.read() continue } diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/parser.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/parser.go index 92ac5b0c7..18328edd6 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/parser.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/parser.go @@ -49,12 +49,13 @@ type Mode uint const ( IgnoreRegExpErrors Mode = 1 << iota // Ignore RegExp compatibility errors (allow backtracking) + StoreComments // Store the comments from source to the comments map ) type _parser struct { - str string - length int - base int + str string + length int + base int chr rune // The current character chrOffset int // The offset of current character @@ -79,15 +80,22 @@ type _parser struct { mode Mode file *file.File + + comments []*ast.Comment + commentMap *ast.CommentMap + skippedLineBreak bool } func _newParser(filename, src string, base int) *_parser { return &_parser{ - chr: ' ', // This is set so we can start scanning by skipping whitespace - str: src, - length: len(src), - base: base, - file: file.NewFile(filename, src, base), + chr: ' ', // This is set so we can start scanning by skipping whitespace + str: src, + length: len(src), + base: base, + file: file.NewFile(filename, src, base), + comments: make([]*ast.Comment, 0), + commentMap: &ast.CommentMap{}, + skippedLineBreak: false, } } @@ -184,6 +192,9 @@ func (self *_parser) parse() (*ast.Program, error) { if false { self.errors.Sort() } + + self.addCommentStatements(program, ast.FINAL) + return program, self.errors.Err() } @@ -270,3 +281,63 @@ func (self *_parser) position(idx file.Idx) file.Position { return position } + +// findComments finds the following comments. +// Comments on the same line will be grouped together and returned. +// After the first line break, comments will be added as statement comments. 
+func (self *_parser) findComments(ignoreLineBreak bool) []*ast.Comment { + if self.mode&StoreComments == 0 { + return nil + } + comments := make([]*ast.Comment, 0) + + newline := false + + for self.implicitSemicolon == false || ignoreLineBreak { + if self.token != token.COMMENT { + break + } + + comment := &ast.Comment{ + Begin: self.idx, + Text: self.literal, + Position: ast.TBD, + } + + newline = self.skippedLineBreak || newline + + if newline && !ignoreLineBreak { + self.comments = append(self.comments, comment) + } else { + comments = append(comments, comment) + } + + self.next() + } + + return comments +} + +// addCommentStatements will add the previously parsed, not positioned comments to the provided node +func (self *_parser) addCommentStatements(node ast.Node, position ast.CommentPosition) { + if len(self.comments) > 0 { + self.commentMap.AddComments(node, self.comments, position) + + // Reset comments + self.comments = make([]*ast.Comment, 0) + } +} + +// fetchComments fetches the current comments, resets the slice and returns the comments +func (self *_parser) fetchComments() (comments []*ast.Comment) { + comments = self.comments + self.comments = nil + + return comments +} + +// consumeComments consumes the current comments and appends them to the provided node +func (self *_parser) consumeComments(node ast.Node, position ast.CommentPosition) { + self.commentMap.AddComments(node, self.comments, position) + self.comments = nil +} diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/statement.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/statement.go index 2059d3856..987ac02c1 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/statement.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/parser/statement.go @@ -7,10 +7,24 @@ import ( func (self *_parser) parseBlockStatement() *ast.BlockStatement { node := &ast.BlockStatement{} + + // Find comments before the leading brace + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, self.findComments(false), ast.LEADING) + } + node.LeftBrace = self.expect(token.LEFT_BRACE) node.List = self.parseStatementList() + + self.consumeComments(node, ast.FINAL) + node.RightBrace = self.expect(token.RIGHT_BRACE) + // Find comments after the trailing brace + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, self.findComments(false), ast.TRAILING) + } + return node } @@ -21,7 +35,14 @@ func (self *_parser) parseEmptyStatement() ast.Statement { func (self *_parser) parseStatementList() (list []ast.Statement) { for self.token != token.RIGHT_BRACE && self.token != token.EOF { - list = append(list, self.parseStatement()) + if self.token == token.COMMENT { + self.parseCommentElement() + continue + } + statement := self.parseStatement() + list = append(list, statement) + + self.addCommentStatements(statement, ast.LEADING) } return @@ -77,6 +98,9 @@ func (self *_parser) parseStatement() ast.Statement { // LabelledStatement colon := self.idx self.next() // : + + comments := self.findComments(false) + label := identifier.Name for _, value := range self.scope.labels { if label == value { @@ -86,11 +110,17 @@ func (self *_parser) parseStatement() ast.Statement { self.scope.labels = append(self.scope.labels, label) // Push the label statement := self.parseStatement() self.scope.labels = self.scope.labels[:len(self.scope.labels)-1] // Pop the label - return &ast.LabelledStatement{ + exp := &ast.LabelledStatement{ Label: identifier, Colon: colon, Statement: 
statement, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(exp, comments, ast.TRAILING) + } + + return exp } self.optionalSemicolon() @@ -107,16 +137,26 @@ func (self *_parser) parseTryStatement() ast.Statement { Body: self.parseBlockStatement(), } + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Body, self.findComments(true), ast.TRAILING) + } + if self.token == token.CATCH { catch := self.idx self.next() self.expect(token.LEFT_PARENTHESIS) + comments := self.findComments(true) if self.token != token.IDENTIFIER { self.expect(token.IDENTIFIER) self.nextStatement() return &ast.BadStatement{From: catch, To: self.idx} } else { identifier := self.parseIdentifier() + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(identifier, comments, ast.LEADING) + } + self.expect(token.RIGHT_PARENTHESIS) node.Catch = &ast.CatchStatement{ Catch: catch, @@ -124,11 +164,22 @@ func (self *_parser) parseTryStatement() ast.Statement { Body: self.parseBlockStatement(), } } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Catch, self.findComments(true), ast.TRAILING) + } } if self.token == token.FINALLY { self.next() + + comments := self.findComments(true) + node.Finally = self.parseBlockStatement() + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Finally, comments, ast.LEADING) + } } if node.Catch == nil && node.Finally == nil { @@ -143,10 +194,15 @@ func (self *_parser) parseFunctionParameterList() *ast.ParameterList { opening := self.expect(token.LEFT_PARENTHESIS) var list []*ast.Identifier for self.token != token.RIGHT_PARENTHESIS && self.token != token.EOF { + comments := self.findComments(true) if self.token != token.IDENTIFIER { self.expect(token.IDENTIFIER) } else { - list = append(list, self.parseIdentifier()) + identifier := self.parseIdentifier() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(identifier, comments, ast.LEADING) + } + list = append(list, identifier) } if self.token != token.RIGHT_PARENTHESIS { self.expect(token.COMMA) @@ -218,12 +274,24 @@ func (self *_parser) parseFunctionBlock(node *ast.FunctionLiteral) { func (self *_parser) parseDebuggerStatement() ast.Statement { idx := self.expect(token.DEBUGGER) + comments := self.findComments(true) + node := &ast.DebuggerStatement{ Debugger: idx, } + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, comments, ast.TRAILING) + } + self.semicolon() + if !self.skippedLineBreak { + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, self.findComments(false), ast.TRAILING) + } + } + return node } @@ -309,30 +377,77 @@ func (self *_parser) parseSwitchStatement() ast.Statement { func (self *_parser) parseWithStatement() ast.Statement { self.expect(token.WITH) + + // Find the comments after with + comments := self.findComments(true) + self.expect(token.LEFT_PARENTHESIS) + node := &ast.WithStatement{ Object: self.parseExpression(), } self.expect(token.RIGHT_PARENTHESIS) + // Add the key comments + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, comments, ast.KEY) + } + + // Find the leading comments for the body + comments = self.findComments(true) + node.Body = self.parseStatement() + // Add the body comments + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Body, comments, ast.LEADING) + } + + // Move the trailing comments to the with statement + self.commentMap.MoveComments(node.Body, node, ast.TRAILING) + return node } func (self *_parser) 
parseCaseStatement() *ast.CaseStatement { + var comments []*ast.Comment + node := &ast.CaseStatement{ Case: self.idx, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, self.findComments(true), ast.LEADING) + } + + // Consume current comments + self.consumeComments(node, ast.LEADING) + if self.token == token.DEFAULT { self.next() } else { self.expect(token.CASE) + + comments = self.findComments(true) + node.Test = self.parseExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Test, comments, ast.LEADING) + } + + comments = self.findComments(true) + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Test, comments, ast.TRAILING) + } } + self.expect(token.COLON) + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Test, self.findComments(false), ast.TRAILING) + } + for { if self.token == token.EOF || self.token == token.RIGHT_BRACE || @@ -340,8 +455,12 @@ func (self *_parser) parseCaseStatement() *ast.CaseStatement { self.token == token.DEFAULT { break } - node.Consequent = append(node.Consequent, self.parseStatement()) + consequent := self.parseStatement() + node.Consequent = append(node.Consequent, consequent) + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(consequent, self.findComments(false), ast.TRAILING) + } } return node @@ -360,44 +479,84 @@ func (self *_parser) parseForIn(into ast.Expression) *ast.ForInStatement { // Already have consumed "<into> in" + // Comments after the in, before the expression + comments := self.findComments(true) + source := self.parseExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(source, comments, ast.LEADING) + } + self.expect(token.RIGHT_PARENTHESIS) - return &ast.ForInStatement{ + comments = self.findComments(true) + body := self.parseIterationStatement() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(body, comments, ast.LEADING) + } + + forin := &ast.ForInStatement{ Into: into, Source: source, - Body: self.parseIterationStatement(), + Body: body, } + + self.commentMap.MoveComments(body, forin, ast.TRAILING) + + return forin } func (self *_parser) parseFor(initializer ast.Expression) *ast.ForStatement { // Already have consumed "<initializer> ;" + comments := self.findComments(true) + var test, update ast.Expression if self.token != token.SEMICOLON { test = self.parseExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(test, comments, ast.LEADING) + } } self.expect(token.SEMICOLON) + comments = self.findComments(true) + if self.token != token.RIGHT_PARENTHESIS { update = self.parseExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(update, comments, ast.LEADING) + } } self.expect(token.RIGHT_PARENTHESIS) - return &ast.ForStatement{ + comments = self.findComments(true) + + body := self.parseIterationStatement() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(body, comments, ast.LEADING) + } + + forstatement := &ast.ForStatement{ Initializer: initializer, Test: test, Update: update, - Body: self.parseIterationStatement(), + Body: body, } + + self.commentMap.MoveComments(body, forstatement, ast.TRAILING) + + return forstatement } func (self *_parser) parseForOrForInStatement() ast.Statement { idx := self.expect(token.FOR) self.expect(token.LEFT_PARENTHESIS) + comments := self.findComments(true) + var left []ast.Expression forIn := false @@ -435,11 +594,19 @@ func (self *_parser) parseForOrForInStatement() ast.Statement { 
self.nextStatement() return &ast.BadStatement{From: idx, To: self.idx} } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(left[0], comments, ast.LEADING) + } return self.parseForIn(left[0]) } self.expect(token.SEMICOLON) - return self.parseFor(&ast.SequenceExpression{Sequence: left}) + initializer := &ast.SequenceExpression{Sequence: left} + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(initializer, comments, ast.LEADING) + } + return self.parseFor(initializer) } func (self *_parser) parseVariableStatement() *ast.VariableStatement { @@ -447,12 +614,27 @@ func (self *_parser) parseVariableStatement() *ast.VariableStatement { idx := self.expect(token.VAR) list := self.parseVariableDeclarationList(idx) - self.semicolon() - return &ast.VariableStatement{ + statement := &ast.VariableStatement{ Var: idx, List: list, } + + self.commentMap.MoveComments(statement.List[len(statement.List)-1], statement, ast.TRAILING) + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(statement, self.findComments(true), ast.TRAILING) + } + + self.semicolon() + + if self.skippedLineBreak { + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(statement, self.findComments(false), ast.TRAILING) + } + } + + return statement } func (self *_parser) parseDoWhileStatement() ast.Statement { @@ -463,7 +645,13 @@ func (self *_parser) parseDoWhileStatement() ast.Statement { }() self.expect(token.DO) + + comments := self.findComments(true) + node := &ast.DoWhileStatement{} + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, comments, ast.KEY) + } if self.token == token.LEFT_BRACE { node.Body = self.parseBlockStatement() } else { @@ -471,49 +659,123 @@ func (self *_parser) parseDoWhileStatement() ast.Statement { } self.expect(token.WHILE) + + comments = self.findComments(true) + self.expect(token.LEFT_PARENTHESIS) node.Test = self.parseExpression() + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Test, comments, ast.LEADING) + } + self.expect(token.RIGHT_PARENTHESIS) + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Test, self.findComments(false), ast.TRAILING) + } + return node } func (self *_parser) parseWhileStatement() ast.Statement { self.expect(token.WHILE) + + // Comments after while keyword + comments := self.findComments(true) + self.expect(token.LEFT_PARENTHESIS) node := &ast.WhileStatement{ Test: self.parseExpression(), } + + // Add the while comments + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, comments, ast.KEY) + } + self.expect(token.RIGHT_PARENTHESIS) + + // Finding comments prior to the body + comments = self.findComments(true) + node.Body = self.parseIterationStatement() + // Adding the comments prior to the body + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Body, comments, ast.LEADING) + } + + // Move the trailing comments to the while statement + self.commentMap.MoveComments(node.Body, node, ast.TRAILING) + return node } func (self *_parser) parseIfStatement() ast.Statement { self.expect(token.IF) + + comments := self.findComments(true) + self.expect(token.LEFT_PARENTHESIS) node := &ast.IfStatement{ Test: self.parseExpression(), } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node, comments, ast.KEY) + } + self.expect(token.RIGHT_PARENTHESIS) + comments = self.findComments(true) + if self.token == token.LEFT_BRACE { node.Consequent = self.parseBlockStatement() } else { node.Consequent = self.parseStatement() } + 
if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Consequent, comments, ast.LEADING) + self.commentMap.AddComments(node.Consequent, self.findComments(true), ast.TRAILING) + } + if self.token == token.ELSE { self.next() + comments = self.findComments(true) + node.Alternate = self.parseStatement() + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(node.Alternate, comments, ast.LEADING) + self.commentMap.AddComments(node.Alternate, self.findComments(false), ast.TRAILING) + } } return node } func (self *_parser) parseSourceElement() ast.Statement { - return self.parseStatement() + + statementComment := self.fetchComments() + + statement := self.parseStatement() + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(statement, statementComment, ast.LEADING) + } + + return statement +} + +func (self *_parser) parseCommentElement() { + literal := self.literal + idx := self.expect(token.COMMENT) + self.comments = append(self.comments, &ast.Comment{ + Begin: idx, + Text: literal, + Position: ast.LEADING, + }) } func (self *_parser) parseSourceElements() []ast.Statement { @@ -524,10 +786,19 @@ func (self *_parser) parseSourceElements() []ast.Statement { break } + if self.token == token.COMMENT { + self.parseCommentElement() + continue + } + body = append(body, self.parseSourceElement()) } for self.token != token.EOF { + if self.token == token.COMMENT { + self.parseCommentElement() + continue + } body = append(body, self.parseSourceElement()) } @@ -546,6 +817,9 @@ func (self *_parser) parseProgram() *ast.Program { func (self *_parser) parseBreakStatement() ast.Statement { idx := self.expect(token.BREAK) + + breakComments := self.findComments(true) + semicolon := self.implicitSemicolon if self.token == token.SEMICOLON { semicolon = true @@ -557,10 +831,16 @@ func (self *_parser) parseBreakStatement() ast.Statement { if !self.scope.inIteration && !self.scope.inSwitch { goto illegal } - return &ast.BranchStatement{ + breakStatement := &ast.BranchStatement{ Idx: idx, Token: token.BREAK, } + + if self.mode&StoreComments != 0 { + self.commentMap.AddComments(breakStatement, breakComments, ast.TRAILING) + } + + return breakStatement } if self.token == token.IDENTIFIER { diff --git a/Godeps/_workspace/src/github.com/robertkrimen/otto/runtime.go b/Godeps/_workspace/src/github.com/robertkrimen/otto/runtime.go index 168cb1cde..a998f7acc 100644 --- a/Godeps/_workspace/src/github.com/robertkrimen/otto/runtime.go +++ b/Godeps/_workspace/src/github.com/robertkrimen/otto/runtime.go @@ -55,6 +55,7 @@ type _runtime struct { otto *Otto eval *_object // The builtin eval, for determine indirect versus direct invocation debugger func(*Otto) + random func() float64 labels []string // FIXME lck sync.Mutex diff --git a/crypto/ecies/ecies.go b/crypto/ecies/ecies.go index 65dc5b38b..ee4285617 100644 --- a/crypto/ecies/ecies.go +++ b/crypto/ecies/ecies.go @@ -192,11 +192,9 @@ func concatKDF(hash hash.Hash, z, s1 []byte, kdLen int) (k []byte, err error) { // messageTag computes the MAC of a message (called the tag) as per // SEC 1, 3.5. func messageTag(hash func() hash.Hash, km, msg, shared []byte) []byte { - if shared == nil { - shared = make([]byte, 0) - } mac := hmac.New(hash, km) mac.Write(msg) + mac.Write(shared) tag := mac.Sum(nil) return tag } @@ -243,9 +241,11 @@ func symDecrypt(rand io.Reader, params *ECIESParams, key, ct []byte) (m []byte, return } -// Encrypt encrypts a message using ECIES as specified in SEC 1, 5.1. 
If -// the shared information parameters aren't being used, they should be -// nil. +// Encrypt encrypts a message using ECIES as specified in SEC 1, 5.1. +// +// s1 and s2 contain shared information that is not part of the resulting +// ciphertext. s1 is fed into key derivation, s2 is fed into the MAC. If the +// shared information parameters aren't being used, they should be nil. func Encrypt(rand io.Reader, pub *PublicKey, m, s1, s2 []byte) (ct []byte, err error) { params := pub.Params if params == nil { diff --git a/crypto/ecies/ecies_test.go b/crypto/ecies/ecies_test.go index 6a0ea3f02..cb09061ce 100644 --- a/crypto/ecies/ecies_test.go +++ b/crypto/ecies/ecies_test.go @@ -408,6 +408,36 @@ func TestEncryptDecrypt(t *testing.T) { } } +func TestDecryptShared2(t *testing.T) { + prv, err := GenerateKey(rand.Reader, DefaultCurve, nil) + if err != nil { + t.Fatal(err) + } + message := []byte("Hello, world.") + shared2 := []byte("shared data 2") + ct, err := Encrypt(rand.Reader, &prv.PublicKey, message, nil, shared2) + if err != nil { + t.Fatal(err) + } + + // Check that decrypting with correct shared data works. + pt, err := prv.Decrypt(rand.Reader, ct, nil, shared2) + if err != nil { + t.Fatal(err) + } + if !bytes.Equal(pt, message) { + t.Fatal("ecies: plaintext doesn't match message") + } + + // Decrypting without shared data or incorrect shared data fails. + if _, err = prv.Decrypt(rand.Reader, ct, nil, nil); err == nil { + t.Fatal("ecies: decrypting without shared data didn't fail") + } + if _, err = prv.Decrypt(rand.Reader, ct, nil, []byte("garbage")); err == nil { + t.Fatal("ecies: decrypting with incorrect shared data didn't fail") + } +} + // TestMarshalEncryption validates the encode/decode produces a valid // ECIES encryption key. func TestMarshalEncryption(t *testing.T) { diff --git a/jsre/jsre.go b/jsre/jsre.go index a4c9d970b..f4464910d 100644 --- a/jsre/jsre.go +++ b/jsre/jsre.go @@ -18,8 +18,11 @@ package jsre import ( + crand "crypto/rand" + "encoding/binary" "fmt" "io/ioutil" + "math/rand" "sync" "time" @@ -70,6 +73,18 @@ func New(assetPath string) *JSRE { return re } +// randomSource returns a pseudo random value generator. +func randomSource() *rand.Rand { + bytes := make([]byte, 8) + seed := time.Now().UnixNano() + if _, err := crand.Read(bytes); err == nil { + seed = int64(binary.LittleEndian.Uint64(bytes)) + } + + src := rand.NewSource(seed) + return rand.New(src) +} + // This function runs the main event loop from a goroutine that is started // when JSRE is created. Use Stop() before exiting to properly stop it. // The event loop processes vm access requests from the evalQueue in a @@ -81,6 +96,9 @@ func New(assetPath string) *JSRE { // called from JS through an RPC call. func (self *JSRE) runEventLoop() { vm := otto.New() + r := randomSource() + vm.SetRandomSource(r.Float64) + registry := map[*jsTimer]*jsTimer{} ready := make(chan *jsTimer) diff --git a/p2p/discover/udp.go b/p2p/discover/udp.go index cec9046a3..81674f552 100644 --- a/p2p/discover/udp.go +++ b/p2p/discover/udp.go @@ -67,6 +67,8 @@ type ( Version uint From, To rpcEndpoint Expiration uint64 + // Ignore additional fields (for forward compatibility). + Rest []rlp.RawValue `rlp:"tail"` } // pong is the reply to ping. @@ -78,18 +80,24 @@ type ( ReplyTok []byte // This contains the hash of the ping packet. Expiration uint64 // Absolute timestamp at which the packet becomes invalid. + // Ignore additional fields (for forward compatibility). 
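Tying together the reworked Encrypt documentation and TestDecryptShared2 above: because messageTag now feeds the shared data into the MAC, the s2 value passed to Encrypt must be presented again at decryption time. A short usage sketch (not part of the change; error handling elided):

    package main

    import (
        "crypto/rand"
        "fmt"

        "github.com/ethereum/go-ethereum/crypto/ecies"
    )

    func main() {
        prv, _ := ecies.GenerateKey(rand.Reader, ecies.DefaultCurve, nil)
        s2 := []byte("shared data 2") // authenticated by the MAC, never encrypted

        ct, _ := ecies.Encrypt(rand.Reader, &prv.PublicKey, []byte("Hello, world."), nil, s2)

        // Decryption succeeds only when the same s2 is supplied, and fails
        // with nil or different shared data, as the new test checks.
        if pt, err := prv.Decrypt(rand.Reader, ct, nil, s2); err == nil {
            fmt.Printf("%s\n", pt)
        }
    }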
+ Rest []rlp.RawValue `rlp:"tail"` } // findnode is a query for nodes close to the given target. findnode struct { Target NodeID // doesn't need to be an actual public key Expiration uint64 + // Ignore additional fields (for forward compatibility). + Rest []rlp.RawValue `rlp:"tail"` } // reply to findnode neighbors struct { Nodes []rpcNode Expiration uint64 + // Ignore additional fields (for forward compatibility). + Rest []rlp.RawValue `rlp:"tail"` } rpcNode struct { @@ -522,7 +530,8 @@ func decodePacket(buf []byte) (packet, NodeID, []byte, error) { default: return nil, fromID, hash, fmt.Errorf("unknown type: %d", ptype) } - err = rlp.DecodeBytes(sigdata[1:], req) + s := rlp.NewStream(bytes.NewReader(sigdata[1:]), 0) + err = s.Decode(req) return req, fromID, hash, err } diff --git a/p2p/discover/udp_test.go b/p2p/discover/udp_test.go index ec28867cc..66fc4cf2c 100644 --- a/p2p/discover/udp_test.go +++ b/p2p/discover/udp_test.go @@ -20,6 +20,7 @@ import ( "bytes" "crypto/ecdsa" "encoding/binary" + "encoding/hex" "errors" "fmt" "io" @@ -33,7 +34,9 @@ import ( "time" "github.com/davecgh/go-spew/spew" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" + "github.com/ethereum/go-ethereum/rlp" ) func init() { @@ -434,6 +437,115 @@ func TestUDP_successfulPing(t *testing.T) { } } +var testPackets = []struct { + input string + wantPacket interface{} +}{ + { + input: "71dbda3a79554728d4f94411e42ee1f8b0d561c10e1e5f5893367948c6a7d70bb87b235fa28a77070271b6c164a2dce8c7e13a5739b53b5e96f2e5acb0e458a02902f5965d55ecbeb2ebb6cabb8b2b232896a36b737666c55265ad0a68412f250001ea04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a355", + wantPacket: &ping{ + Version: 4, + From: rpcEndpoint{net.ParseIP("127.0.0.1").To4(), 3322, 5544}, + To: rpcEndpoint{net.ParseIP("::1"), 2222, 3333}, + Expiration: 1136239445, + Rest: []rlp.RawValue{}, + }, + }, + { + input: "e9614ccfd9fc3e74360018522d30e1419a143407ffcce748de3e22116b7e8dc92ff74788c0b6663aaa3d67d641936511c8f8d6ad8698b820a7cf9e1be7155e9a241f556658c55428ec0563514365799a4be2be5a685a80971ddcfa80cb422cdd0101ec04cb847f000001820cfa8215a8d790000000000000000000000000000000018208ae820d058443b9a3550102", + wantPacket: &ping{ + Version: 4, + From: rpcEndpoint{net.ParseIP("127.0.0.1").To4(), 3322, 5544}, + To: rpcEndpoint{net.ParseIP("::1"), 2222, 3333}, + Expiration: 1136239445, + Rest: []rlp.RawValue{{0x01}, {0x02}}, + }, + }, + { + input: "577be4349c4dd26768081f58de4c6f375a7a22f3f7adda654d1428637412c3d7fe917cadc56d4e5e7ffae1dbe3efffb9849feb71b262de37977e7c7a44e677295680e9e38ab26bee2fcbae207fba3ff3d74069a50b902a82c9903ed37cc993c50001f83e82022bd79020010db83c4d001500000000abcdef12820cfa8215a8d79020010db885a308d313198a2e037073488208ae82823a8443b9a355c5010203040531b9019afde696e582a78fa8d95ea13ce3297d4afb8ba6433e4154caa5ac6431af1b80ba76023fa4090c408f6b4bc3701562c031041d4702971d102c9ab7fa5eed4cd6bab8f7af956f7d565ee1917084a95398b6a21eac920fe3dd1345ec0a7ef39367ee69ddf092cbfe5b93e5e568ebc491983c09c76d922dc3", + wantPacket: &ping{ + Version: 555, + From: rpcEndpoint{net.ParseIP("2001:db8:3c4d:15::abcd:ef12"), 3322, 5544}, + To: rpcEndpoint{net.ParseIP("2001:db8:85a3:8d3:1319:8a2e:370:7348"), 2222, 33338}, + Expiration: 1136239445, + Rest: []rlp.RawValue{{0xC5, 0x01, 0x02, 0x03, 0x04, 0x05}}, + }, + }, + { + input: 
"09b2428d83348d27cdf7064ad9024f526cebc19e4958f0fdad87c15eb598dd61d08423e0bf66b2069869e1724125f820d851c136684082774f870e614d95a2855d000f05d1648b2d5945470bc187c2d2216fbe870f43ed0909009882e176a46b0102f846d79020010db885a308d313198a2e037073488208ae82823aa0fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c9548443b9a355c6010203c2040506a0c969a58f6f9095004c0177a6b47f451530cab38966a25cca5cb58f055542124e", + wantPacket: &pong{ + To: rpcEndpoint{net.ParseIP("2001:db8:85a3:8d3:1319:8a2e:370:7348"), 2222, 33338}, + ReplyTok: common.Hex2Bytes("fbc914b16819237dcd8801d7e53f69e9719adecb3cc0e790c57e91ca4461c954"), + Expiration: 1136239445, + Rest: []rlp.RawValue{{0xC6, 0x01, 0x02, 0x03, 0xC2, 0x04, 0x05}, {0x06}}, + }, + }, + { + input: "c7c44041b9f7c7e41934417ebac9a8e1a4c6298f74553f2fcfdcae6ed6fe53163eb3d2b52e39fe91831b8a927bf4fc222c3902202027e5e9eb812195f95d20061ef5cd31d502e47ecb61183f74a504fe04c51e73df81f25c4d506b26db4517490103f84eb840ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f8443b9a35582999983999999280dc62cc8255c73471e0a61da0c89acdc0e035e260add7fc0c04ad9ebf3919644c91cb247affc82b69bd2ca235c71eab8e49737c937a2c396", + wantPacket: &findnode{ + Target: MustHexID("ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd31387574077f301b421bc84df7266c44e9e6d569fc56be00812904767bf5ccd1fc7f"), + Expiration: 1136239445, + Rest: []rlp.RawValue{{0x82, 0x99, 0x99}, {0x83, 0x99, 0x99, 0x99}}, + }, + }, + { + input: "c679fc8fe0b8b12f06577f2e802d34f6fa257e6137a995f6f4cbfc9ee50ed3710faf6e66f932c4c8d81d64343f429651328758b47d3dbc02c4042f0fff6946a50f4a49037a72bb550f3a7872363a83e1b9ee6469856c24eb4ef80b7535bcf99c0004f9015bf90150f84d846321163782115c82115db8403155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32f84984010203040101b840312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069dbf8599020010db83c4d001500000000abcdef12820d05820d05b84038643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aacf8599020010db885a308d313198a2e037073488203e78203e8b8408dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df738443b9a355010203b525a138aa34383fec3d2719a0", + wantPacket: &neighbors{ + Nodes: []rpcNode{ + { + ID: MustHexID("3155e1427f85f10a5c9a7755877748041af1bcd8d474ec065eb33df57a97babf54bfd2103575fa829115d224c523596b401065a97f74010610fce76382c0bf32"), + IP: net.ParseIP("99.33.22.55").To4(), + UDP: 4444, + TCP: 4445, + }, + { + ID: MustHexID("312c55512422cf9b8a4097e9a6ad79402e87a15ae909a4bfefa22398f03d20951933beea1e4dfa6f968212385e829f04c2d314fc2d4e255e0d3bc08792b069db"), + IP: net.ParseIP("1.2.3.4").To4(), + UDP: 1, + TCP: 1, + }, + { + ID: MustHexID("38643200b172dcfef857492156971f0e6aa2c538d8b74010f8e140811d53b98c765dd2d96126051913f44582e8c199ad7c6d6819e9a56483f637feaac9448aac"), + IP: net.ParseIP("2001:db8:3c4d:15::abcd:ef12"), + UDP: 3333, + TCP: 3333, + }, + { + ID: MustHexID("8dcab8618c3253b558d459da53bd8fa68935a719aff8b811197101a4b2b47dd2d47295286fc00cc081bb542d760717d1bdd6bec2c37cd72eca367d6dd3b9df73"), + IP: net.ParseIP("2001:db8:85a3:8d3:1319:8a2e:370:7348"), + UDP: 999, + TCP: 1000, + }, + }, + Expiration: 1136239445, + Rest: []rlp.RawValue{{0x01}, {0x02}, {0x03}}, + }, + }, +} + +func TestForwardCompatibility(t *testing.T) { + testkey, _ := 
crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291") + wantNodeID := PubkeyID(&testkey.PublicKey) + + for _, test := range testPackets { + input, err := hex.DecodeString(test.input) + if err != nil { + t.Fatalf("invalid hex: %s", test.input) + } + packet, nodeid, _, err := decodePacket(input) + if err != nil { + t.Errorf("did not accept packet %s\n%v", test.input, err) + continue + } + if !reflect.DeepEqual(packet, test.wantPacket) { + t.Errorf("got %s\nwant %s", spew.Sdump(packet), spew.Sdump(test.wantPacket)) + } + if nodeid != wantNodeID { + t.Errorf("got id %v\nwant id %v", nodeid, wantNodeID) + } + } +} + // dgramPipe is a fake UDP socket. It queues all sent datagrams. type dgramPipe struct { mu *sync.Mutex diff --git a/p2p/message_test.go b/p2p/message_test.go index 8599b7e87..013214e21 100644 --- a/p2p/message_test.go +++ b/p2p/message_test.go @@ -143,7 +143,8 @@ func TestEOFSignal(t *testing.T) { } func unhex(str string) []byte { - b, err := hex.DecodeString(strings.Replace(str, "\n", "", -1)) + r := strings.NewReplacer("\t", "", " ", "", "\n", "") + b, err := hex.DecodeString(r.Replace(str)) if err != nil { panic(fmt.Sprintf("invalid hex string: %q", str)) } diff --git a/p2p/peer.go b/p2p/peer.go index 72ed4069c..b9d6c099d 100644 --- a/p2p/peer.go +++ b/p2p/peer.go @@ -56,6 +56,9 @@ type protoHandshake struct { Caps []Cap ListenPort uint64 ID discover.NodeID + + // Ignore additional fields (for forward compatibility). + Rest []rlp.RawValue `rlp:"tail"` } // Peer represents a connected remote node. diff --git a/p2p/rlpx.go b/p2p/rlpx.go index 8f429d6ec..9d6cba5b6 100644 --- a/p2p/rlpx.go +++ b/p2p/rlpx.go @@ -24,11 +24,14 @@ import ( "crypto/elliptic" "crypto/hmac" "crypto/rand" + "encoding/binary" "errors" "fmt" "hash" "io" + mrand "math/rand" "net" + "os" "sync" "time" @@ -51,9 +54,10 @@ const ( authMsgLen = sigLen + shaLen + pubLen + shaLen + 1 authRespLen = pubLen + shaLen + 1 - eciesBytes = 65 + 16 + 32 - encAuthMsgLen = authMsgLen + eciesBytes // size of the final ECIES payload sent as initiator's handshake - encAuthRespLen = authRespLen + eciesBytes // size of the final ECIES payload sent as receiver's handshake + eciesOverhead = 65 /* pubkey */ + 16 /* IV */ + 32 /* MAC */ + + encAuthMsgLen = authMsgLen + eciesOverhead // size of encrypted pre-EIP-8 initiator handshake + encAuthRespLen = authRespLen + eciesOverhead // size of encrypted pre-EIP-8 handshake reply // total timeout for encryption handshake and protocol // handshake in both directions. @@ -151,10 +155,6 @@ func readProtocolHandshake(rw MsgReader, our *protoHandshake) (*protoHandshake, if err := msg.Decode(&hs); err != nil { return nil, err } - // validate handshake info - if hs.Version != our.Version { - return nil, DiscIncompatibleVersion - } if (hs.ID == discover.NodeID{}) { return nil, DiscInvalidIdentity } @@ -200,6 +200,29 @@ type secrets struct { Token []byte } +// RLPx v4 handshake auth (defined in EIP-8). +type authMsgV4 struct { + gotPlain bool // whether read packet had plain format. + + Signature [sigLen]byte + InitiatorPubkey [pubLen]byte + Nonce [shaLen]byte + Version uint + + // Ignore additional fields (forward-compatibility) + Rest []rlp.RawValue `rlp:"tail"` +} + +// RLPx v4 handshake response (defined in EIP-8). +type authRespV4 struct { + RandomPubkey [pubLen]byte + Nonce [shaLen]byte + Version uint + + // Ignore additional fields (forward-compatibility) + Rest []rlp.RawValue `rlp:"tail"` +} + // secrets is called after the handshake is completed. 
// It extracts the connection secrets from the handshake values. func (h *encHandshake) secrets(auth, authResp []byte) (secrets, error) { @@ -215,7 +238,6 @@ func (h *encHandshake) secrets(auth, authResp []byte) (secrets, error) { RemoteID: h.remoteID, AES: aesSecret, MAC: crypto.Sha3(ecdheSecret, aesSecret), - Token: crypto.Sha3(sharedSecret), } // setup sha3 instances for the MACs @@ -234,114 +256,89 @@ func (h *encHandshake) secrets(auth, authResp []byte) (secrets, error) { return s, nil } -func (h *encHandshake) ecdhShared(prv *ecdsa.PrivateKey) ([]byte, error) { +// staticSharedSecret returns the static shared secret, the result +// of key agreement between the local and remote static node key. +func (h *encHandshake) staticSharedSecret(prv *ecdsa.PrivateKey) ([]byte, error) { return ecies.ImportECDSA(prv).GenerateShared(h.remotePub, sskLen, sskLen) } +var configSendEIP = os.Getenv("RLPX_EIP8") != "" + // initiatorEncHandshake negotiates a session token on conn. // it should be called on the dialing side of the connection. // // prv is the local client's private key. -// token is the token from a previous session with this node. func initiatorEncHandshake(conn io.ReadWriter, prv *ecdsa.PrivateKey, remoteID discover.NodeID, token []byte) (s secrets, err error) { - h, err := newInitiatorHandshake(remoteID) + h := &encHandshake{initiator: true, remoteID: remoteID} + authMsg, err := h.makeAuthMsg(prv, token) if err != nil { return s, err } - auth, err := h.authMsg(prv, token) + var authPacket []byte + if configSendEIP { + authPacket, err = sealEIP8(authMsg, h) + } else { + authPacket, err = authMsg.sealPlain(h) + } if err != nil { return s, err } - if _, err = conn.Write(auth); err != nil { + if _, err = conn.Write(authPacket); err != nil { return s, err } - response := make([]byte, encAuthRespLen) - if _, err = io.ReadFull(conn, response); err != nil { + authRespMsg := new(authRespV4) + authRespPacket, err := readHandshakeMsg(authRespMsg, encAuthRespLen, prv, conn) + if err != nil { return s, err } - if err := h.decodeAuthResp(response, prv); err != nil { + if err := h.handleAuthResp(authRespMsg); err != nil { return s, err } - return h.secrets(auth, response) + return h.secrets(authPacket, authRespPacket) } -func newInitiatorHandshake(remoteID discover.NodeID) (*encHandshake, error) { - rpub, err := remoteID.Pubkey() +// makeAuthMsg creates the initiator handshake message. +func (h *encHandshake) makeAuthMsg(prv *ecdsa.PrivateKey, token []byte) (*authMsgV4, error) { + rpub, err := h.remoteID.Pubkey() if err != nil { return nil, fmt.Errorf("bad remoteID: %v", err) } - // generate random initiator nonce - n := make([]byte, shaLen) - if _, err := rand.Read(n); err != nil { + h.remotePub = ecies.ImportECDSAPublic(rpub) + // Generate random initiator nonce. + h.initNonce = make([]byte, shaLen) + if _, err := rand.Read(h.initNonce); err != nil { return nil, err } - // generate random keypair to use for signing - randpriv, err := ecies.GenerateKey(rand.Reader, secp256k1.S256(), nil) + // Generate random keypair to for ECDH. + h.randomPrivKey, err = ecies.GenerateKey(rand.Reader, secp256k1.S256(), nil) if err != nil { return nil, err } - h := &encHandshake{ - initiator: true, - remoteID: remoteID, - remotePub: ecies.ImportECDSAPublic(rpub), - initNonce: n, - randomPrivKey: randpriv, - } - return h, nil -} - -// authMsg creates an encrypted initiator handshake message. 
-func (h *encHandshake) authMsg(prv *ecdsa.PrivateKey, token []byte) ([]byte, error) { - var tokenFlag byte - if token == nil { - // no session token found means we need to generate shared secret. - // ecies shared secret is used as initial session token for new peers - // generate shared key from prv and remote pubkey - var err error - if token, err = h.ecdhShared(prv); err != nil { - return nil, err - } - } else { - // for known peers, we use stored token from the previous session - tokenFlag = 0x01 - } - // sign known message: - // ecdh-shared-secret^nonce for new peers - // token^nonce for old peers + // Sign known message: static-shared-secret ^ nonce + token, err = h.staticSharedSecret(prv) + if err != nil { + return nil, err + } signed := xor(token, h.initNonce) signature, err := crypto.Sign(signed, h.randomPrivKey.ExportECDSA()) if err != nil { return nil, err } - // encode auth message - // signature || sha3(ecdhe-random-pubk) || pubk || nonce || token-flag - msg := make([]byte, authMsgLen) - n := copy(msg, signature) - n += copy(msg[n:], crypto.Sha3(exportPubkey(&h.randomPrivKey.PublicKey))) - n += copy(msg[n:], crypto.FromECDSAPub(&prv.PublicKey)[1:]) - n += copy(msg[n:], h.initNonce) - msg[n] = tokenFlag - - // encrypt auth message using remote-pubk - return ecies.Encrypt(rand.Reader, h.remotePub, msg, nil, nil) + msg := new(authMsgV4) + copy(msg.Signature[:], signature) + copy(msg.InitiatorPubkey[:], crypto.FromECDSAPub(&prv.PublicKey)[1:]) + copy(msg.Nonce[:], h.initNonce) + msg.Version = 4 + return msg, nil } -// decodeAuthResp decode an encrypted authentication response message. -func (h *encHandshake) decodeAuthResp(auth []byte, prv *ecdsa.PrivateKey) error { - msg, err := crypto.Decrypt(prv, auth) - if err != nil { - return fmt.Errorf("could not decrypt auth response (%v)", err) - } - h.respNonce = msg[pubLen : pubLen+shaLen] - h.remoteRandomPub, err = importPublicKey(msg[:pubLen]) - if err != nil { - return err - } - // ignore token flag for now - return nil +func (h *encHandshake) handleAuthResp(msg *authRespV4) (err error) { + h.respNonce = msg.Nonce[:] + h.remoteRandomPub, err = importPublicKey(msg.RandomPubkey[:]) + return err } // receiverEncHandshake negotiates a session token on conn. @@ -350,99 +347,165 @@ func (h *encHandshake) decodeAuthResp(auth []byte, prv *ecdsa.PrivateKey) error // prv is the local client's private key. // token is the token from a previous session with this node. func receiverEncHandshake(conn io.ReadWriter, prv *ecdsa.PrivateKey, token []byte) (s secrets, err error) { - // read remote auth sent by initiator. 
- auth := make([]byte, encAuthMsgLen) - if _, err := io.ReadFull(conn, auth); err != nil { + authMsg := new(authMsgV4) + authPacket, err := readHandshakeMsg(authMsg, encAuthMsgLen, prv, conn) + if err != nil { return s, err } - h, err := decodeAuthMsg(prv, token, auth) - if err != nil { + h := new(encHandshake) + if err := h.handleAuthMsg(authMsg, prv); err != nil { return s, err } - // send auth response - resp, err := h.authResp(prv, token) + authRespMsg, err := h.makeAuthResp() if err != nil { return s, err } - if _, err = conn.Write(resp); err != nil { - return s, err + var authRespPacket []byte + if authMsg.gotPlain { + authRespPacket, err = authRespMsg.sealPlain(h) + } else { + authRespPacket, err = sealEIP8(authRespMsg, h) } - - return h.secrets(auth, resp) -} - -func decodeAuthMsg(prv *ecdsa.PrivateKey, token []byte, auth []byte) (*encHandshake, error) { - var err error - h := new(encHandshake) - // generate random keypair for session - h.randomPrivKey, err = ecies.GenerateKey(rand.Reader, secp256k1.S256(), nil) if err != nil { - return nil, err - } - // generate random nonce - h.respNonce = make([]byte, shaLen) - if _, err = rand.Read(h.respNonce); err != nil { - return nil, err + return s, err } - - msg, err := crypto.Decrypt(prv, auth) - if err != nil { - return nil, fmt.Errorf("could not decrypt auth message (%v)", err) + if _, err = conn.Write(authRespPacket); err != nil { + return s, err } + return h.secrets(authPacket, authRespPacket) +} - // decode message parameters - // signature || sha3(ecdhe-random-pubk) || pubk || nonce || token-flag - h.initNonce = msg[authMsgLen-shaLen-1 : authMsgLen-1] - copy(h.remoteID[:], msg[sigLen+shaLen:sigLen+shaLen+pubLen]) +func (h *encHandshake) handleAuthMsg(msg *authMsgV4, prv *ecdsa.PrivateKey) error { + // Import the remote identity. + h.initNonce = msg.Nonce[:] + h.remoteID = msg.InitiatorPubkey rpub, err := h.remoteID.Pubkey() if err != nil { - return nil, fmt.Errorf("bad remoteID: %#v", err) + return fmt.Errorf("bad remoteID: %#v", err) } h.remotePub = ecies.ImportECDSAPublic(rpub) - // recover remote random pubkey from signed message. - if token == nil { - // TODO: it is an error if the initiator has a token and we don't. check that. - - // no session token means we need to generate shared secret. - // ecies shared secret is used as initial session token for new peers. - // generate shared key from prv and remote pubkey. - if token, err = h.ecdhShared(prv); err != nil { - return nil, err + // Generate random keypair for ECDH. + // If a private key is already set, use it instead of generating one (for testing). + if h.randomPrivKey == nil { + h.randomPrivKey, err = ecies.GenerateKey(rand.Reader, secp256k1.S256(), nil) + if err != nil { + return err } } + + // Check the signature. + token, err := h.staticSharedSecret(prv) + if err != nil { + return err + } signedMsg := xor(token, h.initNonce) - remoteRandomPub, err := secp256k1.RecoverPubkey(signedMsg, msg[:sigLen]) + remoteRandomPub, err := secp256k1.RecoverPubkey(signedMsg, msg.Signature[:]) if err != nil { + return err + } + h.remoteRandomPub, _ = importPublicKey(remoteRandomPub) + return nil +} + +func (h *encHandshake) makeAuthResp() (msg *authRespV4, err error) { + // Generate random nonce. 
+ h.respNonce = make([]byte, shaLen) + if _, err = rand.Read(h.respNonce); err != nil { return nil, err } - // validate the sha3 of recovered pubkey - remoteRandomPubMAC := msg[sigLen : sigLen+shaLen] - shaRemoteRandomPub := crypto.Sha3(remoteRandomPub[1:]) - if !bytes.Equal(remoteRandomPubMAC, shaRemoteRandomPub) { - return nil, fmt.Errorf("sha3 of recovered ephemeral pubkey does not match checksum in auth message") + msg = new(authRespV4) + copy(msg.Nonce[:], h.respNonce) + copy(msg.RandomPubkey[:], exportPubkey(&h.randomPrivKey.PublicKey)) + msg.Version = 4 + return msg, nil +} + +func (msg *authMsgV4) sealPlain(h *encHandshake) ([]byte, error) { + buf := make([]byte, authMsgLen) + n := copy(buf, msg.Signature[:]) + n += copy(buf[n:], crypto.Sha3(exportPubkey(&h.randomPrivKey.PublicKey))) + n += copy(buf[n:], msg.InitiatorPubkey[:]) + n += copy(buf[n:], msg.Nonce[:]) + buf[n] = 0 // token-flag + return ecies.Encrypt(rand.Reader, h.remotePub, buf, nil, nil) +} + +func (msg *authMsgV4) decodePlain(input []byte) { + n := copy(msg.Signature[:], input) + n += shaLen // skip sha3(initiator-ephemeral-pubk) + n += copy(msg.InitiatorPubkey[:], input[n:]) + n += copy(msg.Nonce[:], input[n:]) + msg.Version = 4 + msg.gotPlain = true +} + +func (msg *authRespV4) sealPlain(hs *encHandshake) ([]byte, error) { + buf := make([]byte, authRespLen) + n := copy(buf, msg.RandomPubkey[:]) + n += copy(buf[n:], msg.Nonce[:]) + return ecies.Encrypt(rand.Reader, hs.remotePub, buf, nil, nil) +} + +func (msg *authRespV4) decodePlain(input []byte) { + n := copy(msg.RandomPubkey[:], input) + n += copy(msg.Nonce[:], input[n:]) + msg.Version = 4 +} + +var padSpace = make([]byte, 300) + +func sealEIP8(msg interface{}, h *encHandshake) ([]byte, error) { + buf := new(bytes.Buffer) + if err := rlp.Encode(buf, msg); err != nil { + return nil, err } + // pad with random amount of data. the amount needs to be at least 100 bytes to make + // the message distinguishable from pre-EIP-8 handshakes. + pad := padSpace[:mrand.Intn(len(padSpace)-100)+100] + buf.Write(pad) + prefix := make([]byte, 2) + binary.BigEndian.PutUint16(prefix, uint16(buf.Len()+eciesOverhead)) - h.remoteRandomPub, _ = importPublicKey(remoteRandomPub) - return h, nil -} - -// authResp generates the encrypted authentication response message. -func (h *encHandshake) authResp(prv *ecdsa.PrivateKey, token []byte) ([]byte, error) { - // responder auth message - // E(remote-pubk, ecdhe-random-pubk || nonce || 0x0) - resp := make([]byte, authRespLen) - n := copy(resp, exportPubkey(&h.randomPrivKey.PublicKey)) - n += copy(resp[n:], h.respNonce) - if token == nil { - resp[n] = 0 - } else { - resp[n] = 1 + enc, err := ecies.Encrypt(rand.Reader, h.remotePub, buf.Bytes(), nil, prefix) + return append(prefix, enc...), err +} + +type plainDecoder interface { + decodePlain([]byte) +} + +func readHandshakeMsg(msg plainDecoder, plainSize int, prv *ecdsa.PrivateKey, r io.Reader) ([]byte, error) { + buf := make([]byte, plainSize) + if _, err := io.ReadFull(r, buf); err != nil { + return buf, err + } + // Attempt decoding pre-EIP-8 "plain" format. + key := ecies.ImportECDSA(prv) + if dec, err := key.Decrypt(rand.Reader, buf, nil, nil); err == nil { + msg.decodePlain(dec) + return buf, nil } - // encrypt using remote-pubk - return ecies.Encrypt(rand.Reader, h.remotePub, resp, nil, nil) + // Could be EIP-8 format, try that. 
+ prefix := buf[:2] + size := binary.BigEndian.Uint16(prefix) + if size < uint16(plainSize) { + return buf, fmt.Errorf("size underflow, need at least %d bytes", plainSize) + } + buf = append(buf, make([]byte, size-uint16(plainSize)+2)...) + if _, err := io.ReadFull(r, buf[plainSize:]); err != nil { + return buf, err + } + dec, err := key.Decrypt(rand.Reader, buf[2:], nil, prefix) + if err != nil { + return buf, err + } + // Can't use rlp.DecodeBytes here because it rejects + // trailing data (forward-compatibility). + s := rlp.NewStream(bytes.NewReader(dec), 0) + return buf, s.Decode(msg) } // importPublicKey unmarshals 512 bit public keys. @@ -458,7 +521,11 @@ func importPublicKey(pubKey []byte) (*ecies.PublicKey, error) { return nil, fmt.Errorf("invalid public key length %v (expect 64/65)", len(pubKey)) } // TODO: fewer pointless conversions - return ecies.ImportECDSAPublic(crypto.ToECDSAPub(pubKey65)), nil + pub := crypto.ToECDSAPub(pubKey65) + if pub.X == nil { + return nil, fmt.Errorf("invalid public key") + } + return ecies.ImportECDSAPublic(pub), nil } func exportPubkey(pub *ecies.PublicKey) []byte { diff --git a/p2p/rlpx_test.go b/p2p/rlpx_test.go index 7cc7548e2..f9583e224 100644 --- a/p2p/rlpx_test.go +++ b/p2p/rlpx_test.go @@ -21,6 +21,7 @@ import ( "crypto/rand" "errors" "fmt" + "io" "io/ioutil" "net" "reflect" @@ -162,6 +163,7 @@ func TestProtocolHandshake(t *testing.T) { wg.Add(2) go func() { defer wg.Done() + defer fd1.Close() rlpx := newRLPX(fd0) remid, err := rlpx.doEncHandshake(prv0, node1) if err != nil { @@ -178,6 +180,7 @@ func TestProtocolHandshake(t *testing.T) { t.Errorf("dial side proto handshake error: %v", err) return } + phs.Rest = nil if !reflect.DeepEqual(phs, hs1) { t.Errorf("dial side proto handshake mismatch:\ngot: %s\nwant: %s\n", spew.Sdump(phs), spew.Sdump(hs1)) return @@ -186,6 +189,7 @@ func TestProtocolHandshake(t *testing.T) { }() go func() { defer wg.Done() + defer fd1.Close() rlpx := newRLPX(fd1) remid, err := rlpx.doEncHandshake(prv1, nil) if err != nil { @@ -202,6 +206,7 @@ func TestProtocolHandshake(t *testing.T) { t.Errorf("listen side proto handshake error: %v", err) return } + phs.Rest = nil if !reflect.DeepEqual(phs, hs0) { t.Errorf("listen side proto handshake mismatch:\ngot: %s\nwant: %s\n", spew.Sdump(phs), spew.Sdump(hs0)) return @@ -216,7 +221,6 @@ func TestProtocolHandshake(t *testing.T) { func TestProtocolHandshakeErrors(t *testing.T) { our := &protoHandshake{Version: 3, Caps: []Cap{{"foo", 2}, {"bar", 3}}, Name: "quux"} - id := randomID() tests := []struct { code uint64 msg interface{} @@ -244,11 +248,6 @@ func TestProtocolHandshakeErrors(t *testing.T) { }, { code: handshakeMsg, - msg: &protoHandshake{Version: 9944, ID: id}, - err: DiscIncompatibleVersion, - }, - { - code: handshakeMsg, msg: &protoHandshake{Version: 3}, err: DiscInvalidIdentity, }, @@ -374,3 +373,227 @@ func TestRLPXFrameRW(t *testing.T) { } } } + +type handshakeAuthTest struct { + input string + isPlain bool + wantVersion uint + wantRest []rlp.RawValue +} + +var eip8HandshakeAuthTests = []handshakeAuthTest{ + // (Auth₁) RLPx v4 plain encoding + { + input: ` + 048ca79ad18e4b0659fab4853fe5bc58eb83992980f4c9cc147d2aa31532efd29a3d3dc6a3d89eaf + 913150cfc777ce0ce4af2758bf4810235f6e6ceccfee1acc6b22c005e9e3a49d6448610a58e98744 + ba3ac0399e82692d67c1f58849050b3024e21a52c9d3b01d871ff5f210817912773e610443a9ef14 + 2e91cdba0bd77b5fdf0769b05671fc35f83d83e4d3b0b000c6b2a1b1bba89e0fc51bf4e460df3105 + c444f14be226458940d6061c296350937ffd5e3acaceeaaefd3c6f74be8e23e0f45163cc7ebd7622 + 
0f0128410fd05250273156d548a414444ae2f7dea4dfca2d43c057adb701a715bf59f6fb66b2d1d2 + 0f2c703f851cbf5ac47396d9ca65b6260bd141ac4d53e2de585a73d1750780db4c9ee4cd4d225173 + a4592ee77e2bd94d0be3691f3b406f9bba9b591fc63facc016bfa8 + `, + isPlain: true, + wantVersion: 4, + }, + // (Auth₂) EIP-8 encoding + { + input: ` + 01b304ab7578555167be8154d5cc456f567d5ba302662433674222360f08d5f1534499d3678b513b + 0fca474f3a514b18e75683032eb63fccb16c156dc6eb2c0b1593f0d84ac74f6e475f1b8d56116b84 + 9634a8c458705bf83a626ea0384d4d7341aae591fae42ce6bd5c850bfe0b999a694a49bbbaf3ef6c + da61110601d3b4c02ab6c30437257a6e0117792631a4b47c1d52fc0f8f89caadeb7d02770bf999cc + 147d2df3b62e1ffb2c9d8c125a3984865356266bca11ce7d3a688663a51d82defaa8aad69da39ab6 + d5470e81ec5f2a7a47fb865ff7cca21516f9299a07b1bc63ba56c7a1a892112841ca44b6e0034dee + 70c9adabc15d76a54f443593fafdc3b27af8059703f88928e199cb122362a4b35f62386da7caad09 + c001edaeb5f8a06d2b26fb6cb93c52a9fca51853b68193916982358fe1e5369e249875bb8d0d0ec3 + 6f917bc5e1eafd5896d46bd61ff23f1a863a8a8dcd54c7b109b771c8e61ec9c8908c733c0263440e + 2aa067241aaa433f0bb053c7b31a838504b148f570c0ad62837129e547678c5190341e4f1693956c + 3bf7678318e2d5b5340c9e488eefea198576344afbdf66db5f51204a6961a63ce072c8926c + `, + wantVersion: 4, + wantRest: []rlp.RawValue{}, + }, + // (Auth₃) RLPx v4 EIP-8 encoding with version 56, additional list elements + { + input: ` + 01b8044c6c312173685d1edd268aa95e1d495474c6959bcdd10067ba4c9013df9e40ff45f5bfd6f7 + 2471f93a91b493f8e00abc4b80f682973de715d77ba3a005a242eb859f9a211d93a347fa64b597bf + 280a6b88e26299cf263b01b8dfdb712278464fd1c25840b995e84d367d743f66c0e54a586725b7bb + f12acca27170ae3283c1073adda4b6d79f27656993aefccf16e0d0409fe07db2dc398a1b7e8ee93b + cd181485fd332f381d6a050fba4c7641a5112ac1b0b61168d20f01b479e19adf7fdbfa0905f63352 + bfc7e23cf3357657455119d879c78d3cf8c8c06375f3f7d4861aa02a122467e069acaf513025ff19 + 6641f6d2810ce493f51bee9c966b15c5043505350392b57645385a18c78f14669cc4d960446c1757 + 1b7c5d725021babbcd786957f3d17089c084907bda22c2b2675b4378b114c601d858802a55345a15 + 116bc61da4193996187ed70d16730e9ae6b3bb8787ebcaea1871d850997ddc08b4f4ea668fbf3740 + 7ac044b55be0908ecb94d4ed172ece66fd31bfdadf2b97a8bc690163ee11f5b575a4b44e36e2bfb2 + f0fce91676fd64c7773bac6a003f481fddd0bae0a1f31aa27504e2a533af4cef3b623f4791b2cca6 + d490 + `, + wantVersion: 56, + wantRest: []rlp.RawValue{{0x01}, {0x02}, {0xC2, 0x04, 0x05}}, + }, +} + +type handshakeAckTest struct { + input string + wantVersion uint + wantRest []rlp.RawValue +} + +var eip8HandshakeRespTests = []handshakeAckTest{ + // (Ack₁) RLPx v4 plain encoding + { + input: ` + 049f8abcfa9c0dc65b982e98af921bc0ba6e4243169348a236abe9df5f93aa69d99cadddaa387662 + b0ff2c08e9006d5a11a278b1b3331e5aaabf0a32f01281b6f4ede0e09a2d5f585b26513cb794d963 + 5a57563921c04a9090b4f14ee42be1a5461049af4ea7a7f49bf4c97a352d39c8d02ee4acc416388c + 1c66cec761d2bc1c72da6ba143477f049c9d2dde846c252c111b904f630ac98e51609b3b1f58168d + dca6505b7196532e5f85b259a20c45e1979491683fee108e9660edbf38f3add489ae73e3dda2c71b + d1497113d5c755e942d1 + `, + wantVersion: 4, + }, + // (Ack₂) EIP-8 encoding + { + input: ` + 01ea0451958701280a56482929d3b0757da8f7fbe5286784beead59d95089c217c9b917788989470 + b0e330cc6e4fb383c0340ed85fab836ec9fb8a49672712aeabbdfd1e837c1ff4cace34311cd7f4de + 05d59279e3524ab26ef753a0095637ac88f2b499b9914b5f64e143eae548a1066e14cd2f4bd7f814 + c4652f11b254f8a2d0191e2f5546fae6055694aed14d906df79ad3b407d94692694e259191cde171 + ad542fc588fa2b7333313d82a9f887332f1dfc36cea03f831cb9a23fea05b33deb999e85489e645f + 
6aab1872475d488d7bd6c7c120caf28dbfc5d6833888155ed69d34dbdc39c1f299be1057810f34fb + e754d021bfca14dc989753d61c413d261934e1a9c67ee060a25eefb54e81a4d14baff922180c395d + 3f998d70f46f6b58306f969627ae364497e73fc27f6d17ae45a413d322cb8814276be6ddd13b885b + 201b943213656cde498fa0e9ddc8e0b8f8a53824fbd82254f3e2c17e8eaea009c38b4aa0a3f306e8 + 797db43c25d68e86f262e564086f59a2fc60511c42abfb3057c247a8a8fe4fb3ccbadde17514b7ac + 8000cdb6a912778426260c47f38919a91f25f4b5ffb455d6aaaf150f7e5529c100ce62d6d92826a7 + 1778d809bdf60232ae21ce8a437eca8223f45ac37f6487452ce626f549b3b5fdee26afd2072e4bc7 + 5833c2464c805246155289f4 + `, + wantVersion: 4, + wantRest: []rlp.RawValue{}, + }, + // (Ack₃) EIP-8 encoding with version 57, additional list elements + { + input: ` + 01f004076e58aae772bb101ab1a8e64e01ee96e64857ce82b1113817c6cdd52c09d26f7b90981cd7 + ae835aeac72e1573b8a0225dd56d157a010846d888dac7464baf53f2ad4e3d584531fa203658fab0 + 3a06c9fd5e35737e417bc28c1cbf5e5dfc666de7090f69c3b29754725f84f75382891c561040ea1d + dc0d8f381ed1b9d0d4ad2a0ec021421d847820d6fa0ba66eaf58175f1b235e851c7e2124069fbc20 + 2888ddb3ac4d56bcbd1b9b7eab59e78f2e2d400905050f4a92dec1c4bdf797b3fc9b2f8e84a482f3 + d800386186712dae00d5c386ec9387a5e9c9a1aca5a573ca91082c7d68421f388e79127a5177d4f8 + 590237364fd348c9611fa39f78dcdceee3f390f07991b7b47e1daa3ebcb6ccc9607811cb17ce51f1 + c8c2c5098dbdd28fca547b3f58c01a424ac05f869f49c6a34672ea2cbbc558428aa1fe48bbfd6115 + 8b1b735a65d99f21e70dbc020bfdface9f724a0d1fb5895db971cc81aa7608baa0920abb0a565c9c + 436e2fd13323428296c86385f2384e408a31e104670df0791d93e743a3a5194ee6b076fb6323ca59 + 3011b7348c16cf58f66b9633906ba54a2ee803187344b394f75dd2e663a57b956cb830dd7a908d4f + 39a2336a61ef9fda549180d4ccde21514d117b6c6fd07a9102b5efe710a32af4eeacae2cb3b1dec0 + 35b9593b48b9d3ca4c13d245d5f04169b0b1 + `, + wantVersion: 57, + wantRest: []rlp.RawValue{{0x06}, {0xC2, 0x07, 0x08}, {0x81, 0xFA}}, + }, +} + +func TestHandshakeForwardCompatibility(t *testing.T) { + var ( + keyA, _ = crypto.HexToECDSA("49a7b37aa6f6645917e7b807e9d1c00d4fa71f18343b0d4122a4d2df64dd6fee") + keyB, _ = crypto.HexToECDSA("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291") + pubA = crypto.FromECDSAPub(&keyA.PublicKey)[1:] + pubB = crypto.FromECDSAPub(&keyB.PublicKey)[1:] + ephA, _ = crypto.HexToECDSA("869d6ecf5211f1cc60418a13b9d870b22959d0c16f02bec714c960dd2298a32d") + ephB, _ = crypto.HexToECDSA("e238eb8e04fee6511ab04c6dd3c89ce097b11f25d584863ac2b6d5b35b1847e4") + ephPubA = crypto.FromECDSAPub(&ephA.PublicKey)[1:] + ephPubB = crypto.FromECDSAPub(&ephB.PublicKey)[1:] + nonceA = unhex("7e968bba13b6c50e2c4cd7f241cc0d64d1ac25c7f5952df231ac6a2bda8ee5d6") + nonceB = unhex("559aead08264d5795d3909718cdd05abd49572e84fe55590eef31a88a08fdffd") + _, _, _, _ = pubA, pubB, ephPubA, ephPubB + authSignature = unhex("299ca6acfd35e3d72d8ba3d1e2b60b5561d5af5218eb5bc182045769eb4226910a301acae3b369fffc4a4899d6b02531e89fd4fe36a2cf0d93607ba470b50f7800") + _ = authSignature + ) + makeAuth := func(test handshakeAuthTest) *authMsgV4 { + msg := &authMsgV4{Version: test.wantVersion, Rest: test.wantRest, gotPlain: test.isPlain} + copy(msg.Signature[:], authSignature) + copy(msg.InitiatorPubkey[:], pubA) + copy(msg.Nonce[:], nonceA) + return msg + } + makeAck := func(test handshakeAckTest) *authRespV4 { + msg := &authRespV4{Version: test.wantVersion, Rest: test.wantRest} + copy(msg.RandomPubkey[:], ephPubB) + copy(msg.Nonce[:], nonceB) + return msg + } + + // check auth msg parsing + for _, test := range eip8HandshakeAuthTests { + r := bytes.NewReader(unhex(test.input)) + msg := 
new(authMsgV4) + ciphertext, err := readHandshakeMsg(msg, encAuthMsgLen, keyB, r) + if err != nil { + t.Errorf("error for input %x:\n %v", unhex(test.input), err) + continue + } + if !bytes.Equal(ciphertext, unhex(test.input)) { + t.Errorf("wrong ciphertext for input %x:\n %x", unhex(test.input), ciphertext) + } + want := makeAuth(test) + if !reflect.DeepEqual(msg, want) { + t.Errorf("wrong msg for input %x:\ngot %s\nwant %s", unhex(test.input), spew.Sdump(msg), spew.Sdump(want)) + } + } + + // check auth resp parsing + for _, test := range eip8HandshakeRespTests { + input := unhex(test.input) + r := bytes.NewReader(input) + msg := new(authRespV4) + ciphertext, err := readHandshakeMsg(msg, encAuthRespLen, keyA, r) + if err != nil { + t.Errorf("error for input %x:\n %v", input, err) + continue + } + if !bytes.Equal(ciphertext, input) { + t.Errorf("wrong ciphertext for input %x:\n %x", input, err) + } + want := makeAck(test) + if !reflect.DeepEqual(msg, want) { + t.Errorf("wrong msg for input %x:\ngot %s\nwant %s", input, spew.Sdump(msg), spew.Sdump(want)) + } + } + + // check derivation for (Auth₂, Ack₂) on recipient side + var ( + hs = &encHandshake{ + initiator: false, + respNonce: nonceB, + randomPrivKey: ecies.ImportECDSA(ephB), + } + authCiphertext = unhex(eip8HandshakeAuthTests[1].input) + authRespCiphertext = unhex(eip8HandshakeRespTests[1].input) + authMsg = makeAuth(eip8HandshakeAuthTests[1]) + wantAES = unhex("80e8632c05fed6fc2a13b0f8d31a3cf645366239170ea067065aba8e28bac487") + wantMAC = unhex("2ea74ec5dae199227dff1af715362700e989d889d7a493cb0639691efb8e5f98") + wantFooIngressHash = unhex("0c7ec6340062cc46f5e9f1e3cf86f8c8c403c5a0964f5df0ebd34a75ddc86db5") + ) + if err := hs.handleAuthMsg(authMsg, keyB); err != nil { + t.Fatalf("handleAuthMsg: %v", err) + } + derived, err := hs.secrets(authCiphertext, authRespCiphertext) + if err != nil { + t.Fatalf("secrets: %v", err) + } + if !bytes.Equal(derived.AES, wantAES) { + t.Errorf("aes-secret mismatch:\ngot %x\nwant %x", derived.AES, wantAES) + } + if !bytes.Equal(derived.MAC, wantMAC) { + t.Errorf("mac-secret mismatch:\ngot %x\nwant %x", derived.MAC, wantMAC) + } + io.WriteString(derived.IngressMAC, "foo") + fooIngressHash := derived.IngressMAC.Sum(nil) + if !bytes.Equal(fooIngressHash, wantFooIngressHash) { + t.Errorf("ingress-mac('foo') mismatch:\ngot %x\nwant %x", fooIngressHash, wantFooIngressHash) + } +} diff --git a/rlp/decode.go b/rlp/decode.go index c4d42c6fc..c4e5869cc 100644 --- a/rlp/decode.go +++ b/rlp/decode.go @@ -63,11 +63,12 @@ type Decoder interface { // must contain an element for each decoded field. Decode returns an // error if there are too few or too many elements. // -// The decoding of struct fields honours one particular struct tag, -// "nil". This tag applies to pointer-typed fields and changes the +// The decoding of struct fields honours two struct tags, "tail" and +// "nil". For an explanation of "tail", see the example. +// The "nil" tag applies to pointer-typed fields and changes the // decoding rules for the field such that input values of size zero -// decode as a nil pointer. This tag can be useful when decoding recursive -// types. +// decode as a nil pointer. This tag can be useful when decoding +// recursive types. 
// // type StructWithEmptyOK struct { // Foo *[20]byte `rlp:"nil"` @@ -190,7 +191,7 @@ func makeDecoder(typ reflect.Type, tags tags) (dec decoder, err error) { case kind == reflect.String: return decodeString, nil case kind == reflect.Slice || kind == reflect.Array: - return makeListDecoder(typ) + return makeListDecoder(typ, tags) case kind == reflect.Struct: return makeStructDecoder(typ) case kind == reflect.Ptr: @@ -264,7 +265,7 @@ func decodeBigInt(s *Stream, val reflect.Value) error { return nil } -func makeListDecoder(typ reflect.Type) (decoder, error) { +func makeListDecoder(typ reflect.Type, tag tags) (decoder, error) { etype := typ.Elem() if etype.Kind() == reflect.Uint8 && !reflect.PtrTo(etype).Implements(decoderInterface) { if typ.Kind() == reflect.Array { @@ -277,15 +278,26 @@ func makeListDecoder(typ reflect.Type) (decoder, error) { if err != nil { return nil, err } - - isArray := typ.Kind() == reflect.Array - return func(s *Stream, val reflect.Value) error { - if isArray { + var dec decoder + switch { + case typ.Kind() == reflect.Array: + dec = func(s *Stream, val reflect.Value) error { return decodeListArray(s, val, etypeinfo.decoder) - } else { + } + case tag.tail: + // A slice with "tail" tag can occur as the last field + // of a struct and is upposed to swallow all remaining + // list elements. The struct decoder already called s.List, + // proceed directly to decoding the elements. + dec = func(s *Stream, val reflect.Value) error { + return decodeSliceElems(s, val, etypeinfo.decoder) + } + default: + dec = func(s *Stream, val reflect.Value) error { return decodeListSlice(s, val, etypeinfo.decoder) } - }, nil + } + return dec, nil } func decodeListSlice(s *Stream, val reflect.Value, elemdec decoder) error { @@ -297,7 +309,13 @@ func decodeListSlice(s *Stream, val reflect.Value, elemdec decoder) error { val.Set(reflect.MakeSlice(val.Type(), 0, 0)) return s.ListEnd() } + if err := decodeSliceElems(s, val, elemdec); err != nil { + return err + } + return s.ListEnd() +} +func decodeSliceElems(s *Stream, val reflect.Value, elemdec decoder) error { i := 0 for ; ; i++ { // grow slice if necessary @@ -323,12 +341,11 @@ func decodeListSlice(s *Stream, val reflect.Value, elemdec decoder) error { if i < val.Len() { val.SetLen(i) } - return s.ListEnd() + return nil } func decodeListArray(s *Stream, val reflect.Value, elemdec decoder) error { - _, err := s.List() - if err != nil { + if _, err := s.List(); err != nil { return wrapStreamError(err, val.Type()) } vlen := val.Len() @@ -398,11 +415,11 @@ func makeStructDecoder(typ reflect.Type) (decoder, error) { return nil, err } dec := func(s *Stream, val reflect.Value) (err error) { - if _, err = s.List(); err != nil { + if _, err := s.List(); err != nil { return wrapStreamError(err, typ) } for _, f := range fields { - err = f.info.decoder(s, val.Field(f.index)) + err := f.info.decoder(s, val.Field(f.index)) if err == EOL { return &decodeError{msg: "too few elements", typ: typ} } else if err != nil { diff --git a/rlp/decode_tail_test.go b/rlp/decode_tail_test.go new file mode 100644 index 000000000..885354390 --- /dev/null +++ b/rlp/decode_tail_test.go @@ -0,0 +1,33 @@ +package rlp + +import ( + "bytes" + "fmt" +) + +type structWithTail struct { + A, B uint + C []uint `rlp:"tail"` +} + +func ExampleDecode_structTagTail() { + // In this example, the "tail" struct tag is used to decode lists of + // differing length into a struct. 
+ var val structWithTail + + err := Decode(bytes.NewReader([]byte{0xC4, 0x01, 0x02, 0x03, 0x04}), &val) + fmt.Printf("with 4 elements: err=%v val=%v\n", err, val) + + err = Decode(bytes.NewReader([]byte{0xC6, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06}), &val) + fmt.Printf("with 6 elements: err=%v val=%v\n", err, val) + + // Note that at least two list elements must be present to + // fill fields A and B: + err = Decode(bytes.NewReader([]byte{0xC1, 0x01}), &val) + fmt.Printf("with 1 element: err=%q\n", err) + + // Output: + // with 4 elements: err=<nil> val={1 2 [3 4]} + // with 6 elements: err=<nil> val={1 2 [3 4 5 6]} + // with 1 element: err="rlp: too few elements for rlp.structWithTail" +} diff --git a/rlp/decode_test.go b/rlp/decode_test.go index 408f1a5a9..2d465b74d 100644 --- a/rlp/decode_test.go +++ b/rlp/decode_test.go @@ -312,6 +312,26 @@ type recstruct struct { Child *recstruct `rlp:"nil"` } +type invalidTail1 struct { + A uint `rlp:"tail"` + B string +} + +type invalidTail2 struct { + A uint + B string `rlp:"tail"` +} + +type tailRaw struct { + A uint + Tail []RawValue `rlp:"tail"` +} + +type tailUint struct { + A uint + Tail []uint `rlp:"tail"` +} + var ( veryBigInt = big.NewInt(0).Add( big.NewInt(0).Lsh(big.NewInt(0xFFFFFFFFFFFFFF), 16), @@ -437,6 +457,38 @@ var decodeTests = []decodeTest{ ptr: new(recstruct), error: "rlp: expected input string or byte for uint, decoding into (rlp.recstruct).Child.I", }, + { + input: "C0", + ptr: new(invalidTail1), + error: "rlp: invalid struct tag \"tail\" for rlp.invalidTail1.A (must be on last field)", + }, + { + input: "C0", + ptr: new(invalidTail2), + error: "rlp: invalid struct tag \"tail\" for rlp.invalidTail2.B (field type is not slice)", + }, + { + input: "C50102C20102", + ptr: new(tailUint), + error: "rlp: expected input string or byte for uint, decoding into (rlp.tailUint).Tail[1]", + }, + + // struct tag "tail" + { + input: "C3010203", + ptr: new(tailRaw), + value: tailRaw{A: 1, Tail: []RawValue{unhex("02"), unhex("03")}}, + }, + { + input: "C20102", + ptr: new(tailRaw), + value: tailRaw{A: 1, Tail: []RawValue{unhex("02")}}, + }, + { + input: "C101", + ptr: new(tailRaw), + value: tailRaw{A: 1, Tail: []RawValue{}}, + }, // RawValue {input: "01", ptr: new(RawValue), value: RawValue(unhex("01"))}, diff --git a/rlp/encode.go b/rlp/encode.go index d73b17c28..17cfc6b66 100644 --- a/rlp/encode.go +++ b/rlp/encode.go @@ -345,7 +345,7 @@ var ( ) // makeWriter creates a writer function for the given type. 
-func makeWriter(typ reflect.Type) (writer, error) { +func makeWriter(typ reflect.Type, ts tags) (writer, error) { kind := typ.Kind() switch { case typ == rawValueType: @@ -371,7 +371,7 @@ func makeWriter(typ reflect.Type) (writer, error) { case kind == reflect.Array && isByte(typ.Elem()): return writeByteArray, nil case kind == reflect.Slice || kind == reflect.Array: - return makeSliceWriter(typ) + return makeSliceWriter(typ, ts) case kind == reflect.Struct: return makeStructWriter(typ) case kind == reflect.Ptr: @@ -507,20 +507,21 @@ func writeInterface(val reflect.Value, w *encbuf) error { return ti.writer(eval, w) } -func makeSliceWriter(typ reflect.Type) (writer, error) { +func makeSliceWriter(typ reflect.Type, ts tags) (writer, error) { etypeinfo, err := cachedTypeInfo1(typ.Elem(), tags{}) if err != nil { return nil, err } writer := func(val reflect.Value, w *encbuf) error { - lh := w.list() + if !ts.tail { + defer w.listEnd(w.list()) + } vlen := val.Len() for i := 0; i < vlen; i++ { if err := etypeinfo.writer(val.Index(i), w); err != nil { return err } } - w.listEnd(lh) return nil } return writer, nil diff --git a/rlp/encode_test.go b/rlp/encode_test.go index a3f30d804..6f38294e4 100644 --- a/rlp/encode_test.go +++ b/rlp/encode_test.go @@ -214,6 +214,10 @@ var encTests = []encTest{ {val: simplestruct{A: 3, B: "foo"}, output: "C50383666F6F"}, {val: &recstruct{5, nil}, output: "C205C0"}, {val: &recstruct{5, &recstruct{4, &recstruct{3, nil}}}, output: "C605C404C203C0"}, + {val: &tailRaw{A: 1, Tail: []RawValue{unhex("02"), unhex("03")}}, output: "C3010203"}, + {val: &tailRaw{A: 1, Tail: []RawValue{unhex("02")}}, output: "C20102"}, + {val: &tailRaw{A: 1, Tail: []RawValue{}}, output: "C101"}, + {val: &tailRaw{A: 1, Tail: nil}, output: "C101"}, // nil {val: (*uint)(nil), output: "80"}, diff --git a/rlp/typecache.go b/rlp/typecache.go index 0ab096695..a2f217c66 100644 --- a/rlp/typecache.go +++ b/rlp/typecache.go @@ -17,7 +17,9 @@ package rlp import ( + "fmt" "reflect" + "strings" "sync" ) @@ -33,7 +35,13 @@ type typeinfo struct { // represents struct tags type tags struct { + // rlp:"nil" controls whether empty input results in a nil pointer. nilOK bool + + // rlp:"tail" controls whether this field swallows additional list + // elements. It can only be set for the last field, which must be + // of slice type. 
+ tail bool } type typekey struct { @@ -89,7 +97,10 @@ type field struct { func structFields(typ reflect.Type) (fields []field, err error) { for i := 0; i < typ.NumField(); i++ { if f := typ.Field(i); f.PkgPath == "" { // exported - tags := parseStructTag(f.Tag.Get("rlp")) + tags, err := parseStructTag(typ, i) + if err != nil { + return nil, err + } info, err := cachedTypeInfo1(f.Type, tags) if err != nil { return nil, err @@ -100,8 +111,27 @@ func structFields(typ reflect.Type) (fields []field, err error) { return fields, nil } -func parseStructTag(tag string) tags { - return tags{nilOK: tag == "nil"} +func parseStructTag(typ reflect.Type, fi int) (tags, error) { + f := typ.Field(fi) + var ts tags + for _, t := range strings.Split(f.Tag.Get("rlp"), ",") { + switch t = strings.TrimSpace(t); t { + case "": + case "nil": + ts.nilOK = true + case "tail": + ts.tail = true + if fi != typ.NumField()-1 { + return ts, fmt.Errorf(`rlp: invalid struct tag "tail" for %v.%s (must be on last field)`, typ, f.Name) + } + if f.Type.Kind() != reflect.Slice { + return ts, fmt.Errorf(`rlp: invalid struct tag "tail" for %v.%s (field type is not slice)`, typ, f.Name) + } + default: + return ts, fmt.Errorf("rlp: unknown struct tag %q on %v.%s", t, typ, f.Name) + } + } + return ts, nil } func genTypeInfo(typ reflect.Type, tags tags) (info *typeinfo, err error) { @@ -109,7 +139,7 @@ func genTypeInfo(typ reflect.Type, tags tags) (info *typeinfo, err error) { if info.decoder, err = makeDecoder(typ, tags); err != nil { return nil, err } - if info.writer, err = makeWriter(typ); err != nil { + if info.writer, err = makeWriter(typ, tags); err != nil { return nil, err } return info, nil |
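The forward-compatibility pattern running through this patch (protoHandshake.Rest, authMsgV4.Rest, authRespV4.Rest, and the Rest field on the discovery packets) all lean on the new rlp:"tail" struct tag: a trailing []rlp.RawValue field swallows any extra list elements instead of failing with "too many elements". Below is a minimal consumer-side sketch of that behaviour, assuming the in-tree rlp package is importable as github.com/ethereum/go-ethereum/rlp; the newMsg/oldMsg names are illustrative and not part of the patch.

package main

import (
	"bytes"
	"fmt"

	"github.com/ethereum/go-ethereum/rlp"
)

// newMsg stands in for a future protocol message with an extra field.
type newMsg struct {
	Version uint
	Name    string
	Extra   []byte // field unknown to older nodes
}

// oldMsg models the pattern used in this change: the trailing "tail"
// field collects unknown list elements instead of rejecting them.
type oldMsg struct {
	Version uint
	Name    string
	Rest    []rlp.RawValue `rlp:"tail"`
}

func main() {
	// A newer peer encodes three list elements.
	buf := new(bytes.Buffer)
	if err := rlp.Encode(buf, &newMsg{Version: 5, Name: "geth", Extra: []byte{0x01}}); err != nil {
		panic(err)
	}
	// An older peer decodes the same list; the unknown element lands
	// in Rest instead of producing a "too many elements" error.
	var got oldMsg
	if err := rlp.Decode(bytes.NewReader(buf.Bytes()), &got); err != nil {
		panic(err)
	}
	fmt.Printf("version=%d name=%s rest=%x\n", got.Version, got.Name, got.Rest)
	// expected: version=5 name=geth rest=[01]
}

Encoding is symmetric: per makeSliceWriter, a tail slice is written without its own list header, so a nil or empty Rest adds nothing to the output (see the tailRaw cases added to encode_test.go), and parseStructTag rejects the tag on any field that is not the last one or is not a slice.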
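On the wire, the EIP-8 handshake packets produced by sealEIP8 are a two-byte big-endian size prefix followed by the ECIES ciphertext of the RLP body plus 100-299 bytes of padding; the prefix is also passed to ecies.Encrypt as shared MAC data, and readHandshakeMsg only falls back to this format after the pre-EIP-8 "plain" decrypt fails and the declared size is at least the plain packet size. The sketch below reproduces only the framing arithmetic under those assumptions and leaves out the encryption step, so its output is an illustration of the layout, not a valid packet.

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
	mrand "math/rand"
)

// Mirrored from p2p/rlpx.go in this patch.
const eciesOverhead = 65 /* pubkey */ + 16 /* IV */ + 32 /* MAC */

// frameEIP8 shows only the length-prefix arithmetic from sealEIP8:
// the RLP body plus at least 100 bytes of padding, prefixed with the
// size the ECIES ciphertext will have (plaintext length + overhead).
// The real code then encrypts the buffer with the prefix as shared
// MAC data; this sketch returns prefix || plaintext instead.
func frameEIP8(rlpBody []byte) []byte {
	buf := new(bytes.Buffer)
	buf.Write(rlpBody)
	pad := make([]byte, mrand.Intn(200)+100) // 100-299 bytes, like padSpace
	buf.Write(pad)
	prefix := make([]byte, 2)
	binary.BigEndian.PutUint16(prefix, uint16(buf.Len()+eciesOverhead))
	return append(prefix, buf.Bytes()...)
}

func main() {
	pkt := frameEIP8([]byte{0xC2, 0x01, 0x02}) // tiny stand-in RLP list
	size := binary.BigEndian.Uint16(pkt[:2])
	fmt.Printf("declared ciphertext size %d = plaintext %d + overhead %d\n",
		size, len(pkt)-2, eciesOverhead)
}

Note that sending in EIP-8 format remains opt-in at this stage: initiatorEncHandshake only uses sealEIP8 when the RLPX_EIP8 environment variable is set (configSendEIP), while receiving accepts both formats unconditionally.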