@@ -134,6 +134,7 @@ let s:NODE_ENV = 88
" Node-kind constants: this hunk appends NODE_LAMBDA (92) immediately
" after the last pre-existing node id (NODE_CURLYNAMEEXPR = 91).
134134let s: NODE_REG = 89
135135let s: NODE_CURLYNAMEPART = 90
136136let s: NODE_CURLYNAMEEXPR = 91
137+ let s: NODE_LAMBDA = 92
137138
138139let s: TOKEN_EOF = 1
139140let s: TOKEN_EOL = 2
@@ -199,6 +200,7 @@ let s:TOKEN_SEMICOLON = 61
" Token-kind constants: adds TOKEN_ARROW (65) for the '->' separator
" used by Vim's lambda literal {args -> expr}.
199200let s: TOKEN_BACKTICK = 62
200201let s: TOKEN_DOTDOTDOT = 63
201202let s: TOKEN_SHARP = 64
203+ let s: TOKEN_ARROW = 65
202204
203205let s: MAX_FUNC_ARGS = 20
204206
@@ -398,6 +400,7 @@ endfunction
398400" REG .value
399401" CURLYNAMEPART .value
400402" CURLYNAMEEXPR .value
403+ " LAMBDA .rlist .left
" Construct a bare AST node: a dict holding only the numeric node kind.
" Callers attach all further attributes (.pos, .value, .left, .rlist, ...)
" according to the per-kind attribute table documented above.
function! s:Node(type)
  return {'type': a:type}
endfunction
@@ -2542,8 +2545,13 @@ function! s:ExprTokenizer.get2()
" Hunk inside s:ExprTokenizer.get2(): on '-', look one character ahead
" (r.p(1)) so that '->' is emitted as the new TOKEN_ARROW; a lone '-'
" still tokenizes as TOKEN_MINUS, mirroring the existing '...' lookahead
" for TOKEN_DOTDOTDOT just below.
25422545 call r .seek_cur (1 )
25432546 return self .token (s: TOKEN_PLUS , ' +' , pos)
25442547 elseif c == # ' -'
2545- call r .seek_cur (1 )
2546- return self .token (s: TOKEN_MINUS , ' -' , pos)
2548+ if r .p (1 ) == # ' >'
2549+ call r .seek_cur (2 )
2550+ return self .token (s: TOKEN_ARROW , ' ->' , pos)
2551+ else
2552+ call r .seek_cur (1 )
2553+ return self .token (s: TOKEN_MINUS , ' -' , pos)
2554+ endif
25472555 elseif c == # ' .'
25482556 if r .p (1 ) == # ' .' && r .p (2 ) == # ' .'
25492557 call r .seek_cur (3 )
@@ -3174,6 +3182,7 @@ endfunction
31743182" 'string'
31753183" [expr1, ...]
31763184" {expr1: expr1, ...}
3185+ " {args -> expr1}
31773186" &option
31783187" (expr1)
31793188" variable
@@ -3225,13 +3234,20 @@ function! s:ExprParser.parse_expr9()
32253234 endwhile
32263235 endif
" On '{' the construct is ambiguous: {} / {"k": v} is a dict,
" {args -> expr} is a lambda.  The node kind is left undecided (-1)
" until the token after '{' is peeked.
32273236 elseif token.type == s: TOKEN_COPEN
3228- let node = s: Node (s: NODE_DICT )
3229- let node.pos = token.pos
3230- let node.value = []
3237+ let node = s: Node (-1 )
3238+ let p = token.pos
32313239 let token = self .tokenizer.peek ()
" '{}' => empty dict.
32323240 if token.type == s: TOKEN_CCLOSE
3241+ " dict
32333242 call self .tokenizer.get ()
3234- else
3243+ let node = s: Node (s: NODE_DICT )
3244+ let node.pos = p
3245+ let node.value = []
" A quoted first key can only be a dict literal, never a lambda arg.
3246+ elseif token.type == s: TOKEN_DQUOTE || token.type == s: TOKEN_SQUOTE
3247+ " dict
3248+ let node = s: Node (s: NODE_DICT )
3249+ let node.pos = p
3250+ let node.value = []
32353251 while 1
32363252 let key = self .parse_expr1 ()
32373253 let token = self .tokenizer.get ()
@@ -3260,6 +3276,72 @@ function! s:ExprParser.parse_expr9()
32603276 throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
32613277 endif
32623278 endwhile
" Otherwise: parse as a lambda.  Collect argument identifiers into
" node.rlist until TOKEN_ARROW, then parse the body into node.left.
3279+ else
3280+ " lambda ref: s:NODE_FUNCTION
3281+ let node = s: Node (s: NODE_LAMBDA )
3282+ let node.pos = p
3283+ let node.rlist = []
" 'named' tracks seen argument names to reject duplicates (E853).
3284+ let named = {}
3285+ while 1
3286+ let token = self .tokenizer.get ()
3287+ if token.type == s: TOKEN_ARROW
3288+ break
3289+ elseif token.type == s: TOKEN_IDENTIFIER
3290+ if ! s: isargname (token.value)
3291+ throw s: Err (printf (' E125: Illegal argument: %s' , token.value), token.pos)
3292+ elseif has_key (named, token.value)
3293+ throw s: Err (printf (' E853: Duplicate argument name: %s' , token.value), token.pos)
3294+ endif
3295+ let named[token.value] = 1
3296+ let varnode = s: Node (s: NODE_IDENTIFIER )
3297+ let varnode.pos = token.pos
3298+ let varnode.value = token.value
3299+ " XXX: Vim doesn't skip white space before comma. {a ,b -> ...} => E475
3300+ if s: iswhite (self .reader.p (0 )) && self .tokenizer.peek ().type == s: TOKEN_COMMA
3301+ throw s: Err (' E475: Invalid argument: White space is not allowed before comma' , self .reader.getpos ())
3302+ endif
3303+ let token = self .tokenizer.get ()
3304+ " handle curly_parts
" If '{' or '}' follows the first identifier, this is actually a
" curly-braces-name ({x}var), so rewind and reparse as an identifier.
" NOTE(review): 'pos' is not defined anywhere in this hunk; presumably
" it was saved via self.reader.tell() before the first get() earlier in
" parse_expr9 -- confirm against the full function.
3305+ if token.type == s: TOKEN_COPEN || token.type == s: TOKEN_CCLOSE
3306+ if ! empty (node.rlist)
3307+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3308+ endif
3309+ call self .reader.seek_set (pos)
3310+ let node = self .parse_identifier ()
3311+ return node
3312+ endif
3313+ call add (node.rlist, varnode)
3314+ if token.type == s: TOKEN_COMMA
3315+ " XXX: Vim allows last comma. {a, b, -> ...} => OK
" NOTE(review): peekn(2) returns a *string* ('->') but s:TOKEN_ARROW is
" a *number* (65); VimL coerces the string to 0 for ==, so this check is
" effectively always false.  The trailing-comma case still works only
" because the next loop iteration consumes '->' via the TOKEN_ARROW
" branch above.  Likely intended: self.tokenizer.peek().type ==
" s:TOKEN_ARROW, or self.reader.peekn(2) ==# '->'.  Confirm and fix.
3316+ if self .reader.peekn (2 ) == s: TOKEN_ARROW
3317+ call self .tokenizer.get ()
3318+ break
3319+ endif
3320+ elseif token.type == s: TOKEN_ARROW
3321+ break
3322+ else
3323+ throw s: Err (printf (' unexpected token: %s, type: %d' , token.value, token.type ), token.pos)
3324+ endif
" '...' (varargs) must be the last argument: only '->' may follow it.
3325+ elseif token.type == s: TOKEN_DOTDOTDOT
3326+ let varnode = s: Node (s: NODE_IDENTIFIER )
3327+ let varnode.pos = token.pos
3328+ let varnode.value = token.value
3329+ call add (node.rlist, varnode)
3330+ let token = self .tokenizer.get ()
3331+ if token.type == s: TOKEN_ARROW
3332+ break
3333+ else
3334+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3335+ endif
3336+ else
3337+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3338+ endif
3339+ endwhile
" Lambda body is a full expr1; the literal must close with '}'.
3340+ let node.left = self .parse_expr1 ()
3341+ let token = self .tokenizer.get ()
3342+ if token.type != s: TOKEN_CCLOSE
3343+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3344+ endif
32633345 endif
32643346 elseif token.type == s: TOKEN_POPEN
32653347 let node = self .parse_expr1 ()
@@ -3929,6 +4011,8 @@ function! s:Compiler.compile(node)
" Hunk inside s:Compiler.compile(): adds the dispatch arm routing the
" new NODE_LAMBDA kind to compile_lambda, following the existing
" one-arm-per-node-kind pattern.
39294011 return self .compile_curlynamepart (a: node )
39304012 elseif a: node .type == s: NODE_CURLYNAMEEXPR
39314013 return self .compile_curlynameexpr (a: node )
4014+ elseif a: node .type == s: NODE_LAMBDA
4015+ return self .compile_lambda (a: node )
39324016 else
39334017 throw printf (' Compiler: unknown node: %s' , string (a: node ))
39344018 endif
@@ -4384,6 +4468,11 @@ function! s:Compiler.compile_curlynameexpr(node)
43844468 return ' {' . self .compile (a: node .value) . ' }'
43854469endfunction
43864470
" Compile a NODE_LAMBDA into this compiler's S-expression output form:
"   (lambda (arg1 arg2 ...) body)
" where .rlist holds the argument identifier nodes and .left the body.
" NOTE: map() rewrites a:node.rlist in place (nodes become strings);
" this matches the one-pass convention of the other compile_* helpers.
function! s:Compiler.compile_lambda(node)
  let rlist = map(a:node.rlist, 'self.compile(v:val)')
  return printf('(lambda (%s) %s)', join(rlist, ' '), self.compile(a:node.left))
endfunction
4475+
43874476" TODO: under construction
43884477let s: RegexpParser = {}
43894478
0 commit comments