@@ -134,6 +134,7 @@ let s:NODE_ENV = 88
134134let s: NODE_REG = 89
135135let s: NODE_CURLYNAMEPART = 90
136136let s: NODE_CURLYNAMEEXPR = 91
137+ let s: NODE_LAMBDA = 92
137138
138139let s: TOKEN_EOF = 1
139140let s: TOKEN_EOL = 2
@@ -199,6 +200,7 @@ let s:TOKEN_SEMICOLON = 61
199200let s: TOKEN_BACKTICK = 62
200201let s: TOKEN_DOTDOTDOT = 63
201202let s: TOKEN_SHARP = 64
203+ let s: TOKEN_ARROW = 65
202204
203205let s: MAX_FUNC_ARGS = 20
204206
@@ -398,6 +400,7 @@ endfunction
398400" REG .value
399401" CURLYNAMEPART .value
400402" CURLYNAMEEXPR .value
403+ " LAMBDA .rlist .left
" Construct a bare AST node: a dict holding only its 'type' tag.
" Kind-specific fields (.value, .rlist, .left, ... per the table above)
" are attached by callers after construction.
401404function ! s: Node (type )
402405 return {' type' : a: type }
403406endfunction
@@ -2568,8 +2571,13 @@ function! s:ExprTokenizer.get2()
25682571 call r .seek_cur (1 )
25692572 return self .token (s: TOKEN_PLUS , ' +' , pos)
25702573 elseif c == # ' -'
2571- call r .seek_cur (1 )
2572- return self .token (s: TOKEN_MINUS , ' -' , pos)
2574+ if r .p (1 ) == # ' >'
2575+ call r .seek_cur (2 )
2576+ return self .token (s: TOKEN_ARROW , ' ->' , pos)
2577+ else
2578+ call r .seek_cur (1 )
2579+ return self .token (s: TOKEN_MINUS , ' -' , pos)
2580+ endif
25732581 elseif c == # ' .'
25742582 if r .p (1 ) == # ' .' && r .p (2 ) == # ' .'
25752583 call r .seek_cur (3 )
@@ -3200,6 +3208,7 @@ endfunction
32003208" 'string'
32013209" [expr1, ...]
32023210" {expr1: expr1, ...}
3211+ " {args -> expr1}
32033212" &option
32043213" (expr1)
32053214" variable
@@ -3251,42 +3260,121 @@ function! s:ExprParser.parse_expr9()
32513260 endwhile
32523261 endif
32533262 elseif token.type == s: TOKEN_COPEN
3254- let node = s: Node (s: NODE_DICT )
3255- let node.pos = token.pos
3256- let node.value = []
3257- let token = self .tokenizer.peek ()
3258- if token.type == s: TOKEN_CCLOSE
3259- call self .tokenizer.get ()
3260- else
3263+ let savepos = self .reader.tell ()
3264+ let nodepos = token.pos
3265+ let token = self .tokenizer.get ()
3266+ let lambda = token.type == s: TOKEN_ARROW
3267+ if ! lambda && ! (token.type == s: TOKEN_SQUOTE || token.type == s: TOKEN_DQUOTE )
3268+ " if the token type is string, we cannot peek the next token and we
3269+ " can assume it's not a lambda.
3270+ let token2 = self .tokenizer.peek ()
3271+ let lambda = token2.type == s: TOKEN_ARROW || token2.type == s: TOKEN_COMMA
3272+ endif
3273+ " fallback to dict or {expr} if true
3274+ let fallback = 0
3275+ if lambda
3276+ " lambda {token,...} {->...} {token->...}
3277+ let node = s: Node (s: NODE_LAMBDA )
3278+ let node.pos = nodepos
3279+ let node.rlist = []
3280+ let named = {}
32613281 while 1
3262- let key = self .parse_expr1 ()
3263- let token = self .tokenizer.get ()
3264- if token.type == s: TOKEN_CCLOSE
3265- if ! empty (node.value)
3266- throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3267- endif
3268- call self .reader.seek_set (pos)
3269- let node = self .parse_identifier ()
3282+ if token.type == s: TOKEN_ARROW
32703283 break
3271- endif
3272- if token.type != s: TOKEN_COLON
3273- throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3274- endif
3275- let val = self .parse_expr1 ()
3276- call add (node.value, [key , val])
3277- let token = self .tokenizer.get ()
3278- if token.type == s: TOKEN_COMMA
3279- if self .tokenizer.peek ().type == s: TOKEN_CCLOSE
3284+ elseif token.type == s: TOKEN_IDENTIFIER
3285+ if ! s: isargname (token.value)
3286+ throw s: Err (printf (' E125: Illegal argument: %s' , token.value), token.pos)
3287+ elseif has_key (named, token.value)
3288+ throw s: Err (printf (' E853: Duplicate argument name: %s' , token.value), token.pos)
3289+ endif
3290+ let named[token.value] = 1
3291+ let varnode = s: Node (s: NODE_IDENTIFIER )
3292+ let varnode.pos = token.pos
3293+ let varnode.value = token.value
3294+ " XXX: Vim doesn't skip white space before comma. {a ,b -> ...} => E475
3295+ if s: iswhite (self .reader.p (0 )) && self .tokenizer.peek ().type == s: TOKEN_COMMA
3296+ throw s: Err (' E475: Invalid argument: White space is not allowed before comma' , self .reader.getpos ())
3297+ endif
3298+ let token = self .tokenizer.get ()
3299+ call add (node.rlist, varnode)
3300+ if token.type == s: TOKEN_COMMA
3301+ " XXX: Vim allows last comma. {a, b, -> ...} => OK
3302+ let token = self .tokenizer.peek ()
3303+ if token.type == s: TOKEN_ARROW
3304+ call self .tokenizer.get ()
3305+ break
3306+ endif
3307+ elseif token.type == s: TOKEN_ARROW
3308+ break
3309+ else
3310+ throw s: Err (printf (' unexpected token: %s, type: %d' , token.value, token.type ), token.pos)
3311+ endif
3312+ elseif token.type == s: TOKEN_DOTDOTDOT
3313+ let varnode = s: Node (s: NODE_IDENTIFIER )
3314+ let varnode.pos = token.pos
3315+ let varnode.value = token.value
3316+ call add (node.rlist, varnode)
3317+ let token = self .tokenizer.peek ()
3318+ if token.type == s: TOKEN_ARROW
32803319 call self .tokenizer.get ()
32813320 break
3321+ else
3322+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
32823323 endif
3283- elseif token.type == s: TOKEN_CCLOSE
3284- break
32853324 else
3286- throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3325+ let fallback = 1
3326+ break
32873327 endif
3328+ let token = self .tokenizer.get ()
32883329 endwhile
3330+ if ! fallback
3331+ let node.left = self .parse_expr1 ()
3332+ let token = self .tokenizer.get ()
3333+ if token.type != s: TOKEN_CCLOSE
3334+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3335+ endif
3336+ return node
3337+ endif
32893338 endif
3339+ " dict
3340+ let node = s: Node (s: NODE_DICT )
3341+ let node.pos = nodepos
3342+ let node.value = []
3343+ call self .reader.seek_set (savepos)
3344+ let token = self .tokenizer.peek ()
3345+ if token.type == s: TOKEN_CCLOSE
3346+ call self .tokenizer.get ()
3347+ return node
3348+ endif
3349+ while 1
3350+ let key = self .parse_expr1 ()
3351+ let token = self .tokenizer.get ()
3352+ if token.type == s: TOKEN_CCLOSE
3353+ if ! empty (node.value)
3354+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3355+ endif
3356+ call self .reader.seek_set (pos)
3357+ let node = self .parse_identifier ()
3358+ break
3359+ endif
3360+ if token.type != s: TOKEN_COLON
3361+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3362+ endif
3363+ let val = self .parse_expr1 ()
3364+ call add (node.value, [key , val])
3365+ let token = self .tokenizer.get ()
3366+ if token.type == s: TOKEN_COMMA
3367+ if self .tokenizer.peek ().type == s: TOKEN_CCLOSE
3368+ call self .tokenizer.get ()
3369+ break
3370+ endif
3371+ elseif token.type == s: TOKEN_CCLOSE
3372+ break
3373+ else
3374+ throw s: Err (printf (' unexpected token: %s' , token.value), token.pos)
3375+ endif
3376+ endwhile
3377+ return node
32903378 elseif token.type == s: TOKEN_POPEN
32913379 let node = self .parse_expr1 ()
32923380 let token = self .tokenizer.get ()
@@ -3955,6 +4043,8 @@ function! s:Compiler.compile(node)
39554043 return self .compile_curlynamepart (a: node )
39564044 elseif a: node .type == s: NODE_CURLYNAMEEXPR
39574045 return self .compile_curlynameexpr (a: node )
4046+ elseif a: node .type == s: NODE_LAMBDA
4047+ return self .compile_lambda (a: node )
39584048 else
39594049 throw printf (' Compiler: unknown node: %s' , string (a: node ))
39604050 endif
@@ -4410,6 +4500,11 @@ function! s:Compiler.compile_curlynameexpr(node)
44104500 return ' {' . self .compile (a: node .value) . ' }'
44114501endfunction
44124502
" Compile a NODE_LAMBDA ({args -> expr}) into the S-expression form
" this compiler emits: (lambda (arg ...) body).
" NOTE(review): Vim's map() modifies a:node.rlist in place, so after
" this call the node's rlist holds compiled strings, not child nodes —
" fine if each node is compiled exactly once; confirm nothing
" re-compiles the same AST.
4503+ function ! s: Compiler .compile_lambda (node)
4504+ let rlist = map (a: node .rlist, ' self.compile(v:val)' )
4505+ return printf (' (lambda (%s) %s)' , join (rlist, ' ' ), self .compile (a: node .left ))
4506+ endfunction
4507+
44134508" TODO: under construction
44144509let s: RegexpParser = {}
44154510
0 commit comments