mirror of https://github.com/adelphes/android-dev-ext.git

Commit: add support for parsing parameterless lambdas

@@ -522,6 +522,18 @@ class BracketedExpression extends Expression {
         this.expression = expression;
     }
 }
+class LambdaExpression extends Expression {
+    /**
+     *
+     * @param {*[]} params
+     * @param {Expression|Block} body
+     */
+    constructor(params, body) {
+        super();
+        this.params = params;
+        this.body = body;
+    }
+}
 class IncDecExpression extends Expression {
     /**
      * @param {ResolvedIdent} expr
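
The new AST node just records the parameter list and the parsed body. A standalone sketch of how it represents `() -> 42` (the Expression stub and the object literal standing in for the parsed body are assumptions for illustration, not the extension's real types):

class Expression {}  // stub; the real base class lives in the extension

class LambdaExpression extends Expression {
    constructor(params, body) {
        super();
        this.params = params;  // empty array for a parameterless lambda
        this.body = body;      // Expression (or Block once bodies are parsed)
    }
}

// "() -> 42": no params, body is an expression whose source text is "42"
const node = new LambdaExpression([], { source: '42' });
console.log(`(${node.params.join(', ')}) -> ${node.body.source}`); // () -> 42
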
@@ -1674,6 +1686,19 @@ function rootTerm(tokens, mdecls, scope, imports, typemap) {
             return newTerm(tokens, mdecls, scope, imports, typemap);
         case 'open-bracket':
             tokens.inc();
+            if (tokens.isValue(')')) {
+                // parameterless lambda
+                tokens.expectValue('->');
+                let ident, lambdaBody = null;
+                if (tokens.current.value === '{') {
+                    // todo - parse lambda body
+                    skipBody(tokens);
+                } else {
+                    lambdaBody = expression(tokens, mdecls, scope, imports, typemap);
+                    ident = `() -> ${lambdaBody.source}`;
+                }
+                return new ResolvedIdent(ident, [new LambdaExpression([], lambdaBody)]);
+            }
             matches = expression(tokens, mdecls, scope, imports, typemap);
             tokens.expectValue(')');
             if (isCastExpression(tokens.current, matches)) {
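
The disambiguation here is cheap: after consuming `(`, an immediate `)` can only begin a lambda, so the parser commits and requires `->`. Note that the block-body branch only skips tokens for now (the todo), leaving ident undefined. A runnable mock of the look-ahead; the Tokens class below is an assumption that mirrors the method names in the diff (in particular, it assumes isValue consumes the token on a match, which the expectValue('->') call directly after isValue(')') implies):

class Tokens {
    constructor(values) { this.values = values; this.idx = 0; }
    get current() { return { value: this.values[this.idx] }; }
    inc() { this.idx += 1; return this.current; }
    isValue(v) { return this.values[this.idx] === v ? (this.idx += 1, true) : false; }
    expectValue(v) { if (!this.isValue(v)) throw new Error(`expected ${v}`); }
}

const toks = new Tokens(['(', ')', '->', 'x']);
toks.inc();                 // consume '(' - mirrors tokens.inc() above
if (toks.isValue(')')) {    // '()' with nothing inside: must be a lambda
    toks.expectValue('->'); // the arrow is mandatory after '()'
    console.log('lambda body starts at:', toks.current.value); // x
}
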
@@ -90,7 +90,7 @@ class Token extends TextBlock {
  */
 function tokenize(source, offset = 0, length = source.length) {
     const text = source.slice(offset, offset + length);
-    const raw_token_re = /(\s+|\/\/.*|\/\*[\d\D]*?\*\/|\/\*[\d\D]*)|("[^\r\n\\"]*(?:\\.[^\r\n\\"]*)*"|".*)|('\\u[\da-fA-F]{0,4}'?|'\\?.?'?)|(\.?\d)|([\p{L}\p{N}$_]+)|(\()|([;,?:(){}\[\]@]|\.(?:\.\.)?)|([!=/%*^]=?|<<?=?|>>?>?=?|&[&=]?|\|[|=]?|(\+\+|--)|[+-]=?|~)|$/gu;
+    const raw_token_re = /(\s+|\/\/.*|\/\*[\d\D]*?\*\/|\/\*[\d\D]*)|("[^\r\n\\"]*(?:\\.[^\r\n\\"]*)*"|".*)|('\\u[\da-fA-F]{0,4}'?|'\\?.?'?)|(\.?\d)|([\p{L}\p{N}$_]+)|(\()|([;,?:(){}\[\]@]|\.(?:\.\.)?)|([!=/%*^]=?|<<?=?|>>?>?=?|&[&=]?|\|[|=]?|(\+\+|--)|->|[+-]=?|~)|$/gu;
     const raw_token_types = [
         'wsc',
         'string-literal',
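
The only change is the new `->` alternative, placed before `[+-]=?`. Order matters: regex alternation tries branches left to right, so without it `-` would match on its own and the arrow would come out as two tokens. A simplified demonstration (standalone sketch, cut down from the raw_token_re above):

const before = /(\+\+|--)|[+-]=?|>|~/g;
const after  = /(\+\+|--)|->|[+-]=?|>|~/g;
console.log('->'.match(before)); // [ '-', '>' ]  two tokens
console.log('->'.match(after));  // [ '->' ]      one lambda-arrow token
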
@@ -209,7 +209,7 @@ function tokenize(source, offset = 0, length = source.length) {
  * [~!] unary
  * ```
  */
-const operator_re = /^(?:(=|[/%*&|^+-]=|>>>?=|<<=)|(\+\+|--)|([!=]=)|([<>]=?)|([&|^])|(<<|>>>?)|(&&|[|][|])|([*%/])|([+-])|([~!]))$/;
+const operator_re = /^(?:(=|[/%*&|^+-]=|>>>?=|<<=)|(\+\+|--)|([!=]=)|([<>]=?)|([&|^])|(<<|>>>?)|(&&|[|][|])|([*%/])|(->)|([+-])|([~!]))$/;
 /**
  * @typedef {
 'assignment-operator'|
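
operator_re classifies an already-scanned operator token: exactly one capture group matches, and its index selects the operator kind. The new (->) group is the 9th, slotted between the muldiv and plus/minus groups. A quick standalone check:

const operator_re = /^(?:(=|[/%*&|^+-]=|>>>?=|<<=)|(\+\+|--)|([!=]=)|([<>]=?)|([&|^])|(<<|>>>?)|(&&|[|][|])|([*%/])|(->)|([+-])|([~!]))$/;
const m = '->'.match(operator_re);
// find the index of the one group that matched (groups are 1-based)
console.log(m.findIndex((g, i) => i > 0 && g !== undefined)); // 9
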
@@ -220,6 +220,7 @@ const operator_re = /^(?:(=|[/%*&|^+-]=|>>>?=|<<=)|(\+\+|--)|([!=]=)|([<>]=?)|([
 'shift-operator'|
 'logical-operator'|
 'muldiv-operator'|
+'lambda-operator'|
 'plumin-operator'|
 'unary-operator'} OperatorKind
  */
@@ -233,6 +234,7 @@ const operator_token_types = [
 'shift-operator',
 'logical-operator',
 'muldiv-operator',
+'lambda-operator',
 'plumin-operator',
 'unary-operator',
 ]
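
Both lists must stay aligned with the capture groups of operator_re, which is why 'lambda-operator' lands in the same position in the OperatorKind typedef and in operator_token_types. A sketch of the lookup this alignment supports (the operatorKind helper is an assumption for illustration, not necessarily how the extension maps matches to kinds):

function operatorKind(op, operator_re, operator_token_types) {
    const m = op.match(operator_re);
    if (!m) return null;
    const group = m.findIndex((g, i) => i > 0 && g !== undefined);
    return operator_token_types[group - 1]; // groups are 1-based, the array 0-based
}

// With (->) as the 9th group, operatorKind('->', ...) returns the 9th entry of
// operator_token_types, which the insertion above keeps as 'lambda-operator'.
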