first hacky version of source parsing and type checking

Dave Holoway
2020-05-16 16:59:58 +01:00
parent 3156a2ddc2
commit 79bba76b9f
22 changed files with 2646 additions and 289 deletions

View File

@@ -0,0 +1,116 @@
/**
* @typedef {import('./parsetypes/import')} ImportDeclaration
*/
const ResolvedImport = require('./parsetypes/resolved-import');
/**
* Search a newline-separated list of type names for values that match a dotted import.
*
* @param {string} typenames newline-separated list of fully qualified type names
* @param {string} dotted_import fully-qualified import name (e.g. "java.util")
* @param {boolean} demandload true if this is a demand-load import
*/
function fetchImportedTypes(typenames, dotted_import, demandload) {
const matcher = demandload
// for demand-load, we search for any types that begin with the specified import name
// - note that after the import text, only words and $ are allowed (because additional dots would imply a subpackage)
? new RegExp(`^${dotted_import.replace(/\./g, '[.$]')}[.$][\\w$]+$`, 'gm')
// for exact-load, we search for any types that precisely match the specified import name
: new RegExp(`^${dotted_import.replace(/\./g, '[.$]')}$`, 'gm');
// run the regex against the list of type names
const matching_names = typenames.match(matcher);
return matching_names;
}
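// Illustrative sketch (not part of the original commit) of how the two regexes behave,
// using a small hypothetical typename list:
//
//   const names = ['java.util.List', 'java.util.Map', 'java.util.Map$Entry', 'java.util.concurrent.Future'].join('\n');
//   fetchImportedTypes(names, 'java.util', true);       // -> ['java.util.List', 'java.util.Map', 'java.util.Map$Entry']
//   fetchImportedTypes(names, 'java.util.Map', false);  // -> ['java.util.Map']
//
// The demand-load match excludes 'java.util.concurrent.Future' because the extra dot implies a subpackage.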
/**
* @param {string} typenames newline-separated list of fully qualified type names
* @param {import('./parsetypes/import')} import_decl import declaration
*/
function resolveImportTypes(typenames, import_decl) {
const dotted = import_decl.getDottedName();
return fetchImportedTypes(typenames, dotted, !!import_decl.asterisk);
}
/**
* Resolve a set of imports for a module.
*
* Note that the order of the resolved imports is important for correct type resolution:
* - same-package imports are first,
* - followed by import declarations (in order of declaration),
* - followed by implicit packages
*
* @param {*} androidLibrary imported types from the Android platform library
* @param {import('./parsetypes/import')[]} imports list of declared imports in the module
* @param {string} package_name package name of the module
* @param {import('./mti').Type[]} source_mtis MTIs representing types declared in the source
* @param {string[]} [implicitPackages] list of implicit demand-load packages
*/
function resolveImports(androidLibrary, imports, package_name, source_mtis, implicitPackages = ['java.lang']) {
/**
* create a new Map that maps JRE type names to MTI instances
* @type {Map<string, import('./mti').Type>}
*/
const typemap = new Map(
androidLibrary.types.map(mti => [`${mti.package}.${mti.name}`, mti])
);
// add the source MTIs
// todo - should we overwrite entries when source MTIs match types in the library?
source_mtis.forEach(mti => typemap.set(`${mti.package}.${mti.name}`, mti))
// construct the list of typenames
const typenames = [...typemap.keys()].join('\n');
/**
* The list of explicit import declarations we are unable to resolve
* @type {ImportDeclaration[]}
*/
const unresolved = [];
/** @type {ResolvedImport[]} */
const resolved = [];
// import types matching the current package
if (package_name) {
const matches = fetchImportedTypes(typenames, package_name, true);
if (matches)
resolved.push(new ResolvedImport(null, matches, typemap, 'owner-package'));
}
// import types from each import declaration
imports.forEach(import_decl => {
const matches = resolveImportTypes(typenames, import_decl);
if (matches) {
resolved.push(new ResolvedImport(import_decl, matches, typemap, 'import'));
} else {
// if we cannot match the import to any types, add it to the unresolved list so
// we can flag it as a warning later.
// Note that empty packages (packages with no types) will appear here - they
// are technically valid, but represent useless imports
unresolved.push(import_decl);
}
});
// import types from the implicit packages
implicitPackages.forEach(package_name => {
const matches = fetchImportedTypes(typenames, package_name, true);
if (matches)
resolved.push(new ResolvedImport(null, matches, typemap, 'implicit-import'));
})
/**
* return the resolved and unresolved imports.
* The typemap is also included to support fully qualified type names that, by virtue of
* being fully-qualified, don't require importing.
*/
return {
resolved,
unresolved,
typemap,
}
}
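// Minimal usage sketch (illustrative only - `androidLibrary` is assumed to be an object with a
// `types` array of MTI type instances, and `parsed` a ParseResult from the source parser):
//
//   const { resolved, unresolved, typemap } = resolveImports(androidLibrary, parsed.imports, 'com.example.app', source_mtis);
//   // resolved[0].import_kind === 'owner-package' (when the package resolves to any types)
//   unresolved.forEach(imp => console.log(`unresolved import: ${imp.getDottedName()}`));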
module.exports = {
resolveImports,
ResolvedImport,
}

View File

@@ -16,22 +16,27 @@ function packageNameFromRef(ref, mti) {
/**
* @param {number} ref
* @param {MTI} unit
*/
function typeFromRef(ref, unit) {
if (typeof ref !== 'number') {
return null;
}
if (ref < 16) {
return KnownTypes[ref];
}
return unit.referenced.types[ref - 16];
}
function indent(s) {
return '\n' + s.split('\n').map(s => ` ${s}`).join('\n');
}
/**
* @typedef {MTIType|MTIArrayType|MTIPrimitiveType} Type
* @typedef {'class'|'interface'|'enum'|'@interface'|'primitive'|'array'} MTITypeKind
*/
class MinifiableInfo {
constructor(minified) {
@@ -75,11 +80,132 @@ class MinifiableInfo {
```
*/
class MTI extends MinifiableInfo {
/**
* @param {string} package_name
* @param {string} docs
* @param {string[]} modifiers
* @param {'class'|'enum'|'interface'|'@interface'} typeKind
* @param {string} name
*/
addType(package_name, docs, modifiers, typeKind, name) {
const t = {
d: docs,
p: this.addPackage(package_name),
m: getTypeMods(modifiers, typeKind),
n: name.replace(/\./g,'$'),
v: [],
e: /interface/.test(typeKind) ? []
: typeKind === 'enum' ? this.addRefType('java.lang', 'Enum')
: this.addRefType('java.lang', 'Object'),
i: [],
f: [],
c: [],
g: [],
}
this.minified.it.push(t);
const mtitype = new MTIType(this, t);
this.types.push(mtitype);
return mtitype;
}
/**
* @param {number} base_typeref
* @param {number[]} type_args
*/
addGenericRefType(base_typeref, type_args) {
const targs_key = type_args.join(',');
let idx = this.minified.rt.findIndex(t => (t.n === base_typeref) && !t.a && t.g && (t.g.join(',') === targs_key));
if (idx < 0) {
const rt_mti = {
n: base_typeref,
g: type_args,
};
idx = this.minified.rt.push(rt_mti) - 1;
this.referenced.types.push(new ReferencedType(this, rt_mti));
}
return idx + 16;
}
addArrayRefType(element_typeref, dimensions) {
let idx = this.minified.rt.findIndex(t => (t.n === element_typeref) && !t.g && (t.a === dimensions));
if (idx < 0) {
const rt_mti = {
n: element_typeref,
a: dimensions,
};
idx = this.minified.rt.push(rt_mti) - 1;
this.referenced.types.push(new ReferencedType(this, rt_mti));
}
return idx + 16;
}
/**
* @param {string} package_name
* @param {string} type_name
*/
addRefType(package_name, type_name) {
let idx;
if (!package_name || package_name === 'java.lang') {
idx = KnownTypes.findIndex(t => t.name === type_name);
if (idx >= 0) {
return idx;
}
}
const pkgref = this.addPackage(package_name);
const jre_type_name = type_name.replace(/\./g, '$');
idx = this.minified.rt.findIndex(t => t.p === pkgref && t.n === jre_type_name);
if (idx < 0) {
const rt_mti = {
p: pkgref,
n: jre_type_name,
};
idx = this.minified.rt.push(rt_mti) - 1;
this.referenced.types.push(new ReferencedType(this, rt_mti))
}
return idx + 16;
}
/**
* @param {string} packagename
*/
addPackage(packagename) {
let idx = KnownPackages.indexOf(packagename);
if (idx >= 0) {
return idx;
}
idx = this.minified.rp.indexOf(packagename);
if (idx < 0) {
idx = this.minified.rp.push(packagename) - 1;
}
return idx + 16;
}
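// Illustrative note (not part of the original commit): pkgrefs 0-15 are reserved for KnownPackages,
// so refs for any other package are offset by 16 into the minified `rp` list. e.g. (hypothetical values):
//   unit.addPackage('java.lang')        // -> index into KnownPackages (< 16)
//   unit.addPackage('com.example.app')  // -> 16 + position in this.minified.rp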
static get defaultPackageRef() {
return KnownPackages.indexOf("");
}
/**
* @param {string} name
*/
static fromPrimitive(name) {
return MTIPrimitiveType.fromName(name);
}
/**
* @param {Type} element
*/
static makeArrayType(element, dimensions) {
let res = element;
for (let i = 0; i < dimensions; i++) {
res = new MTIArrayType(res);
}
return res;
}
/**
* @param {{rp:string[], rt:*[], it:*[]}} mti
*/
constructor(mti = {rp:[],rt:[],it:[]}) {
super(mti);
// initialise the lists of referenced packages and types
this.referenced = {
@@ -162,7 +288,7 @@ class ReferencedType extends MinifiableInfo {
baseType,
/** @type {ReferencedType[]} */
typeArgs: mti.g && mti.g.map(t => typeFromRef(t, unit)),
/** @type {string} */
arr: '[]'.repeat(mti.a | 0),
@@ -177,10 +303,10 @@ class ReferencedType extends MinifiableInfo {
get name() {
// note: names in enclosed types are in x$y format
const n = this.parsed.baseType ? this.parsed.baseType.name : this.minified.n;
const type_args = this.parsed.typeArgs
? `<${this.parsed.typeArgs.map(tp => tp.name).join(',')}>`
: ''
return `${n}${type_args}${this.parsed.arr}`;
}
get dottedName() {
@@ -188,6 +314,97 @@ class ReferencedType extends MinifiableInfo {
}
}
class MTITypeBase extends MinifiableInfo {
/**
* type docs
* @type {string}
*/
get docs() { return this.minified.d }
/**
* type modifiers
* @type {number}
*/
get modifiers() { return this.minified.m }
/**
* type name (in x$y format for enclosed types)
* @type {string}
*/
get name() { return this.minified.n }
/**
* package this type belongs to
*/
get package() { return null }
/**
* @type {MTIConstructor[]}
*/
get constructors() { return [] }
/**
* @type {MTIField[]}
*/
get fields() { return [] }
/**
* @type {MTIMethod[]}
*/
get methods() { return [] }
/**
* @param {string} name
*/
hasModifier(name) {
return ((this.minified.m | 0) & getModifierBit(name)) !== 0;
}
toSource() {
return this.name;
}
}
class MTIArrayType extends MTITypeBase {
/**
* @param {Type} element_type
*/
constructor(element_type) {
super({
n: element_type.name + '[]',
d: '',
m: 0, // should array types be implicitly final?
});
this.element_type = element_type;
}
get fullyDottedRawName() { return `${this.element_type.fullyDottedRawName}[]` }
/** @type {MTITypeKind} */
get typeKind() { return 'array' }
}
class MTIPrimitiveType extends MTITypeBase {
static _cached = new Map();
static fromName(name) {
let value = MTIPrimitiveType._cached.get(name);
if (!value) {
value = new MTIPrimitiveType({
n: name,
d: '',
m: 0,
});
MTIPrimitiveType._cached.set(name, value);
}
return value;
}
get fullyDottedRawName() { return this.name }
/** @type {MTITypeKind} */
get typeKind() { return 'primitive' }
}
/**
* MTIType encodes a complete type (class, interface or enum)
@@ -205,7 +422,7 @@ class ReferencedType extends MinifiableInfo {
* }
* ```
*/
class MTIType extends MTITypeBase {
/**
* @param {MTI} unit
@@ -241,26 +458,13 @@ class MTIType extends MinifiableInfo {
}
}
/**
* type docs
* @type {string}
*/
get docs() { return this.minified.d }
/**
* type modifiers
* @type {number}
*/
get modifiers() { return this.minified.m }
/**
* type name (in x$y format for enclosed types)
* @type {string}
*/
get name() { return this.minified.n }
get dottedRawName() { return this.minified.n.replace(/[$]/g, '.') };
get fullyDottedRawName() {
const pkg = this.package;
return pkg ? `${pkg}.${this.dottedRawName}` : this.dottedRawName;
};
get dottedName() {
const t = this.typevars.map(t => t.name).join(',');
return t ? `${this.dottedRawName}<${t}>` : this.dottedRawName;
@@ -277,6 +481,7 @@ class MTIType extends MinifiableInfo {
*/
get package() { return this.parsed.package }
/** @type {MTITypeKind} */
get typeKind() {
const m = this.minified.m;
return (m & TypeModifiers.enum)
@@ -332,7 +537,9 @@ class MTIType extends MinifiableInfo {
// only add extends if it's not derived from java.lang.Object
if (this.extends !== KnownTypes[3]) {
const x = Array.isArray(this.extends) ? this.extends : [this.extends];
if (x.length) {
ex = `extends ${x.map(type => type.dottedName).join(', ')} `;
}
}
}
@@ -348,6 +555,85 @@ class MTIType extends MinifiableInfo {
`}`
].join('\n');
}
/**
* @param {MTI} unit
* @param {number} typeref
*/
setExtends(unit, typeref) {
if (Array.isArray(this.minified.e)) {
this.minified.e.push(typeref);
// @ts-ignore
this.parsed.extends.push(typeFromRef(typeref, unit));
} else {
this.minified.e = typeref;
this.parsed.extends = typeFromRef(typeref, unit);
}
}
/**
* @param {MTI} unit
* @param {string} docs
* @param {string[]} modifiers
* @param {number} typeref
* @param {string} name
*/
addField(unit, docs, modifiers, typeref, name) {
const o = {
d: docs,
m: getAccessMods(modifiers),
n: name,
t: typeref,
}
this.minified.f.push(o);
this.parsed.fields.push(new MTIField(unit, o));
}
/**
* @param {MTI} unit
* @param {string} docs
* @param {string[]} modifiers
*/
addConstructor(unit, docs, modifiers) {
const o = {
d: docs,
m: getAccessMods(modifiers),
p: [],
}
this.minified.c.push(o);
const c = new MTIConstructor(unit, o);
this.parsed.constructors.push(c);
return c;
}
/**
* @param {MTI} unit
* @param {MTIType} owner
* @param {string} docs
* @param {string[]} modifiers
* @param {number} typeref
* @param {string} name
*/
addMethod(unit, owner, docs, modifiers, typeref, name) {
let g = this.minified.g.find(m => m.name === name);
if (!g) {
g = {
n:name,
s: [],
}
this.minified.g.push(g);
}
const o = {
d: docs,
m: getAccessMods(modifiers),
t: typeref,
p: [],
};
g.s.push(o);
const method = new MTIMethod(unit, owner, name, o);
this.parsed.methods.push(method);
return method;
}
}
/**
@@ -442,6 +728,22 @@ class MTIConstructor extends MTIMethodBase {
const typename = this.parsed.typename.split('$').pop();
return `${this.fmtdocs()}${access(this.modifiers)}${typename}(${this.parameters.map(p => p.toSource()).join(', ')}) {}`
}
/**
* @param {MTI} unit
* @param {string[]} modifiers
* @param {number} typeref
* @param {string} name
*/
addParameter(unit, modifiers, typeref, name) {
const o = {
m: getAccessMods(modifiers),
t: typeref,
n: name,
}
this.minified.p.push(o);
this.parsed.parameters.push(new MTIParameter(unit, o));
}
}
/**
@@ -518,6 +820,10 @@ class MTIConstructor extends MTIMethodBase {
*/
get parameters() { return this.parsed.parameters }
toDeclSource() {
return `${this.return_type.dottedName} ${this.name}(${this.parameters.map(p => p.toSource()).join(', ')})`;
}
toSource() {
let m = this.modifiers, body = ' {}';
if (m & 0x400) {
@@ -530,6 +836,22 @@ class MTIConstructor extends MTIMethodBase {
}
return `${this.fmtdocs()}${access(m)}${this.return_type.dottedName} ${this.name}(${this.parameters.map(p => p.toSource()).join(', ')})${body}`
}
/**
* @param {MTI} unit
* @param {string[]} modifiers
* @param {number} typeref
* @param {string} name
*/
addParameter(unit, modifiers, typeref, name) {
const o = {
m: getAccessMods(modifiers),
t: typeref,
n: name,
}
this.minified.p.push(o);
this.parsed.parameters.push(new MTIParameter(unit, o));
}
}
/**
@@ -589,6 +911,27 @@ function access(modifier_bits) {
return decls.join(' ');
}
/**
* @param {string} modifier
*/
function getModifierBit(modifier) {
const i = access_keywords.indexOf(modifier);
return i < 0 ? 0 : (1 << i);
}
/**
* @param {string[]} modifiers
* @param {boolean} [varargs]
*/
function getAccessMods(modifiers, varargs = false) {
let m = 0;
modifiers.forEach(modifier => m |= getModifierBit(modifier));
if (varargs) {
m |= getModifierBit('transient');
}
return m;
}
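// Example of the encoding above (illustrative; the exact bit values depend on access_keywords,
// which is defined elsewhere in this file and not shown in this diff):
//   getAccessMods(['public', 'static'])  // -> getModifierBit('public') | getModifierBit('static')
//   getAccessMods(['final'], true)       // -> 'final' bit | 'transient' bit (varargs reuses the transient flag)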
const TypeModifiers = {
public: 0b0000_0000_0000_0001, // 0x1
final: 0b0000_0000_0001_0000, // 0x10
@@ -620,6 +963,29 @@ function typemods(modifier_bits) {
return modifiers.join(' ');
}
/**
* @param {string[]} modifiers
* @param {MTITypeKind} typeKind
*/
function getTypeMods(modifiers, typeKind) {
let m = 0;
if (modifiers.includes('public')) m |= TypeModifiers.public;
if (modifiers.includes('final')) m |= TypeModifiers.final;
if (modifiers.includes('abstract')) m |= TypeModifiers.abstract;
switch (typeKind) {
case "interface":
m |= TypeModifiers.interface | TypeModifiers.abstract;
break;
case "@interface":
m |= TypeModifiers['@interface'] | TypeModifiers.abstract;
break;
case "enum":
m |= TypeModifiers.enum | TypeModifiers.final;
break;
}
return m;
}
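// Examples of the implied bits above (illustrative, not part of the original commit):
//   getTypeMods(['public'], 'interface')  // -> public | interface | abstract (interfaces are implicitly abstract)
//   getTypeMods(['public'], 'enum')       // -> public | enum | final (enums are implicitly final)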
/**
* List of known/common packages.
* These are used/encoded as pkgrefs between 0 and 15.

langserver/java/parser.js (new file, 498 additions)
View File

@@ -0,0 +1,498 @@
const Annotation = require('./parsetypes/annotation');
const Declaration = require('./parsetypes/declaration');
const FMCDeclaration = require('./parsetypes/fmc');
const ImportDeclaration = require('./parsetypes/import');
const PackageDeclaration = require('./parsetypes/package');
const ParameterDeclaration = require('./parsetypes/parameter');
const ParseProblem = require('./parsetypes/parse-problem');
const ParseResult = require('./parsetypes/parse-result');
const ParseSyntaxError = require('./parsetypes/parse-error');
const ProblemSeverity = require('./parsetypes/problem-severity');
const Token = require('./parsetypes/token');
const TypeDeclaration = require('./parsetypes/type');
const TypeIdent = require('./parsetypes/typeident');
const TypeParameters = require('./parsetypes/type-parameters');
/**
* @typedef {import('./parsetypes/modifier')} Modifier
*/
/**
* @param {Token[]} tokens
* @param {number} idx
*/
function findToken(tokens, idx) {
return tokens.find(t => t.simplified_text_idx === idx);
}
/**
* @param {string} simplified
* @param {number} lastIndex
*/
function parseToBracketEnd(simplified, lastIndex) {
// parse until close bracket
let re = /[()]/g, balance = 1;
const start = re.lastIndex = lastIndex;
for (let m; m = re.exec(simplified);) {
if (m[0] === '(') balance++;
else if (--balance === 0) {
re.lastIndex++;
break;
}
}
return {
start,
end: re.lastIndex,
}
}
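// Illustrative note (assumption, not part of the original commit): `lastIndex` is expected to point
// just past an opening '(' in the simplified text. The loop keeps a bracket balance, so nested
// annotation or method brackets - e.g. "(W W, @ W(S) W W)" - do not terminate the scan early;
// the returned range covers everything up to the matching ')'.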
/**
* @param {string} simplified
* @param {Token[]} tokens
* @param {{start: number, end: number}} simplified_range
* @param {*[]} invalids
*/
function parseParameters(simplified, tokens, simplified_range, invalids) {
const decls = [
/[ X]+/g,
/@ *W( *\. *W)*( *\()?/g,
/M/g,
/W(?: *\. *W)*(?: *<.*?>)?(?: *\[ *\])*(?: +|( *\.\.\. *))W(?: *\[ *\])*( *,)?/g, // parameter decl
/(\)|$)/g, // end of params
];
const parameters = [];
/** @type {Modifier[]} */
const modifiers = [];
let lastIndex = simplified_range.start;
for(;;) {
/** @type {{idx:number, d: RegExp, m:RegExpMatchArray}} */
let best_match = null, next_best = null;
decls.find((d,idx) => {
d.lastIndex = lastIndex;
const m = d.exec(simplified);
if (!m) return;
if (m.index === lastIndex) {
best_match = {idx, d, m};
return true;
}
if (idx === 0) {
return;
}
if (!next_best || m.index < next_best.m.index) {
next_best = {idx, d, m};
}
});
if (!best_match) {
const errorToken = findToken(tokens, lastIndex);
const error = new ParseSyntaxError(null, modifiers.splice(0), errorToken);
invalids.push(error);
best_match = next_best;
if (!next_best) {
break;
}
}
lastIndex = best_match.d.lastIndex;
if (best_match.idx === 1) {
// annotation
const at = findToken(tokens, best_match.m.index);
const name = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
const annotation = new Annotation(at, name);
modifiers.push(annotation);
if (best_match.m[0].endsWith('(')) {
lastIndex = parseToBracketEnd(simplified, lastIndex).end;
}
}
else if (best_match.idx === 2) {
// modifier
const modifier = findToken(tokens, best_match.m.index);
modifiers.push(modifier);
}
else if (best_match.idx === 3) {
// parameter
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
const varargs = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].indexOf('...')) : null;
const comma = best_match.m[2] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf(',')) : null;
const typetokens = [];
const first_type_token = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
for (let t = first_type_token, i = tokens.indexOf(t); t !== name; t = tokens[++i]) {
if (t.simplified_text !== ' ')
typetokens.push(t);
}
const param = new ParameterDeclaration(modifiers.splice(0), new TypeIdent(typetokens), varargs, name, comma);
parameters.push(param);
} else if (best_match.idx === 4) {
// end of parameters
break;
}
}
return parameters;
}
/**
* @param {Token[]} typelist_tokens
*/
function parseTypeIdentList(typelist_tokens) {
// split the typelist into typetoken chunks, separated by commas
let typeargs_balance = 0, array_balance = 0;
/** @type {Token[][]} */
let types = [[]];
typelist_tokens.forEach(t => {
switch(t.text) {
case ' ':
if (types[0].length === 0) {
return;
}
break;
case ',':
if (typeargs_balance <= 0 && array_balance <= 0) {
while (types[0][types[0].length - 1].text === ' ') {
types[0].pop();
}
typeargs_balance = array_balance = 0;
types.unshift([]);
return;
}
break;
case '<':
typeargs_balance++;
break;
case '>':
typeargs_balance--;
break;
case ']':
array_balance++;
break;
case '[':
array_balance--;
break;
}
types[0].push(t);
});
// remove any blank entries (start comma or sequential commas)
return types.filter(t => t.length).reverse().map(tokens => new TypeIdent(tokens));
}
/**
* @param {string} source
*/
function parse(source) {
const re = /(\/\*[\d\D]*?\*\/)|(\/\*)|(\*\/)|((?:\/\/.*)|(?:\s+))|(".*?")|('.'?)|\b(package|import|class|enum|interface|extends|implements|throws)\b|\b(public|private|protected|static|final|abstract|native|volatile|transient|synchronized|strictfp)\b|(\.{3}|[@{}()<>,;?*\[\].])|\b(super|new)\b|\b([A-Za-z_]\w*)|(\d[\w.]*)/g;
let source_idx = 0, simplified_text_idx = 0;
/** @type {Token[]} */
let tokens = [];
function mapSimplified(
_,
mlc,
unterminated_mlc,
mlc_end,
slc_ws,
string,
char,
decl_keyword,
modifier,
symbol,
kw,
word
/* number, */
) {
if (mlc) return 'X';//mlc.replace(/[^\n]+/g, '') || ' ';
if (unterminated_mlc) return ' ';
if (mlc_end) return ' ';
if (slc_ws) return ' '; //slc_ws.replace(/[^\n]+/g, '').replace(/ +/,' ') || ' ';
if (string) return 'S';
if (char) return 'C';
if (decl_keyword) return decl_keyword;
if (modifier) return 'M';
if (symbol) return symbol;
if (kw) return kw;
if (word) return 'W';
return 'N';
}
const simplified = source.replace(re, (...args) => {
let text = args[0];
let next_idx = source.indexOf(text, source_idx);
simplified_text_idx += (next_idx - source_idx);
source_idx = next_idx;
const simplified_text = mapSimplified.apply(null, args);
tokens.push(new Token(source_idx, text, simplified_text, simplified_text_idx));
source_idx += text.length;
simplified_text_idx += simplified_text.length;
return simplified_text;
});
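// Illustrative mapping (assumption, not part of the original commit): each token is replaced by a
// single-character or keyword stand-in, with each Token recording its simplified_text_idx, e.g.
//   'package com.example;'           -> 'package W.W;'
//   'public class Foo extends Bar {' -> 'M class W extends W {'
// The declaration regexes below then pattern-match against this simplified form.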
// console.log(simplified);
const decls = [
/ +/g,
/package +W(?: *\. *W)*( *;)?/g,
/import +(M +)?W(?: *\. *W)*( *\.\*)?( *;)?/g,
/@ *W( *\. *W)*( *\()?/g,
/M/g,
/(class|enum|interface|@ *interface) +W(.+?(?= *[a-z{]))/g, // type declaration
/(implements|extends|throws) +W(.+?(?= *[a-z{]))/g, // decl
/W(?: *\. *W)*(?: *<.*?>)?(?: *\[ *\])* +W(?: *\[ *\])*( *[=;(,])?/g, // field/method
/W *\(/g, // constructor
/[{}]/g, // scope
/X/g, // multi-line comment
/<.*?>(?= *[WM@])/g, // type variables
/$/g, // end of file
]
let lastIndex = 0;
let loc = ['base'];
let package_decl = null;
let imports = [];
let modifiers = [];
let types = [];
let invalids = [];
let lastMLC = null;
/** @type {TypeDeclaration[]} */
let type_stack = [null];
for(;;) {
/** @type {{idx:number, d: RegExp, m:RegExpMatchArray}} */
let best_match = null, next_best = null;
decls.find((d,idx) => {
d.lastIndex = lastIndex;
const m = d.exec(simplified);
if (!m) return;
if (m.index === lastIndex) {
best_match = {idx, d, m};
return true;
}
if (idx === 0) {
return;
}
if (!next_best || m.index < next_best.m.index) {
next_best = {idx, d, m};
}
});
if (!best_match) {
const errorToken = findToken(tokens, lastIndex);
const error = new ParseSyntaxError(lastMLC, modifiers.splice(0), errorToken);
invalids.push(error);
lastMLC = null;
console.log(simplified.slice(lastIndex, lastIndex + 100));
best_match = next_best;
if (!next_best) {
break;
}
}
lastIndex = best_match.d.lastIndex;
function parseToExpressionEnd() {
// parse expression
let re = /[(){};]/g, balance = [0,0];
re.lastIndex = lastIndex;
for (let m; m = re.exec(simplified);) {
if (m[0] === '{') balance[0]++;
else if (m[0] === '(') balance[1]++;
else if (m[0] === '}') balance[0]--;
else if (m[0] === ')') balance[1]--;
else if (balance[0] <= 0 && balance[1] <= 0) {
break;
}
}
// console.log(simplified.slice(lastIndex, re.lastIndex));
lastIndex = re.lastIndex;
}
if (best_match.idx === 1) {
// package - map all the name parts
const nameparts = [];
for (let m, re=/W/g; m = re.exec(best_match.m[0]); ) {
const ident = findToken(tokens, best_match.m.index + m.index);
nameparts.push(ident);
}
const semicolon = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].length - 1) : null;
if (!package_decl) {
package_decl = new PackageDeclaration(lastMLC, modifiers.splice(0), nameparts, semicolon);
}
lastMLC = null;
}
if (best_match.idx === 2) {
// import - map all the name parts
const nameparts = [];
for (let m, re=/W/g; m = re.exec(best_match.m[0]); ) {
const ident = findToken(tokens, best_match.m.index + m.index);
nameparts.push(ident);
}
// note: 'static' is a reserved word in strict mode, so use static_ (matching the ImportDeclaration parameter name)
const static_ = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].indexOf('M')) : null;
const asterisk = best_match.m[2] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('*')) : null;
const semicolon = best_match.m[3] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf(';')) : null;
let import_decl = new ImportDeclaration(lastMLC, modifiers.splice(0), nameparts, static_, asterisk, semicolon);
imports.push(import_decl);
lastMLC = null;
}
if (best_match.idx === 3) {
// annotation
const at = findToken(tokens, best_match.m.index);
const name = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
const annotation = new Annotation(at, name);
modifiers.push(annotation);
if (best_match.m[0].endsWith('(')) {
lastIndex = parseToBracketEnd(simplified, lastIndex).end;
}
}
if (best_match.idx === 4) {
// modifier
const modifier = findToken(tokens, best_match.m.index);
modifiers.push(modifier);
}
if (best_match.idx === 5) {
// type declaration
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
/** @type {'class'|'interface'|'enum'|'@interface'} */
// @ts-ignore
const kind = best_match.m[1].replace(/ /g, '');
const type = new TypeDeclaration(type_stack[0], lastMLC, modifiers.splice(0), kind, name);
lastMLC = null;
types.push(type);
type_stack.unshift(type);
loc.unshift('typedecl');
}
if (best_match.idx === 6) {
// extends/implements/throws
const decl_kw = findToken(tokens, best_match.m.index);
const startidx = tokens.indexOf(findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W')));
const endidx = tokens.indexOf(findToken(tokens,best_match.m.index + best_match.m[0].length - 1));
const typelist = parseTypeIdentList(tokens.slice(startidx, endidx + 1));
switch(decl_kw.text) {
case 'throws':
break;
case 'extends':
case 'implements':
if (loc[0] === 'typedecl') {
type_stack[0].super_declarations.push({ decl_kw, typelist });
}
}
}
if (best_match.idx === 7) {
// field or method
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
const typetokens = [];
for (let t = findToken(tokens, best_match.m.index), i = tokens.indexOf(t); t !== name; t = tokens[++i]) {
if (t.simplified_text !== ' ')
typetokens.push(t);
}
let parameters, equals_comma_sc = null;
switch (best_match.m[0].slice(-1)) {
case '(':
// method
let params_source_range = parseToBracketEnd(simplified, lastIndex);
lastIndex = params_source_range.end;
parameters = parseParameters(simplified, tokens, params_source_range, invalids);
break;
case '=':
// initialised field
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
parseToExpressionEnd();
break;
case ',':
// multi-declaration field
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
throw new Error('not implemented');
case ';':
// single field
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
break;
default:
// invalid - but treat as a single field
break;
}
if (type_stack[0]) {
const fmc = new FMCDeclaration(type_stack[0], lastMLC, modifiers.splice(0), best_match.m[0].endsWith('(') ? 'method' : 'field', name, new TypeIdent(typetokens), equals_comma_sc, parameters);
type_stack[0].declarations.push(fmc);
}
lastMLC = null;
}
if (best_match.idx === 8) {
// constructor (if the name matches the type)
let params_source_range = parseToBracketEnd(simplified, lastIndex);
lastIndex = params_source_range.end;
const parameters = parseParameters(simplified, tokens, params_source_range, invalids);
const name = findToken(tokens, best_match.m.index);
if (type_stack[0] && name.text === type_stack[0].name.text) {
const fmc = new FMCDeclaration(type_stack[0], lastMLC, modifiers.splice(0), 'constructor', name, null, null, parameters);
type_stack[0].declarations.push(fmc);
} else {
invalids.push(new ParseSyntaxError(lastMLC, modifiers.splice(0), name));
}
lastMLC = null;
}
if (best_match.idx === 9) {
// open/close scope
if (best_match.m[0] === '{') {
if (loc[0] === 'typedecl') loc[0] = 'typebody';
else if (loc[0] === 'typebody') {
// static initer / method body
let re = /[{}]/g, balance = 1;
re.lastIndex = lastIndex;
for (let m; m = re.exec(simplified);) {
if (m[0] === '{') balance++;
else if (--balance === 0) {
re.lastIndex++;
break;
}
}
lastIndex = re.lastIndex;
}
} else {
// end scope
if (/^type/.test(loc[0])) {
loc.shift();
type_stack.shift();
}
}
}
if (best_match.idx === 10) {
// mlc
lastMLC = findToken(tokens, best_match.m.index);
}
if (best_match.idx === 11) {
// type parameters
const open = findToken(tokens, best_match.m.index);
const close = findToken(tokens, best_match.m.index + best_match.m[0].length - 1);
modifiers.push(new TypeParameters(open, close));
}
if (best_match.idx === 12) {
// end of file
break;
}
}
return new ParseResult(package_decl, imports, types, invalids);
}
module.exports = {
Annotation,
Declaration,
FMCDeclaration,
ImportDeclaration,
PackageDeclaration,
parse,
ParseProblem,
ParseResult,
ProblemSeverity,
Token,
TypeDeclaration,
TypeParameters,
}

View File

@@ -0,0 +1,16 @@
/**
* @typedef {import('./token')} Token
*/
class Annotation {
/**
* @param {Token} at
* @param {Token} name
*/
constructor(at, name) {
this.at = at;
this.name = name;
}
}
module.exports = Annotation;

View File

@@ -0,0 +1,49 @@
const Token = require('./token');
/**
* @typedef {import('./modifier')} Modifier
* @typedef {import('./type')} TypeDeclaration
*/
/**
* Base class for Java declarations.
*/
class Declaration {
/**
* @param {TypeDeclaration} owner_type the type this declaration belongs to (if any)
* @param {Token} docs JavaDocs associated with the declaration
* @param {Modifier[]} modifiers annotations, modifier keywords and type parameters
*/
constructor(owner_type, docs, modifiers) {
this.owner_type = owner_type;
this.docs = docs;
this.modifiers = modifiers;
}
/**
* returns the raw JavaDoc string or an empty string if no doc is present
*/
getDocString() {
return this.docs ? this.docs.text : '';
}
/**
* Returns the raw access modifier text values
* @returns {string[]}
*/
getAccessModifierValues() {
// @ts-ignore
return this.modifiers.filter(m => m instanceof Token).map(t => t.text);
}
/**
* Finds the token matching the specified modifier
* @param {string} name
* @returns {Token}
*/
findModifier(name) {
// @ts-ignore
return this.modifiers.find(m => (m instanceof Token) && (m.text === name));
}
}
module.exports = Declaration;

View File

@@ -0,0 +1,90 @@
/**
* @typedef {import('./modifier')} Modifier
* @typedef {import('./parameter')} ParameterDeclaration
* @typedef {import('./token')} Token
* @typedef {import('./type')} TypeDeclaration
* @typedef {import('./typeident')} TypeIdent
*/
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
const ProblemSeverity = require('./problem-severity');
/**
* Field, method or constructor declaration
*/
class FMCDeclaration extends Declaration {
/**
*
* @param {TypeDeclaration} owner_type
* @param {Token} docs
* @param {Modifier[]} modifiers
* @param {'field'|'method'|'constructor'} kind
* @param {Token} name
* @param {TypeIdent} type
* @param {Token} equals_comma_sc
* @param {ParameterDeclaration[]} parameters
*/
constructor(owner_type, docs, modifiers, kind, name, type, equals_comma_sc, parameters) {
super(owner_type, docs, modifiers);
this.kind = kind;
this.name = name;
this.type = type;
this.equals_comma_sc = equals_comma_sc;
this.parameters = parameters || [];
}
validate() {
const checkDuplicateParameterNames = () => {
const done = new Set();
return this.parameters
.filter(p => {
if (done.has(p.name.text)) {
return true;
}
done.add(p.name.text);
})
.map(p =>
new ParseProblem(p.name, `Duplicate parameter name: '${p.name.text}'`, ProblemSeverity.Error)
);
};
const checkParameterCommas = () => {
const last_param_idx = this.parameters.length - 1;
return this.parameters.map((p, idx) => {
if ((idx < last_param_idx) && !p.comma) {
return new ParseProblem(p.lastToken(), 'Missing comma', ProblemSeverity.Error);
}
else if ((idx === last_param_idx) && p.comma) {
return ParseProblem.syntaxError(p.comma);
}
});
}
const checkFieldSemicolon = () => {
if (this.kind === 'field') {
if (!this.equals_comma_sc) {
return new ParseProblem(this.name, `Missing operator or semicolon`, ProblemSeverity.Error);
}
}
return null;
}
const checkVarargsIsLastParameter = () => {
return this.parameters
.slice(0, -1)
.filter(p => p.varargs)
.map(p =>
new ParseProblem(p.varargs, 'A variable arity parameter must be declared last', ProblemSeverity.Error)
);
};
const problems = [
...ParseProblem.checkAccessModifiers(this.modifiers, this.kind),
...ParseProblem.checkDuplicateModifiers(this.modifiers),
...ParseProblem.checkConflictingModifiers(this.modifiers),
...checkParameterCommas(),
...checkDuplicateParameterNames(),
...checkVarargsIsLastParameter(),
checkFieldSemicolon(),
];
return problems;
}
}
module.exports = FMCDeclaration;

View File

@@ -0,0 +1,68 @@
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
const Token = require('./token');
const TypeParameters = require('./type-parameters');
/**
* @typedef {import('./modifier')} Modifier
*/
class ImportDeclaration extends Declaration {
/**
* @param {Token} docs
* @param {Modifier[]} modifiers
* @param {Token[]} nameparts
* @param {Token} static_
* @param {Token} asterisk
* @param {Token} semicolon
*/
constructor(docs, modifiers, nameparts, static_, asterisk, semicolon) {
super(null, docs, modifiers);
this.nameparts = nameparts;
this.static_ = static_;
this.asterisk = asterisk;
this.semicolon = semicolon;
}
/**
* Returns the dotted portion of the import declaration (excluding any demand-load part)
*/
getDottedName() {
return this.nameparts.map(x => x.text).join('.');
}
lastToken() {
return this.semicolon || this.asterisk || this.nameparts.slice(-1)[0];
}
validate() {
const checkModifierIsStatic = () => {
if (this.static_ && this.static_.text !== 'static') {
return ParseProblem.syntaxError(this.static_);
}
}
const checkNoInvalidModifiers = () => {
return this.modifiers.map(modifier => {
if (modifier instanceof Token) {
return ParseProblem.syntaxError(modifier);
}
if (modifier instanceof TypeParameters) {
return ParseProblem.syntaxError(modifier.open);
}
})
}
/** @type {ParseProblem[]} */
const problems = [
checkModifierIsStatic(),
...ParseProblem.checkNonKeywordIdents(this.nameparts),
ParseProblem.checkSemicolon(this),
...checkNoInvalidModifiers(),
];
return problems;
}
}
module.exports = ImportDeclaration;

View File

@@ -0,0 +1,15 @@
/**
* @typedef {import('./annotation')} Annotation
* @typedef {import('./type-parameters')} TypeParameters
* @typedef {import('./token')} Token
*
* Each Modifier is one of
* - a token representing a modifier keyword (e.g. public, static, etc.)
* - an Annotation (e.g. @Override)
* - or a TypeParameters section (e.g. <T extends Object>)
* These can typically appear in any order before a declaration
*
* @typedef {Token|Annotation|TypeParameters} Modifier
*/
module.exports = {}

View File

@@ -0,0 +1,39 @@
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
/**
* @typedef {import('./modifier')} Modifier
* @typedef {import('./token')} Token
*/
class PackageDeclaration extends Declaration {
/**
* @param {Token} docs
* @param {Modifier[]} modifiers
* @param {Token[]} nameparts
* @param {Token} semicolon
*/
constructor(docs, modifiers, nameparts, semicolon) {
super(null, docs, modifiers);
this.nameparts = nameparts;
this.semicolon = semicolon;
}
dottedName() {
return this.nameparts.map(t => t.text).join('.');
}
lastToken() {
return this.semicolon || this.nameparts.slice(-1)[0];
}
validate() {
/** @type {ParseProblem[]} */
const problems = [
ParseProblem.checkSemicolon(this),
...ParseProblem.checkNonKeywordIdents(this.nameparts),
];
return problems;
}
}
module.exports = PackageDeclaration;

View File

@@ -0,0 +1,33 @@
const Declaration = require('./declaration');
/**
* @typedef {import('./modifier')} Modifier
* @typedef {import('./typeident')} TypeIdent
* @typedef {import('./token')} Token
*/
/**
* A single parameter declaration
*/
class ParameterDeclaration extends Declaration {
/**
* @param {Modifier[]} modifiers
* @param {TypeIdent} type
* @param {Token} varargs
* @param {Token} name
* @param {Token} comma
*/
constructor(modifiers, type, varargs, name, comma) {
super(null, null, modifiers);
this.name = name;
this.type = type;
this.varargs = varargs;
this.comma = comma;
}
lastToken() {
return this.comma || this.name;
}
}
module.exports = ParameterDeclaration;

View File

@@ -0,0 +1,29 @@
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
/**
* @typedef {import('./modifier')} Modifier
* @typedef {import('./token')} Token
*/
class ParseSyntaxError extends Declaration {
/**
* @param {Token} docs
* @param {Modifier[]} modifiers
* @param {Token} errorToken
*/
constructor(docs, modifiers, errorToken) {
super(null, docs, modifiers);
this.errorToken = errorToken;
}
validate() {
if (!this.errorToken) {
return [];
}
return [
ParseProblem.syntaxError(this.errorToken),
]
}
}
module.exports = ParseSyntaxError;

View File

@@ -0,0 +1,134 @@
const ProblemSeverity = require('./problem-severity');
const Token = require('./token');
/**
* @typedef {import('./import')} ImportDeclaration
* @typedef {import('./modifier')} Modifier
* @typedef {import('./package')} PackageDeclaration
* @typedef {import('./problem-severity').Severity} Severity
*/
class ParseProblem {
/**
* @param {Token|Token[]} token
* @param {string} message
* @param {Severity} severity
*/
constructor(token, message, severity) {
this.startIdx = (Array.isArray(token) ? token[0] : token).source_idx;
const lastToken = (Array.isArray(token) ? token[token.length - 1] : token);
this.endIdx = lastToken.source_idx + lastToken.text.length;
this.message = message;
this.severity = severity;
}
/**
* @param {Modifier[]} mods
*/
static checkDuplicateModifiers(mods) {
const done = new Set();
const res = [];
for (let mod of mods) {
if (mod instanceof Token) {
if (done.has(mod.text)) {
res.push(new ParseProblem(mod, `Duplicate modifier: ${mod.text}`, ProblemSeverity.Error));
}
done.add(mod.text);
}
}
return res;
}
static checkConflictingModifiers(mods) {
const modmap = new Map();
let res = [];
mods.filter(m => m instanceof Token).forEach(m => modmap.set(m.text, m));
const names = [...modmap.keys()];
const visibilities = names.filter(m => /^(public|private|protected)$/.test(m));
if (visibilities.length > 1) {
const visnames = visibilities.map(m => `'${m}'`).join(', ').replace(/, (?='\w+'$)/, ' and ');
res = visibilities.map(m => new ParseProblem(modmap.get(m), `Conflicting modifiers: ${visnames}`, ProblemSeverity.Error));
}
if (names.includes('abstract')) {
if (names.includes('final')) {
res.push(new ParseProblem(modmap.get('final'), `Declarations cannot be both 'abstract' and 'final'`, ProblemSeverity.Error));
}
if (names.includes('native')) {
res.push(new ParseProblem(modmap.get('native'), `Declarations cannot be both 'abstract' and 'native'`, ProblemSeverity.Error));
}
}
return res;
}
/**
* @param {Modifier[]} mods
* @param {'class'|'interface'|'enum'|'@interface'|'field'|'method'|'constructor'|'initializer'} decl_kind
*/
static checkAccessModifiers(mods, decl_kind) {
let valid_mods = /^$/;
switch (decl_kind) {
case 'class': valid_mods = /^(public|final|abstract|strictfp)$/; break;
case 'interface': valid_mods = /^(public|abstract|strictfp)$/; break;
case '@interface': valid_mods = /^(public)$/; break;
case 'enum': valid_mods = /^(public|final)$/; break;
case 'field': valid_mods = /^(public|private|protected|static|final|volatile|transient)$/; break;
case 'method': valid_mods = /^(public|private|protected|static|final|abstract|native|strictfp|synchronized)$/; break;
case 'constructor': valid_mods = /^(public|protected|native)$/; break;
case 'initializer': valid_mods = /^(static)$/; break;
}
const problems = [];
for (let mod of mods) {
if (mod instanceof Token) {
if (!valid_mods.test(mod.text)) {
problems.push(new ParseProblem(mod, `'${mod.text}' is not a valid modifier for ${decl_kind} declarations`, ProblemSeverity.Warning));
}
const redundant = (mod.text === 'abstract' && decl_kind === 'interface')
|| (mod.text === 'final' && decl_kind === 'enum');
if (redundant) {
problems.push(new ParseProblem(mod, `'${mod.text}' is redundant for a ${decl_kind} declaration`, ProblemSeverity.Hint));
}
}
}
return problems;
}
/**
* @param {PackageDeclaration|ImportDeclaration} o
*/
static checkSemicolon(o) {
if (!o.semicolon) {
const lastToken = o.lastToken();
return new ParseProblem(lastToken, 'Missing operator or semicolon', ProblemSeverity.Error);
}
}
/**
* @param {Token[]} tokens
*/
static checkNonKeywordIdents(tokens) {
const res = [];
const KEYWORDS = /^(abstract|assert|break|case|catch|class|const|continue|default|do|else|enum|extends|final|finally|for|goto|if|implements|import|interface|native|new|package|private|protected|public|return|static|strictfp|super|switch|synchronized|throw|throws|transient|try|volatile|while)$/;
const PRIMITIVE_TYPE_KEYWORDS = /^(int|boolean|byte|char|double|float|long|short|void)$/
const LITERAL_VALUE_KEYWORDS = /^(this|true|false|null)$/;
const OPERATOR_KEYWORDS = /^(instanceof)$/;
for (let token of tokens) {
let iskw = KEYWORDS.test(token.text) || PRIMITIVE_TYPE_KEYWORDS.test(token.text) || LITERAL_VALUE_KEYWORDS.test(token.text) || OPERATOR_KEYWORDS.test(token.text);
if (iskw) {
const problem = new ParseProblem(token, `'${token.text}' is a keyword and cannot be used as an identifier`, ProblemSeverity.Error);
res.push(problem);
}
}
return res;
}
/**
* @param {Token} token
*/
static syntaxError(token) {
if (!token) return null;
return new ParseProblem(token, 'Unsupported, invalid or incomplete declaration', ProblemSeverity.Error);
}
}
module.exports = ParseProblem;

View File

@@ -0,0 +1,24 @@
/**
* @typedef {import('./import')} ImportDeclaration
* @typedef {import('./package')} PackageDeclaration
* @typedef {import('./parse-error')} ParseSyntaxError
* @typedef {import('./type')} TypeDeclaration
*/
class ParseResult {
/**
*
* @param {PackageDeclaration} package_decl
* @param {ImportDeclaration[]} imports
* @param {TypeDeclaration[]} types
* @param {ParseSyntaxError[]} invalids
*/
constructor(package_decl, imports, types, invalids) {
this.package = package_decl;
this.imports = imports;
this.types = types;
this.invalids = invalids;
}
}
module.exports = ParseResult;

View File

@@ -0,0 +1,8 @@
/**
* @typedef {1|2|3|4} Severity
* @type {{ Error:1, Warning:2, Information:3, Hint:4 }}
* these match the vscode DiagnosticSeverity values
*/
const ProblemSeverity = { Error:1, Warning:2, Information:3, Hint:4 };
module.exports = ProblemSeverity;

View File

@@ -0,0 +1,45 @@
/**
* @typedef {import('./import')} ImportDeclaration
*/
/**
* Class representing a resolved import.
*
* Each instance holds an array of types that would be resolved by the specified import.
* Each type is mapped to an MTI which lists the implementation details of the type (fields, methods, etc).
*
*/
class ResolvedImport {
/**
* @param {ImportDeclaration} import_decl
* @param {RegExpMatchArray} matches
* @param {Map<string, import('../mti').Type>} typemap
* @param {'owner-package'|'import'|'implicit-import'} import_kind
*/
constructor(import_decl, matches, typemap, import_kind) {
/**
* The associated import declaration.
* - this value is null for owner-package and implicit-imports
*/
this.import = import_decl;
/**
* Array of fully qualified type names in JRE format resolved in this import
*/
this.fullyQualifiedNames = Array.from(matches);
/**
* The map of fully-qualified type names to MTIs
*/
this.types = new Map(matches.map(name => [name, typemap.get(name)]));
/**
* What kind of import this is:
* - `"owner-package"`: types that are implicitly imported from the same package as the declared module
* - `"import"`: types that are inclduded via an import declaration specified in the module
* - `"implicit-import"`: types that are included without any explicit import (`java.lang.*` for example)
*/
this.import_kind = import_kind;
}
}
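// Illustrative shape (hypothetical values, not part of the original commit): a demand-load
// "import java.util.*;" declaration might resolve to an instance where
//   import_kind === 'import'
//   fullyQualifiedNames includes 'java.util.List', 'java.util.Map', ...
//   types maps each of those names to its MTI from the typemap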
module.exports = ResolvedImport;

View File

@@ -0,0 +1,105 @@
/**
* @typedef {import('./token')} Token
* @typedef {import('./type')} TypeDeclaration
*/
/**
* Class representing a parsed and resolved type
*
* Each `ResolvedType` consists of a linked set of parsed `TypeParts` and an array dimensions count.
* Each `TypePart` is a single dotted type with optional type arguments.
*
* When parsing, the first type part matches all dotted idents up to the first type with arguments - after
* that, there is a single type part for each further enclosed type.
*
* Examples:
*
* int -> one TypePart, arrdims = 0
* int[][] -> one TypePart, arrdims = 2
* List<String> -> one type part with one typeargs entry
* List<String>.InnerType -> two type parts (List<String> / InnerType)
* List<String>.InnerType.AnotherInner -> three type parts (List<String> / InnerType / AnotherInner)
* java.util.List<String>.InnerType<Object>.AnotherInner -> three type parts (java.util.List<String> / InnerType<Object> / AnotherInner)
* java.util.List.InnerType.AnotherInner -> one type part
*
* The reason for the non-obvious splitting is that the first part of the type could incorporate a package name - we
* cannot tell which parts of the name are packages and which are types/enclosed types until we try to resolve it.
* But type arguments are only allowed on types, so any qualifiers that appear after type arguments can only be a type and
* so we split on each single identifier.
*
*/
class ResolvedType {
static TypePart = class TypePart {
/**
* The list of type arguments
* @type {ResolvedType[]}
*/
typeargs = null;
/**
* The outer type if this is an enclosed generic type
* @type {ResolvedType.TypePart}
*/
outer = null;
inner = null;
/**
* @param {ResolvedType} owner
* @param {string} name
* @param {ResolvedType.TypePart} outer
*/
constructor(owner, name, outer) {
this.owner = owner;
this.name = name;
this.outer = outer;
}
get label() {
return this.name + (this.typeargs ? `<${this.typeargs.map(arg => arg.label).join(',')}>` : '');
}
}
/** @type {ResolvedType.TypePart[]} */
parts = [];
/**
* number of array dimensions for this type
*/
arrdims = 0;
/**
* Error reason if parsing failed.
*/
error = '';
/**
* The resolved MTIs that match this type. This will be an empty array if the type cannot be found.
* @type {import('../mti').Type[]}
*/
mtis = [];
/**
* During parsing, add a new type part
* @param {string} [name]
* @param {ResolvedType.TypePart} [outer]
*/
addTypePart(name = '', outer = null) {
const p = new ResolvedType.TypePart(this, name, outer);
this.parts.push(p);
return p;
}
getDottedRawType() {
// most types will only have one part
if (this.parts.length === 1)
return this.parts[0].name;
return this.parts.map(p => p.name).join('.');
}
get label() {
return this.parts.map(p => p.label).join('.') + '[]'.repeat(this.arrdims);
}
};
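// Illustrative sketch (hypothetical, not part of the original commit): building
// "java.util.List<String>.InnerType" by hand produces two parts - the first named
// "java.util.List" carrying one typearg, the second named "InnerType":
//
//   const rt = new ResolvedType();
//   const first = rt.addTypePart('java.util.List');
//   first.typeargs = [stringType];   // stringType: another ResolvedType whose label is 'String' (assumed)
//   rt.addTypePart('InnerType', first);
//   rt.label;                        // -> 'java.util.List<String>.InnerType'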
module.exports = ResolvedType;

View File

@@ -0,0 +1,17 @@
class Token {
/**
*
* @param {number} source_idx
* @param {string} text
* @param {string} simplified_text
* @param {number} simplified_text_idx
*/
constructor(source_idx, text, simplified_text, simplified_text_idx) {
this.source_idx = source_idx;
this.text = text;
this.simplified_text = simplified_text;
this.simplified_text_idx = simplified_text_idx;
}
}
module.exports = Token;

View File

@@ -0,0 +1,17 @@
/**
* @typedef {import('./token')} Token
*/
class TypeParameters {
/**
*
* @param {Token} open
* @param {Token} close
*/
constructor(open, close) {
this.open = open;
this.close = close;
}
}
module.exports = TypeParameters;

View File

@@ -0,0 +1,229 @@
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
const ProblemSeverity = require('./problem-severity');
const ResolvedImport = require('../import-resolver').ResolvedImport;
const { resolveTypeIdents } = require('../type-resolver');
const Token = require('./token');
/**
* @typedef {import('./import')} ImportDeclaration
* @typedef {import('./fmc')} FMCDeclaration
* @typedef {import('./modifier')} Modifier
* @typedef {import('./parameter')} ParameterDeclaration
* @typedef {import('./typeident')} TypeIdent
*/
/**
* Represents a single Java type (class, interface, enum or @-interface) declaration
*/
class TypeDeclaration extends Declaration {
/**
*
* @param {TypeDeclaration} owner_type
* @param {Token} docs
* @param {Modifier[]} modifiers
* @param {'class'|'interface'|'enum'|'@interface'} kind
* @param {Token} name
*/
constructor(owner_type, docs, modifiers, kind, name) {
super(owner_type, docs, modifiers);
this.kind = kind;
this.name = name;
/** @type {FMCDeclaration[]} */
this.declarations = [];
/** @type {{decl_kw:Token, typelist:TypeIdent[]}[]} */
this.super_declarations = [];
}
/**
* returns the $-qualified name of this type (excluding package)
*/
qualifiedName() {
if (!this.owner_type) {
// top-level type
return this.name.text;
}
const parts = [];
for (let t = this; t;) {
parts.unshift(t.name.text);
// @ts-ignore
t = t.owner_type;
}
return parts.join('$');
}
qualifiedDottedName() {
return this.qualifiedName().replace(/[$]/g, '.');
}
validate() {
const checkSuperDeclarations = () => {
const res = {
extends: [],
implements: [],
first: this.super_declarations[0],
};
const problems = [];
this.super_declarations.forEach((sd) => res[sd.decl_kw.text].push(sd));
for (let i = 1; i < res.extends.length; i++) {
problems.push(new ParseProblem(res.extends[i].decl_kw, `Types cannot have multiple 'extends' declarations`, ProblemSeverity.Error));
}
for (let i = 1; i < res.implements.length; i++) {
problems.push(new ParseProblem(res.implements[i].decl_kw, `Types cannot have multiple 'implements' declarations`, ProblemSeverity.Error));
}
if (res.extends.length > 0 && res.implements.length > 0 && res.first.decl_kw.text !== 'extends') {
problems.push(new ParseProblem(res.extends[0].decl_kw, `'extends' declaration must appear before 'implements'`, ProblemSeverity.Error));
}
if (this.kind === 'class' && res.extends.length === 1 && res.extends[0].typelist.length > 1) {
problems.push(new ParseProblem(res.extends[0].decl_kw, `Class types cannot extend from multiple super types`, ProblemSeverity.Error));
}
return problems;
};
const checkDuplicateFieldNames = () => {
// get list of fields, sorted by name
const fields = this.declarations
.filter((d) => d.kind === 'field')
.slice()
.sort((a, b) => a.name.text.localeCompare(b.name.text));
const probs = [];
let name = '';
fields.forEach((decl, idx, arr) => {
const next = arr[idx + 1];
if ((next && decl.name.text === next.name.text) || decl.name.text === name) {
probs.push(new ParseProblem(decl.name, `Duplicate field name: '${decl.name.text}'`, ProblemSeverity.Error));
}
name = decl.name.text;
});
return probs;
};
let problems = [
...ParseProblem.checkDuplicateModifiers(this.modifiers),
...ParseProblem.checkConflictingModifiers(this.modifiers),
...ParseProblem.checkAccessModifiers(this.modifiers, this.kind),
...ParseProblem.checkNonKeywordIdents([this.name]),
...ParseProblem.checkNonKeywordIdents(this.declarations.map((d) => d.name)),
...checkDuplicateFieldNames(),
...checkSuperDeclarations(),
...this.declarations.reduce((probs, d) => {
return [...probs, ...d.validate()];
}, []),
];
return problems;
}
/**
* @param {string} package_name
* @param {ResolvedImport[]} imports
* @param {Map<string,*>} typemap
*/
validateTypes(package_name, imports, typemap) {
const problems = [];
const fqtypename = package_name ? `${package_name}.${this.qualifiedName()}` : this.qualifiedName();
/** @type {TypeIdent[]} */
let typeidents = [];
// check extends
this.super_declarations.filter(sd => sd.decl_kw.text === 'extends').forEach(sd => {
sd.typelist.forEach(typeident => typeidents.push(typeident));
})
const resolved_extends = resolveTypeIdents(typeidents, package_name, imports, typemap);
resolved_extends.forEach((rt,i) => {
checkResolvedType(rt, typeidents[i]);
if (this.kind === 'class' && rt.mtis.length === 1) {
// class extend type must be a class
if (rt.mtis[0].typeKind !== 'class') {
problems.push(new ParseProblem(typeidents[i].tokens, `Class '${this.name.text}' cannot extend from ${rt.mtis[0].typeKind} '${rt.label}'; the specified type must be a non-final class.`, ProblemSeverity.Error));
}
// class extend type cannot be final
else if (rt.mtis[0].hasModifier('final')) {
problems.push(new ParseProblem(typeidents[i].tokens, `Class '${this.name.text}' cannot extend from final class '${rt.mtis[0].fullyDottedRawName}'.`, ProblemSeverity.Error));
}
}
});
// check implements
typeidents = [];
this.super_declarations.filter(sd => sd.decl_kw.text === 'implements').forEach(sd => {
sd.typelist.forEach(typeident => typeidents.push(typeident));
if (this.kind !== 'class' && this.kind !== 'enum') {
problems.push(new ParseProblem(sd.decl_kw, `implements declarations are not permitted for ${this.kind} types`, ProblemSeverity.Error));
}
})
const resolved_implements = resolveTypeIdents(typeidents, package_name, imports, typemap);
resolved_implements.forEach((rt,i) => {
checkResolvedType(rt, typeidents[i]);
if (/class|enum/.test(this.kind) && rt.mtis.length === 1) {
// class implements types must be interfaces
if (rt.mtis[0].typeKind !== 'interface') {
problems.push(new ParseProblem(typeidents[i].tokens, `Type '${this.name.text}' cannot implement ${rt.mtis[0].typeKind} type '${rt.mtis[0].fullyDottedRawName}'; the specified type must be an interface.`, ProblemSeverity.Error));
}
else if (!this.findModifier('abstract')) {
// if the class is not abstract, it must implement all the methods in the interface
// - we can't check this until the MTI for the class is complete
const unimplemented_methods = rt.mtis[0].methods.filter(m => true);
unimplemented_methods.forEach(method => {
problems.push(new ParseProblem(typeidents[i].tokens, `Type '${this.name.text}' is not abstract and does not implement method '${method.toDeclSource()}' declared in interface '${rt.mtis[0].fullyDottedRawName}'.`, ProblemSeverity.Error));
})
}
}
});
// check field, method-return and parameter types
typeidents = [];
this.declarations.forEach((d) => {
if (d.kind !== 'constructor') {
typeidents.push(d.type);
}
if (d.parameters) {
d.parameters.forEach((p) => {
typeidents.push(p.type);
});
}
});
const resolved_types = resolveTypeIdents(typeidents, fqtypename, imports, typemap);
// warn about missing and ambiguous types
function checkResolvedType(rt, typeident) {
if (rt.error) {
problems.push(new ParseProblem(typeident.tokens, rt.error, ProblemSeverity.Error));
return;
}
if (rt.mtis.length === 0) {
problems.push(new ParseProblem(typeident.tokens, `Type not found: ${rt.label}`, ProblemSeverity.Error));
return;
}
if (rt.mtis.length > 1) {
const names = rt.mtis.map(mti => mti.fullyDottedRawName).join(`' or '`);
problems.push(new ParseProblem(typeident.tokens, `Ambiguous type: ${rt.label} - could be '${names}'.`, ProblemSeverity.Error));
return;
}
rt.mtis.forEach(mti => {
// void arrays are illegal
if (mti.name.startsWith('void[')) {
problems.push(new ParseProblem(typeident.tokens, `Primitive void arrays are not a valid type.`, ProblemSeverity.Error));
}
})
}
resolved_types.forEach((rt,i) => {
checkResolvedType(rt, typeidents[i]);
// check any type arguments
rt.parts.filter(p => p.typeargs).forEach(p => {
p.typeargs.forEach(typearg => {
checkResolvedType(typearg, typeidents[i]);
// check type arguments are not primitives (primitive arrays are ok)
if (typearg.mtis.length === 1) {
if (typearg.mtis[0].typeKind === 'primitive') {
problems.push(new ParseProblem(typeidents[i].tokens, `Type arguments cannot be primitive types.`, ProblemSeverity.Error));
}
}
})
});
});
return problems;
}
}
module.exports = TypeDeclaration;

View File

@@ -0,0 +1,24 @@
/**
* @typedef {import('./token')} Token
* @typedef {import('./resolved-type')} ResolvedType
*/
/**
* Class to represent a declared type in methods, fields, parameters and variables
*/
class TypeIdent {
/**
* @param {Token[]} tokens
*/
constructor(tokens) {
this.tokens = tokens;
/** @type {ResolvedType} */
this.resolved = null;
}
lastToken() {
return this.tokens[this.tokens.length - 1];
}
}
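// Usage sketch (assumed, for illustration only): a declaration such as `List<String>[] names` is
// represented by a TypeIdent whose tokens spell out "List<String>[]" - consumers such as
// resolveTypeIdents() simply join the token.text values back together to rebuild the type label.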
module.exports = TypeIdent;

View File

@@ -0,0 +1,264 @@
const { ResolvedImport } = require('./import-resolver');
const MTI = require('./mti');
const ResolvedType = require('./parsetypes/resolved-type');
/**
* Parse a type into its various components
* @param {string} label
* @returns {{type:ResolvedType, error:string}}
*/
function parse_type(label) {
const type = new ResolvedType();
let re = /([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)|(\.[a-zA-Z_]\w*)|[<,>]|((?:\[\])+)|( +)|./g;
let parts = [type.addTypePart()];
for (let m; m = re.exec(label);) {
if (m[4]) {
// ignore ws
continue;
}
if (!parts[0].name) {
if (m[1]) {
parts[0].name = m[1];
continue;
}
return { type, error: 'Missing type identifier' };
}
if (m[0] === '<') {
if (!parts[0].typeargs && !parts[0].owner.arrdims) {
// start of type arguments - start a new type
const t = new ResolvedType();
parts[0].typeargs = [t];
parts.unshift(t.addTypePart());
continue;
}
return { type, error: `Unexpected '<' character` };
}
if (m[0] === ',') {
if (parts[1] && parts[1].typeargs) {
// type argument separator - replace the type on the stack
const t = new ResolvedType();
parts[1].typeargs.push(t);
parts[0] = t.addTypePart();
continue;
}
return { type, error: `Unexpected ',' character` };
}
if (m[0] === '>') {
if (parts[1] && parts[1].typeargs) {
// end of type arguments
parts.shift();
continue;
}
return { type, error: `Unexpected '>' character` };
}
if (m[2]) {
if (parts[0].typeargs || parts[0].outer) {
// post-type-args enclosed type
parts[0] = parts[0].inner = parts[0].owner.addTypePart(m[2].slice(1), parts[0]);
continue;
}
return { type, error: `Unexpected '.' character` };
}
if (m[3]) {
parts[0].owner.arrdims = m[3].length / 2;
continue;
}
return { type, error: `Invalid type` };
}
if (parts.length !== 1) {
// one or more missing >
return { type, error: `Missing >` };
}
return { type, error: '' };
}
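// Worked example (illustrative, not executed here): parse_type('java.util.Map<String, int[]>')
// should yield a ResolvedType whose first part is named 'java.util.Map' with two type arguments -
// one ResolvedType for 'String' and one for 'int' with arrdims === 1 - and an empty error string.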
/**
* Construct a regex to search for an enclosed type in the current and outer scopes of a given type
*
* @param {string} fully_qualified_scope the JRE name (a.b.X$Y) of the current type scope
* @param {string} dotted_raw_typename the dotted name of the type we are searching for
*/
function createTypeScopeRegex(fully_qualified_scope, dotted_raw_typename) {
// split the type name across enclosed type boundaries
const scopes = fully_qualified_scope.split('$');
// the first scope is the dotted package name and top-level type - we need to escape the package-qualifier dots for regex
scopes[0] = scopes[0].replace(/\./g,'[.]');
// if the typename we are searching represents an enclosed type, the type-qualifier dots must be replaced with $
const enclosed_raw_typename = dotted_raw_typename.replace(/\./g,'[$]');
// build up the list of possible type matches based upon each outer scope of the type
const enclosed_type_regexes = [];
while (scopes.length) {
enclosed_type_regexes.push(`${scopes.join('[$]')}[$]${enclosed_raw_typename}`);
scopes.pop();
}
// the final regex is an exact match of possible type names, sorted from inner scope to outer (top-level) scope
return new RegExp(`^(${enclosed_type_regexes.join('|')})$`);
}
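// Illustrative example: for fully_qualified_scope 'com.example.Outer$Inner' and
// dotted_raw_typename 'Util.Helper', the regex built above is equivalent to
//   /^(com[.]example[.]Outer[$]Inner[$]Util[$]Helper|com[.]example[.]Outer[$]Util[$]Helper)$/
// so the innermost enclosing scope is tried before the top-level type.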
/**
* Locate MTIs that match a type.
* @param {string} typename The type to resolve
* @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
* @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
* @param {Map<string,MTI.Type>} typemap the global list of types
*/
function resolveType(typename, fully_qualified_scope, resolved_imports, typemap) {
const { type, error } = parse_type(typename);
if (error) {
// don't try to find the type if the parsing failed
type.error = error;
return type;
}
// locate the MTIs for the type and type arguments
resolveCompleteType(type, fully_qualified_scope, resolved_imports, typemap);
return type;
}
/**
*
* @param {ResolvedType} type
* @param {string} fully_qualified_scope
* @param {ResolvedImport[]} resolved_imports
* @param {Map<string,MTI.Type>} typemap
*/
function resolveCompleteType(type, fully_qualified_scope, resolved_imports, typemap) {
type.mtis = findTypeMTIs(type.getDottedRawType(), type.arrdims, fully_qualified_scope, resolved_imports, typemap);
// resolve type arguments
type.parts.filter(p => p.typeargs).forEach(p => {
p.typeargs.forEach(typearg => {
resolveCompleteType(typearg, fully_qualified_scope, resolved_imports, typemap);
})
})
}
/**
* @param {string} dotted_raw_typename
* @param {number} arraydims
* @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
* @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
* @param {Map<string,MTI.Type>} typemap
*/
function findTypeMTIs(dotted_raw_typename, arraydims, fully_qualified_scope, resolved_imports, typemap) {
let mtis = findRawTypeMTIs(dotted_raw_typename, fully_qualified_scope, resolved_imports, typemap);
if (arraydims > 0) {
// convert matches to array MTIs
mtis.forEach((mti,idx,arr) => {
arr[idx] = MTI.makeArrayType(mti, arraydims);
})
}
return mtis;
}
/**
* Match a dotted type name to one or more MTIs
* @param {string} dotted_raw_typename
* @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
 * @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
 * @param {Map<string,MTI.Type>} typemap
*/
function findRawTypeMTIs(dotted_raw_typename, fully_qualified_scope, resolved_imports, typemap) {
// first check if it's a simple primitive
if (/^(int|char|boolean|void|long|byte|short|float|double)$/.test(dotted_raw_typename)) {
// return the primitive type
return [MTI.fromPrimitive(dotted_raw_typename)];
}
// create a regex to search for the type name
// - the first search is for exact type matches inside the current type scope (and any parent type scopes)
let search = createTypeScopeRegex(fully_qualified_scope, dotted_raw_typename);
let matched_types =
resolved_imports.map(ri => ({
ri,
mtis: ri.fullyQualifiedNames.filter(fqn => search.test(fqn)).map(fqn => ri.types.get(fqn))
}))
.filter(x => x.mtis.length);
if (!matched_types.length) {
// if the type was not found in the current type scope, construct a new search for the imported types.
// - since we don't know if the type name includes package qualifiers or not, this regex allows for implicit
// package prefixes (todo - need to figure out static type imports)
search = new RegExp(`^(.+?[.])?${dotted_raw_typename.replace(/\./g,'[.$]')}$`);
// search the imports for the type
matched_types =
resolved_imports.map(ri => ({
ri,
mtis: ri.fullyQualifiedNames.filter(fqn => search.test(fqn)).map(fqn => ri.types.get(fqn))
}))
.filter(x => x.mtis.length);
}
// if the type matches multiple import entries, exact imports take priority over demand-load imports
let exact_import_matches = matched_types.filter(x => x.ri.import && !x.ri.import.asterisk);
if (exact_import_matches.length) {
if (exact_import_matches.length < matched_types.length) {
matched_types = exact_import_matches;
}
}
if (!matched_types.length) {
// if the type doesn't match any import, the final option is a fully qualified match across all types in all libraries
search = new RegExp(`^${dotted_raw_typename.replace(/\./g,'[.$]')}$`);
for (let typename of typemap.keys()) {
if (search.test(typename)) {
matched_types = [{
ri: null,
mtis: [typemap.get(typename)]
}];
break;
}
}
}
// at this point, we should (hopefully) have a single matched type
// - if the matched_types array is empty, the type is not found
// - if the matched_types array has more than one entry, the type name matched types from multiple imports
// - if the matched_types array has a single entry with multiple MTIs, the type name matched multiple types within one import
return matched_types.reduce((mtis,mt) => [...mtis, ...mt.mtis] , []);
}
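// Resolution order sketch (illustrative): with `import java.util.*;` in a file whose class is
// com.example.Main, a reference to 'List' is not found by the type-scope regex, but the second
// search matches 'java.util.List' through the demand-load import. A reference to an un-imported
// fully-qualified name such as 'android.view.View' fails both searches and is picked up by the
// final fully-qualified lookup across the whole typemap.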
/**
* Converts an array of type name strings to resolved types
* @param {string[]} types
* @param {string} fully_qualified_scope the JRE name of the type scope we are resolving in
* @param {ResolvedImport[]} resolved_imports the list of resolved imports (and types associated with them)
* @param {Map<string,MTI.Type>} typemap
*/
function resolveTypes(types, fully_qualified_scope, resolved_imports, typemap) {
return types.map(typename => resolveType(typename, fully_qualified_scope, resolved_imports, typemap));
}
/**
* Converts an array of TypeIdent instances to resolved types
* @param {import('./parsetypes/typeident')[]} types
* @param {string} fully_qualified_scope the JRE name of the type scope we are resolving in
* @param {ResolvedImport[]} resolved_imports the list of resolved imports (and types associated with them)
* @param {Map<string,MTI.Type>} typemap
*/
function resolveTypeIdents(types, fully_qualified_scope, resolved_imports, typemap) {
const names = types.map(typeident =>
typeident.tokens.map(token => token.text).join('')
);
return resolveTypes(names, fully_qualified_scope, resolved_imports, typemap);
}
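// Example wiring (assumed names, for illustration): callers such as TypeDeclaration.validateTypes()
// use this roughly as
//   const resolved = resolveTypeIdents(field_and_param_typeidents, 'com.example.Main', imports.resolved, imports.typemap);
// where each entry in the result lines up index-for-index with the TypeIdent it was created from,
// which is how the caller pairs resolved[i] with typeidents[i] when reporting problems.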
module.exports = {
parse_type,
resolveTypes,
resolveTypeIdents,
ResolvedType,
}

View File

@@ -11,52 +11,106 @@ const {
CompletionItem,
CompletionItemKind,
TextDocumentSyncKind,
Position,
//TextDocumentPositionParams
} = require('vscode-languageserver');
const { TextDocument } = require('vscode-languageserver-textdocument');
const MTI = require('./java/mti');
const { parse, ParseProblem, ProblemSeverity, ParseResult } = require('./java/parser');
const { resolveImports } = require('./java/import-resolver');
let androidLibrary = null;
function loadAndroidLibrary(retry) {
try {
androidLibrary = MTI.unpackJSON('/tmp/jarscanner/android-25/android-25.json');
connection.console.log(`Android type cache loaded: ${androidLibrary.types.length} types from ${androidLibrary.packages.length} packages.`);
} catch (e) {
connection.console.log(`Failed to load android type cache`);
if (retry) {
return;
}
connection.console.log(`Rebuilding type cache...`);
const jarscanner = require(`jarscanner/jarscanner`);
fs.mkdir('/tmp/jarscanner', (err) => {
if (err && err.errno !== -17) {
connection.console.log(`Cannot create type cache folder. ${err.message}.`);
return;
}
jarscanner.process_android_sdk_source(
{
destpath: '/tmp/jarscanner',
sdkpath: process.env['ANDROID_SDK'],
api: 25,
cleandest: true,
},
(err) => {
if (err) {
connection.console.log(`Android cache build failed. ${err.message}.`);
return;
}
loadAndroidLibrary(true);
}
);
});
}
}
// Create a connection for the server. The connection uses Node's IPC as a transport.
// Also include all preview / proposed LSP features.
let connection = createConnection(ProposedFeatures.all);
/**
* @typedef LiveParseInfo
* @property {string} uri
* @property {JavaTokenizer.LineInfo[]} lines
* @property {{startState: string, states: string[], endState: string}[]} states
*/
///** @type {LiveParseInfo[]} */
//const liveParsers = [];
/** @type {{content: string, uri: string, result: ParseResult, positionAt:(n) => Position, indexAt:(p:Position) => number}} */
let parsed = null;
function reparse(uri, content) {
const result = parse(content);
parsed = {
content,
uri,
result,
positionAt(n) {
let line = 0,
last_nl_idx = 0,
character = 0;
if (n <= 0) return { line, character };
for (let idx = 0; ;) {
idx = this.content.indexOf('\n', idx) + 1;
if (idx === 0 || idx > n) {
if (idx === 0) n = content.length;
character = n - last_nl_idx;
return { line, character };
}
last_nl_idx = idx;
line++;
}
},
indexAt(pos) {
let idx = 0;
for (let i = 0; i < pos.line; i++) {
idx = this.content.indexOf('\n', idx) + 1;
if (idx === 0) {
return this.content.length;
}
}
return Math.min(idx + pos.character, this.content.length);
},
};
}
// Create a simple text document manager. The text document manager
// supports full document sync only
let documents = new TextDocuments({
/**
*
* @param {string} uri
@@ -65,125 +119,231 @@ const {
* @param {string} content
*/
create(uri, languageId, version, content) {
//connection.console.log(JSON.stringify({what:'create',uri,languageId,version,content}));
// tokenize the file content and build the initial parse state
connection.console.log(`create parse ${version}`);
reparse(uri, content);
//connection.console.log(res.imports.length.toString());
// const lines = JavaTokenizer.get().tokenizeSource(content);
// const initialParse = new JavaParser().parseLines(lines);
// liveParsers.push({
// uri,
// lines,
// states: initialParse,
// })
// console.log(initialParse.map(x => x.decls).filter(x => x.length).map(x => JSON.stringify(x, null, ' ')));
return { uri };
},
/**
*
* @param {TextDocument} document
* @param {import('vscode-languageserver').TextDocumentContentChangeEvent[]} changes
* @param {number} version
*/
update(document, changes, version) {
connection.console.log(JSON.stringify({ what: 'update', changes, version }));
//connection.console.log(`update ${version}`);
//return document;
if (parsed && document && parsed.uri === document.uri) {
changes.forEach((change) => {
/** @type {import('vscode-languageserver').Range} */
const r = change['range'];
if (r) {
const start_index = parsed.indexAt(r.start);
let end_index = start_index + (r.end.character - r.start.character);
if (r.end.line !== r.start.line) end_index = parsed.indexAt(r.end);
parsed.content = `${parsed.content.slice(0, start_index)}${change.text}${parsed.content.slice(end_index)}`;
}
});
//connection.console.log(JSON.stringify(parsed.content));
reparse(document.uri, parsed.content);
}
return document;
},
});
let hasConfigurationCapability = false;
let hasWorkspaceFolderCapability = false;
let hasDiagnosticRelatedInformationCapability = false;
connection.onInitialize((params) => {
process.nextTick(loadAndroidLibrary);
let capabilities = params.capabilities;
// Does the client support the `workspace/configuration` request?
// If not, we will fall back using global settings
hasConfigurationCapability = capabilities.workspace && !!capabilities.workspace.configuration;
hasWorkspaceFolderCapability = capabilities.workspace && !!capabilities.workspace.workspaceFolders;
hasDiagnosticRelatedInformationCapability =
capabilities.textDocument && capabilities.textDocument.publishDiagnostics && capabilities.textDocument.publishDiagnostics.relatedInformation;
return {
capabilities: {
textDocumentSync: TextDocumentSyncKind.Incremental,
// Tell the client that the server supports code completion
completionProvider: {
resolveProvider: true,
},
},
};
});
connection.onInitialized(() => {
if (hasConfigurationCapability) {
// Register for all configuration changes.
connection.client.register(DidChangeConfigurationNotification.type, undefined);
}
if (hasWorkspaceFolderCapability) {
connection.workspace.onDidChangeWorkspaceFolders((_event) => {
connection.console.log('Workspace folder change event received.');
});
}
});
// The example settings
/**
* @typedef ExampleSettings
* @property {number} maxNumberOfProblems
*/
// The global settings, used when the `workspace/configuration` request is not supported by the client.
// Please note that this is not the case when using this server with the client provided in this example
// but could happen with other clients.
const defaultSettings = { maxNumberOfProblems: 1000 };
let globalSettings = defaultSettings;
// Cache the settings of all open documents
/** @type {Map<string, Thenable<ExampleSettings>>} */
let documentSettings = new Map();
connection.onDidChangeConfiguration((change) => {
if (hasConfigurationCapability) {
// Reset all cached document settings
documentSettings.clear();
} else {
globalSettings = change.settings.androidJavaLanguageServer || defaultSettings;
}
// Revalidate all open text documents
documents.all().forEach(validateTextDocument);
});
function getDocumentSettings(resource) {
if (!hasConfigurationCapability) {
return Promise.resolve(globalSettings);
}
let result = documentSettings.get(resource);
if (!result) {
result = connection.workspace.getConfiguration({
scopeUri: resource,
section: 'androidJavaLanguageServer',
});
documentSettings.set(resource, result);
}
return result;
}
// Only keep settings for open documents
documents.onDidClose((e) => {
connection.console.log('doc closed');
parsed = null;
documentSettings.delete(e.document.uri);
});
// The content of a text document has changed. This event is emitted
// when the text document first opened or when its content has changed.
documents.onDidChangeContent((change) => {
connection.console.log(JSON.stringify(change));
validateTextDocument(change.document);
});
/**
* @param {{uri}} textDocument
*/
async function validateTextDocument(textDocument) {
/** @type {ParseProblem[]} */
let problems = [];
connection.console.log('validateTextDocument');
if (parsed && parsed.result) {
// package problem
if (parsed.result.package) {
problems = [...problems, ...parsed.result.package.validate()];
}
// import problems
problems = parsed.result.imports.reduce((problems, import_decl) => {
return [...problems, ...import_decl.validate()];
}, problems);
// type problems
problems = parsed.result.types.reduce((problems, type_decl) => {
return [...problems, ...type_decl.validate()];
}, problems);
// syntax problems
problems = parsed.result.invalids.reduce((problems, invalid) => {
return [...problems, ...invalid.validate()];
}, problems);
const package_name = parsed.result.package ? parsed.result.package.dottedName() : '';
const source_mtis = parsed.result.types.map(type_decl => {
return new MTI().addType(package_name, type_decl.getDocString(), type_decl.getAccessModifierValues(), type_decl.kind, type_decl.qualifiedName());
})
const imports = resolveImports(androidLibrary, parsed.result.imports, package_name, source_mtis);
// missing/invalid imports
problems = imports.unresolved.reduce((problems, unresolved) => {
const fqn = unresolved.nameparts.join('.');
return [...problems, new ParseProblem(unresolved.nameparts, `Unresolved import: ${fqn}`, ProblemSeverity.Warning)];
}, problems);
// resolved types
problems = parsed.result.types.reduce((problems, type_decl) => {
return [...problems, ...type_decl.validateTypes(package_name, imports.resolved, imports.typemap)];
}, problems);
// duplicate type names
/** @type {Map<string,import('./java/parsetypes/type')[]>} */
const typenames = new Map();
parsed.result.types.forEach(type_decl => {
const qname = type_decl.qualifiedName();
let list = typenames.get(qname);
if (!list) typenames.set(qname, list = []);
list.push(type_decl);
});
[...typenames.values()]
.filter(list => list.length > 1)
.forEach(list => {
problems = [...problems, ...list.map(type_decl => new ParseProblem(type_decl.name, `Duplicate type: ${type_decl.qualifiedDottedName()}`, ProblemSeverity.Error))];
});
}
const diagnostics = problems
.filter((p) => p)
.map((p) => {
const start = parsed.positionAt(p.startIdx);
const end = parsed.positionAt(p.endIdx);
/** @type {Diagnostic} */
let diagnostic = {
severity: p.severity,
range: {
start,
end,
},
message: p.message,
source: 'java-android',
};
return diagnostic;
});
connection.sendDiagnostics({ uri: textDocument.uri, diagnostics });
}
async function validateTextDocument2(textDocument) {
// In this simple example we get the settings for every validate run.
//let settings = await getDocumentSettings(textDocument.uri);
@@ -195,143 +355,154 @@ const {
let problems = 0;
let diagnostics = [];
while ((m = pattern.exec(text)) /* && problems < settings.maxNumberOfProblems */) {
problems++;
/** @type {Diagnostic} */
let diagnostic = {
severity: DiagnosticSeverity.Warning,
range: {
start: textDocument.positionAt(m.index),
end: textDocument.positionAt(m.index + m[0].length),
},
message: `${m[0]} is all uppercase.`,
source: 'ex',
};
if (hasDiagnosticRelatedInformationCapability) {
diagnostic.relatedInformation = [
{
location: {
uri: textDocument.uri,
range: Object.assign({}, diagnostic.range),
},
message: 'Spelling matters',
},
{
location: {
uri: textDocument.uri,
range: Object.assign({}, diagnostic.range),
},
message: 'Particularly for names',
},
];
}
diagnostics.push(diagnostic);
}
// Send the computed diagnostics to VS Code.
connection.sendDiagnostics({ uri: textDocument.uri, diagnostics });
}
connection.onDidChangeWatchedFiles((_change) => {
// Monitored files have change in VS Code
connection.console.log('We received a file change event');
});
// This handler provides the initial list of the completion items.
let allCompletionTypes = null;
connection.onCompletion(
/**
* @param {*} _textDocumentPosition TextDocumentPositionParams
*/
(_textDocumentPosition) => {
// The pass parameter contains the position of the text document in
// which code complete got requested. For the example we ignore this
// info and always provide the same completion items.
const lib = androidLibrary;
if (!lib) return [];
const typeKindMap = {
class: CompletionItemKind.Class,
interface: CompletionItemKind.Interface,
'@interface': CompletionItemKind.Interface,
enum: CompletionItemKind.Enum,
};
return (
allCompletionTypes ||
(allCompletionTypes = [
...'boolean byte char double float int long short void'.split(' ').map((t) => ({
label: t,
kind: CompletionItemKind.Keyword,
data: -1,
})),
...'public private protected static final abstract volatile native'.split(' ').map((t) => ({
label: t,
kind: CompletionItemKind.Keyword,
data: -1,
})),
...'false true null'.split(' ').map((t) => ({
label: t,
kind: CompletionItemKind.Value,
data: -1,
})),
...lib.types.map(
(t, idx) =>
/** @type {CompletionItem} */
({
label: t.dottedRawName,
kind: typeKindMap[t.typeKind],
data: idx,
})
),
])
);
}
);
// This handler resolves additional information for the item selected in
// the completion list.
connection.onCompletionResolve(
/**
* @param {CompletionItem} item
*/
(item) => {
const t = androidLibrary.types[item.data];
if (!t) {
return item;
}
item.detail = `${t.package}.${t.dottedRawName}`;
item.documentation = t.docs && {
kind: 'markdown',
value: `${t.typeKind} **${t.dottedName}**\n\n${
t.docs
.replace(/(<p ?.*?>)|(<\/?i>|<\/?em>)|(<\/?b>|<\/?strong>|<\/?dt>)|(<\/?tt>)|(<\/?code>|<\/?pre>)|(\{@link.+?\}|\{@code.+?\})|(<li>)|(<a href="\{@docRoot\}.*?">.+?<\/a>)|(<h\d>)|<\/?dd ?.*?>|<\/p ?.*?>|<\/h\d ?.*?>|<\/?div ?.*?>|<\/?[uo]l ?.*?>/gim, (_,p,i,b,tt,c,lc,li,a,h) => {
return p ? '\n\n'
: i ? '*'
: b ? '**'
: tt ? '`'
: c ? '\n```'
: lc ? lc.replace(/\{@\w+\s*(.+)\}/, (_,x) => `\`${x.trim()}\``)
: li ? '\n- '
: a ? a.replace(/.+?\{@docRoot\}(.*?)">(.+?)<\/a>/m, (_,p,t) => `[${t}](https://developer.android.com/${p})`)
: h ? `\n${'#'.repeat(1 + parseInt(h.slice(2,-1),10))} `
: '';
})
}`,
};
return item;
}
);
/*
connection.onDidOpenTextDocument((params) => {
// A text document got opened in VS Code.
// params.uri uniquely identifies the document. For documents store on disk this is a file URI.
// params.text the initial full content of the document.
connection.console.log(`${params.textDocument.uri} opened.`);
});
connection.onDidChangeTextDocument((params) => {
// The content of a text document did change in VS Code.
// params.uri uniquely identifies the document.
// params.contentChanges describe the content changes to the document.
connection.console.log(`${params.textDocument.uri} changed: ${JSON.stringify(params.contentChanges)}`);
});
connection.onDidCloseTextDocument((params) => {
// A text document got closed in VS Code.
// params.uri uniquely identifies the document.
connection.console.log(`${params.textDocument.uri} closed.`);
});
*/
// Make the text document manager listen on the connection
// for open, change and close text document events
documents.listen(connection);
// Listen on the connection
connection.listen();