Mirror of https://github.com/adelphes/android-dev-ext.git (synced 2025-12-23 01:48:18 +00:00)
first hacky version of source parsing and type checking
116
langserver/java/import-resolver.js
Normal file
@@ -0,0 +1,116 @@
/**
 * @typedef {import('./parsetypes/import')} ImportDeclaration
 */
const ResolvedImport = require('./parsetypes/resolved-import');

/**
 * Search a newline-separated list of type names for values that match a dotted import.
 *
 * @param {string} typenames newline-separated list of fully qualified type names
 * @param {string} dotted_import fully-qualified import name (e.g. "java.util")
 * @param {boolean} demandload true if this is a demand-load import
 */
function fetchImportedTypes(typenames, dotted_import, demandload) {
    const matcher = demandload
        // for demand-load, we search for any types that begin with the specified import name
        // - note that after the import text, only words and $ are allowed (because additional dots would imply a subpackage)
        ? new RegExp(`^${dotted_import.replace(/\./g, '[.$]')}[.$][\\w$]+$`, 'gm')
        // for exact-load, we search for any type that precisely matches the specified import name
        : new RegExp(`^${dotted_import.replace(/\./g, '[.$]')}$`, 'gm');

    // run the regex against the list of type names
    const matching_names = typenames.match(matcher);
    return matching_names;
}

/**
 * @param {string} typenames newline-separated list of fully qualified type names
 * @param {import('./parsetypes/import')} import_decl import declaration
 */
function resolveImportTypes(typenames, import_decl) {
    const dotted = import_decl.getDottedName();
    return fetchImportedTypes(typenames, dotted, !!import_decl.asterisk);
}

/**
 * Resolve a set of imports for a module.
 *
 * Note that the order of the resolved imports is important for correct type resolution:
 * - same-package imports are first,
 * - followed by import declarations (in order of declaration),
 * - followed by implicit packages
 *
 * @param {*} androidLibrary imported types from the Android platform library
 * @param {import('./parsetypes/import')[]} imports list of declared imports in the module
 * @param {string} package_name package name of the module
 * @param {import('./mti').Type[]} source_mtis MTIs representing types declared in the source
 * @param {string[]} [implicitPackages] list of implicit demand-load packages
 */
function resolveImports(androidLibrary, imports, package_name, source_mtis, implicitPackages = ['java.lang']) {
    /**
     * create a new Map that maps JRE type names to MTI instances
     * @type {Map<string, import('./mti').Type>}
     */
    const typemap = new Map(
        androidLibrary.types.map(mti => [`${mti.package}.${mti.name}`, mti])
    );
    // add the source MTIs
    // todo - should we overwrite entries when source MTIs match types in the library?
    source_mtis.forEach(mti => typemap.set(`${mti.package}.${mti.name}`, mti))

    // construct the list of typenames
    const typenames = [...typemap.keys()].join('\n');

    /**
     * The list of explicit import declarations we are unable to resolve
     * @type {ImportDeclaration[]}
     */
    const unresolved = [];

    /** @type {ResolvedImport[]} */
    const resolved = [];

    // import types matching the current package
    if (package_name) {
        const matches = fetchImportedTypes(typenames, package_name, true);
        if (matches)
            resolved.push(new ResolvedImport(null, matches, typemap, 'owner-package'));
    }

    // import types from each import declaration
    imports.forEach(import_decl => {
        const matches = resolveImportTypes(typenames, import_decl);
        if (matches) {
            resolved.push(new ResolvedImport(import_decl, matches, typemap, 'import'));
        } else {
            // if we cannot match the import to any types, add it to the unresolved list so
            // we can flag it as a warning later.
            // Note that empty packages (packages with no types) will appear here - they
            // are technically valid, but represent useless imports
            unresolved.push(import_decl);
        }
    });

    // import types from the implicit packages
    implicitPackages.forEach(package_name => {
        const matches = fetchImportedTypes(typenames, package_name, true);
        if (matches)
            resolved.push(new ResolvedImport(null, matches, typemap, 'implicit-import'));
    })

    /**
     * return the resolved and unresolved imports.
     * The typemap is also included to support fully qualified type names that, by virtue of
     * being fully-qualified, don't require importing.
     */
    return {
        resolved,
        unresolved,
        typemap,
    }
}

module.exports = {
    resolveImports,
    ResolvedImport,
}
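A minimal usage sketch of the resolver (not part of the commit). It assumes the parser output shape defined later in parser.js, and a hypothetical loadAndroidLibrary() helper returning an object with a .types array of MTIs; it only illustrates the resolution order described above (owner package, then declared imports, then implicit packages).

// sketch only - loadAndroidLibrary is a hypothetical helper, not part of this commit
const { parse } = require('./parser');
const { resolveImports } = require('./import-resolver');

const androidLibrary = loadAndroidLibrary(); // assumed shape: { types: import('./mti').Type[] }
const unit = parse('package com.example.app;\nimport java.util.List;\nclass Main { }\n');
const package_name = unit.package ? unit.package.dottedName() : '';

const { resolved, unresolved, typemap } = resolveImports(
    androidLibrary,   // platform library types
    unit.imports,     // ImportDeclaration[] produced by the parser
    package_name,     // 'com.example.app'
    []                // no MTIs for source-declared types in this sketch
);

// `resolved` is ordered: owner-package matches (if any), then declared imports,
// then the implicit java.lang.* entry
unresolved.forEach(imp => console.warn(`unresolved import: ${imp.getDottedName()}`));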
@@ -16,22 +16,27 @@ function packageNameFromRef(ref, mti) {
|
||||
|
||||
/**
|
||||
* @param {number} ref
|
||||
* @param {MTI} mti
|
||||
* @param {MTI} unit
|
||||
*/
|
||||
function typeFromRef(ref, mti) {
|
||||
function typeFromRef(ref, unit) {
|
||||
if (typeof ref !== 'number') {
|
||||
return null;
|
||||
}
|
||||
if (ref < 16) {
|
||||
return KnownTypes[ref];
|
||||
}
|
||||
return mti.referenced.types[ref - 16];
|
||||
return unit.referenced.types[ref - 16];
|
||||
}
|
||||
|
||||
function indent(s) {
|
||||
return '\n' + s.split('\n').map(s => ` ${s}`).join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {MTIType|MTIArrayType|MTIPrimitiveType} Type
|
||||
* @typedef {'class'|'interface'|'enum'|'@interface'|'primitive'|'array'} MTITypeKind
|
||||
*/
|
||||
|
||||
class MinifiableInfo {
|
||||
|
||||
constructor(minified) {
|
||||
@@ -75,11 +80,132 @@ class MinifiableInfo {
|
||||
```
|
||||
*/
|
||||
class MTI extends MinifiableInfo {
|
||||
/**
|
||||
* @param {string} package_name
|
||||
* @param {string} docs
|
||||
* @param {string[]} modifiers
|
||||
* @param {'class'|'enum'|'interface'|'@interface'} typeKind
|
||||
* @param {string} name
|
||||
*/
|
||||
addType(package_name, docs, modifiers, typeKind, name) {
|
||||
const t = {
|
||||
d: docs,
|
||||
p: this.addPackage(package_name),
|
||||
m: getTypeMods(modifiers, typeKind),
|
||||
n: name.replace(/\./g,'$'),
|
||||
v: [],
|
||||
e: /interface/.test(typeKind) ? []
|
||||
: typeKind === 'enum' ? this.addRefType('java.lang', 'Enum')
|
||||
: this.addRefType('java.lang', 'Object'),
|
||||
i: [],
|
||||
f: [],
|
||||
c: [],
|
||||
g: [],
|
||||
}
|
||||
this.minified.it.push(t);
|
||||
const mtitype = new MTIType(this, t);
|
||||
this.types.push(mtitype);
|
||||
return mtitype;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {number} base_typeref
|
||||
* @param {number[]} type_args
|
||||
*/
|
||||
addGenericRefType(base_typeref, type_args) {
|
||||
const targs_key = type_args.join(',');
|
||||
let idx = this.minified.rt.findIndex(t => (t.n === base_typeref) && !t.a && t.g && (t.g.join(',') === targs_key));
|
||||
if (idx < 0) {
|
||||
const rt_mti = {
|
||||
n: base_typeref,
|
||||
g: type_args,
|
||||
};
|
||||
idx = this.minified.rt.push(rt_mti) - 1;
|
||||
this.referenced.types.push(new ReferencedType(this, rt_mti));
|
||||
}
|
||||
return idx + 16;
|
||||
}
|
||||
|
||||
addArrayRefType(element_typeref, dimensions) {
|
||||
let idx = this.minified.rt.findIndex(t => (t.n === element_typeref) && !t.g && (t.a === dimensions));
|
||||
if (idx < 0) {
|
||||
const rt_mti = {
|
||||
n: element_typeref,
|
||||
a: dimensions,
|
||||
};
|
||||
idx = this.minified.rt.push(rt_mti) - 1;
|
||||
this.referenced.types.push(new ReferencedType(this, rt_mti));
|
||||
}
|
||||
return idx + 16;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{rp:[], rt:[], it:[]}} mti
|
||||
* @param {string} package_name
|
||||
* @param {string} type_name
|
||||
*/
|
||||
constructor(mti) {
|
||||
addRefType(package_name, type_name) {
|
||||
let idx;
|
||||
if (!package_name || package_name === 'java.lang') {
|
||||
idx = KnownTypes.findIndex(t => t.name === type_name);
|
||||
if (idx >= 0) {
|
||||
return idx;
|
||||
}
|
||||
}
|
||||
const pkgref = this.addPackage(package_name);
|
||||
const jre_type_name = type_name.replace(/\./g, '$');
|
||||
idx = this.minified.rt.findIndex(t => t.p === pkgref && t.n === jre_type_name);
|
||||
if (idx < 0) {
|
||||
const rt_mti = {
|
||||
p: pkgref,
|
||||
n: jre_type_name,
|
||||
};
|
||||
idx = this.minified.rt.push(rt_mti) - 1;
|
||||
this.referenced.types.push(new ReferencedType(this, rt_mti))
|
||||
}
|
||||
return idx + 16;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} packagename
|
||||
*/
|
||||
addPackage(packagename) {
|
||||
let idx = KnownPackages.indexOf(packagename);
|
||||
if (idx >= 0) {
|
||||
return idx;
|
||||
}
|
||||
idx = this.minified.rp.indexOf(packagename);
|
||||
if (idx < 0) {
|
||||
idx = this.minified.rp.push(packagename) - 1;
|
||||
}
|
||||
return idx + 16;
|
||||
}
|
||||
|
||||
static get defaultPackageRef() {
|
||||
return KnownPackages.indexOf("");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} name
|
||||
*/
|
||||
static fromPrimitive(name) {
|
||||
return MTIPrimitiveType.fromName(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Type} element
|
||||
*/
|
||||
static makeArrayType(element, dimensions) {
|
||||
let res = element;
|
||||
for (let i = 0; i < dimensions; i++) {
|
||||
res = new MTIArrayType(res);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{rp:string[], rt:*[], it:*[]}} mti
|
||||
*/
|
||||
constructor(mti = {rp:[],rt:[],it:[]}) {
|
||||
super(mti);
|
||||
// initialise the lists of referenced packages and types
|
||||
this.referenced = {
|
||||
@@ -162,7 +288,7 @@ class ReferencedType extends MinifiableInfo {
|
||||
baseType,
|
||||
|
||||
/** @type {ReferencedType[]} */
|
||||
typeParams: mti.g && mti.g.map(t => typeFromRef(t, unit)),
|
||||
typeArgs: mti.g && mti.g.map(t => typeFromRef(t, unit)),
|
||||
|
||||
/** @type {string} */
|
||||
arr: '[]'.repeat(mti.a | 0),
|
||||
@@ -177,10 +303,10 @@ class ReferencedType extends MinifiableInfo {
|
||||
get name() {
|
||||
// note: names in enclosed types are in x$y format
|
||||
const n = this.parsed.baseType ? this.parsed.baseType.name : this.minified.n;
|
||||
const type_params = this.parsed.typeParams
|
||||
? `<${this.parsed.typeParams.map(tp => tp.name).join(',')}>`
|
||||
const type_args = this.parsed.typeArgs
|
||||
? `<${this.parsed.typeArgs.map(tp => tp.name).join(',')}>`
|
||||
: ''
|
||||
return `${n}${type_params}${this.parsed.arr}`;
|
||||
return `${n}${type_args}${this.parsed.arr}`;
|
||||
}
|
||||
|
||||
get dottedName() {
|
||||
@@ -188,6 +314,97 @@ class ReferencedType extends MinifiableInfo {
|
||||
}
|
||||
}
|
||||
|
||||
class MTITypeBase extends MinifiableInfo {
|
||||
/**
|
||||
* type docs
|
||||
* @type {string}
|
||||
*/
|
||||
get docs() { return this.minified.d }
|
||||
|
||||
/**
|
||||
* type modifiers
|
||||
* @type {number}
|
||||
*/
|
||||
get modifiers() { return this.minified.m }
|
||||
|
||||
/**
|
||||
* type name (in x$y format for enclosed types)
|
||||
* @type {string}
|
||||
*/
|
||||
get name() { return this.minified.n }
|
||||
|
||||
/**
|
||||
* package this type belongs to
|
||||
*/
|
||||
get package() { return null }
|
||||
|
||||
/**
|
||||
* @type {MTIConstructor[]}
|
||||
*/
|
||||
get constructors() { return [] }
|
||||
|
||||
/**
|
||||
* @type {MTIField[]}
|
||||
*/
|
||||
get fields() { return [] }
|
||||
|
||||
/**
|
||||
* @type {MTIMethod[]}
|
||||
*/
|
||||
get methods() { return [] }
|
||||
|
||||
/**
|
||||
* @param {string} name
|
||||
*/
|
||||
hasModifier(name) {
|
||||
return ((this.minified.m | 0) & getModifierBit(name)) !== 0;
|
||||
}
|
||||
|
||||
toSource() {
|
||||
return this.name;
|
||||
}
|
||||
}
|
||||
|
||||
class MTIArrayType extends MTITypeBase {
|
||||
/**
|
||||
* @param {Type} element_type
|
||||
*/
|
||||
constructor(element_type) {
|
||||
super({
|
||||
n: element_type.name + '[]',
|
||||
d: '',
|
||||
m: 0, // should array types be implicitly final?
|
||||
});
|
||||
this.element_type = element_type;
|
||||
}
|
||||
|
||||
get fullyDottedRawName() { return `${this.element_type.fullyDottedRawName}[]` }
|
||||
|
||||
/** @type {MTITypeKind} */
|
||||
get typeKind() { return 'array' }
|
||||
}
|
||||
|
||||
class MTIPrimitiveType extends MTITypeBase {
|
||||
|
||||
static _cached = new Map();
|
||||
static fromName(name) {
|
||||
let value = MTIPrimitiveType._cached.get(name);
|
||||
if (!value) {
|
||||
value = new MTIPrimitiveType({
|
||||
n: name,
|
||||
d: '',
|
||||
m: 0,
|
||||
});
|
||||
MTIPrimitiveType._cached.set(name, value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
get fullyDottedRawName() { return this.name }
|
||||
|
||||
/** @type {MTITypeKind} */
|
||||
get typeKind() { return 'primitive' }
|
||||
}
|
||||
|
||||
/**
|
||||
* MTIType encodes a complete type (class, interface or enum)
|
||||
@@ -205,7 +422,7 @@ class ReferencedType extends MinifiableInfo {
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
class MTIType extends MinifiableInfo {
|
||||
class MTIType extends MTITypeBase {
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
@@ -241,26 +458,13 @@ class MTIType extends MinifiableInfo {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* type docs
|
||||
* @type {string}
|
||||
*/
|
||||
get docs() { return this.minified.d }
|
||||
|
||||
/**
|
||||
* type modifiers
|
||||
* @type {number}
|
||||
*/
|
||||
get modifiers() { return this.minified.m }
|
||||
|
||||
/**
|
||||
* type name (in x$y format for enclosed types)
|
||||
* @type {string}
|
||||
*/
|
||||
get name() { return this.minified.n }
|
||||
|
||||
get dottedRawName() { return this.minified.n.replace(/[$]/g, '.') };
|
||||
|
||||
get fullyDottedRawName() {
|
||||
const pkg = this.package;
|
||||
return pkg ? `${pkg}.${this.dottedRawName}` : this.dottedRawName;
|
||||
};
|
||||
|
||||
get dottedName() {
|
||||
const t = this.typevars.map(t => t.name).join(',');
|
||||
return t ? `${this.dottedRawName}<${t}>` : this.dottedRawName;
|
||||
@@ -277,6 +481,7 @@ class MTIType extends MinifiableInfo {
|
||||
*/
|
||||
get package() { return this.parsed.package }
|
||||
|
||||
/** @type {MTITypeKind} */
|
||||
get typeKind() {
|
||||
const m = this.minified.m;
|
||||
return (m & TypeModifiers.enum)
|
||||
@@ -332,7 +537,9 @@ class MTIType extends MinifiableInfo {
|
||||
// only add extends if it's not derived from java.lang.Object
|
||||
if (this.extends !== KnownTypes[3]) {
|
||||
const x = Array.isArray(this.extends) ? this.extends : [this.extends];
|
||||
ex = `extends ${x.map(type => type.dottedName).join(', ')} `;
|
||||
if (x.length) {
|
||||
ex = `extends ${x.map(type => type.dottedName).join(', ')} `;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -348,6 +555,85 @@ class MTIType extends MinifiableInfo {
|
||||
`}`
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {number} typeref
|
||||
*/
|
||||
setExtends(unit, typeref) {
|
||||
if (Array.isArray(this.minified.e)) {
|
||||
this.minified.e.push(typeref);
|
||||
// @ts-ignore
|
||||
this.parsed.extends.push(typeFromRef(typeref, unit));
|
||||
} else {
|
||||
this.minified.e = typeref;
|
||||
this.parsed.extends = typeFromRef(typeref, unit);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {string} docs
|
||||
* @param {string[]} modifiers
|
||||
* @param {number} typeref
|
||||
* @param {string} name
|
||||
*/
|
||||
addField(unit, docs, modifiers, typeref, name) {
|
||||
const o = {
|
||||
d: docs,
|
||||
m: getAccessMods(modifiers),
|
||||
n: name,
|
||||
t: typeref,
|
||||
}
|
||||
this.minified.f.push(o);
|
||||
this.parsed.fields.push(new MTIField(unit, o));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {string} docs
|
||||
* @param {string[]} modifiers
|
||||
*/
|
||||
addConstructor(unit, docs, modifiers) {
|
||||
const o = {
|
||||
d: docs,
|
||||
m: getAccessMods(modifiers),
|
||||
p: [],
|
||||
}
|
||||
this.minified.c.push(o);
|
||||
const c = new MTIConstructor(unit, o);
|
||||
this.parsed.constructors.push(c);
|
||||
return c;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {MTIType} owner
|
||||
* @param {string} docs
|
||||
* @param {string[]} modifiers
|
||||
* @param {number} typeref
|
||||
* @param {string} name
|
||||
*/
|
||||
addMethod(unit, owner, docs, modifiers, typeref, name) {
|
||||
let g = this.minified.g.find(m => m.name === name);
|
||||
if (!g) {
|
||||
g = {
|
||||
n:name,
|
||||
s: [],
|
||||
}
|
||||
this.minified.g.push(g);
|
||||
}
|
||||
const o = {
|
||||
d: docs,
|
||||
m: getAccessMods(modifiers),
|
||||
t: typeref,
|
||||
p: [],
|
||||
};
|
||||
g.s.push(o);
|
||||
const method = new MTIMethod(unit, owner, name, o);
|
||||
this.parsed.methods.push(method);
|
||||
return method;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -442,6 +728,22 @@ class MTIConstructor extends MTIMethodBase {
|
||||
const typename = this.parsed.typename.split('$').pop();
|
||||
return `${this.fmtdocs()}${access(this.modifiers)}${typename}(${this.parameters.map(p => p.toSource()).join(', ')}) {}`
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {string[]} modifiers
|
||||
* @param {number} typeref
|
||||
* @param {string} name
|
||||
*/
|
||||
addParameter(unit, modifiers, typeref, name) {
|
||||
const o = {
|
||||
m: getAccessMods(modifiers),
|
||||
t: typeref,
|
||||
n: name,
|
||||
}
|
||||
this.minified.p.push(o);
|
||||
this.parsed.parameters.push(new MTIParameter(unit, o));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -518,6 +820,10 @@ class MTIConstructor extends MTIMethodBase {
|
||||
*/
|
||||
get parameters() { return this.parsed.parameters }
|
||||
|
||||
toDeclSource() {
|
||||
return `${this.return_type.dottedName} ${this.name}(${this.parameters.map(p => p.toSource()).join(', ')})`;
|
||||
}
|
||||
|
||||
toSource() {
|
||||
let m = this.modifiers, body = ' {}';
|
||||
if (m & 0x400) {
|
||||
@@ -530,6 +836,22 @@ class MTIConstructor extends MTIMethodBase {
|
||||
}
|
||||
return `${this.fmtdocs()}${access(m)}${this.return_type.dottedName} ${this.name}(${this.parameters.map(p => p.toSource()).join(', ')})${body}`
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {MTI} unit
|
||||
* @param {string[]} modifiers
|
||||
* @param {number} typeref
|
||||
* @param {string} name
|
||||
*/
|
||||
addParameter(unit, modifiers, typeref, name) {
|
||||
const o = {
|
||||
m: getAccessMods(modifiers),
|
||||
t: typeref,
|
||||
n: name,
|
||||
}
|
||||
this.minified.p.push(o);
|
||||
this.parsed.parameters.push(new MTIParameter(unit, o));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -589,6 +911,27 @@ function access(modifier_bits) {
|
||||
return decls.join(' ');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} modifier
|
||||
*/
|
||||
function getModifierBit(modifier) {
|
||||
const i = access_keywords.indexOf(modifier);
|
||||
return i < 0 ? 0 : (1 << i);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string[]} modifiers
|
||||
* @param {boolean} [varargs]
|
||||
*/
|
||||
function getAccessMods(modifiers, varargs = false) {
|
||||
let m = 0;
|
||||
modifiers.forEach(modifier => m |= getModifierBit(modifier));
|
||||
if (varargs) {
|
||||
m |= getModifierBit('transient');
|
||||
}
|
||||
return m;
|
||||
}
|
||||
|
||||
const TypeModifiers = {
|
||||
public: 0b0000_0000_0000_0001, // 0x1
|
||||
final: 0b0000_0000_0001_0000, // 0x10
|
||||
@@ -620,6 +963,29 @@ function typemods(modifier_bits) {
|
||||
return modifiers.join(' ');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string[]} modifiers
|
||||
* @param {MTITypeKind} typeKind
|
||||
*/
|
||||
function getTypeMods(modifiers, typeKind) {
|
||||
let m = 0;
|
||||
if (modifiers.includes('public')) m |= TypeModifiers.public;
|
||||
if (modifiers.includes('final')) m |= TypeModifiers.final;
|
||||
if (modifiers.includes('abstract')) m |= TypeModifiers.abstract;
|
||||
switch (typeKind) {
|
||||
case "interface":
|
||||
m |= TypeModifiers.interface | TypeModifiers.abstract;
|
||||
break;
|
||||
case "@interface":
|
||||
m |= TypeModifiers['@interface'] | TypeModifiers.abstract;
|
||||
break;
|
||||
case "enum":
|
||||
m |= TypeModifiers.enum | TypeModifiers.final;
|
||||
break;
|
||||
}
|
||||
return m;
|
||||
}
|
||||
|
||||
/**
|
||||
* List of known/common packages.
|
||||
* These are used/encoded as pkgrefs between 0 and 15.
|
||||
|
||||
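A rough sketch (not from the commit) of how the MTI builder methods above fit together for a small class. It assumes mti.js exports the MTI class (its module.exports is not shown in this diff); parameter order follows the signatures in the hunks above, and whether a ref resolves to KnownTypes (refs 0-15) or the referenced-type table (refs 16+) is decided inside addRefType/addPackage.

// sketch only - illustrates the builder API with placeholder data
const unit = new MTI();

// roughly: public class com.example.Thing { public String name; public String find(String filter) {} }
const thing = unit.addType('com.example', '', ['public'], 'class', 'Thing');

const string_ref = unit.addRefType('java.lang', 'String');
thing.addField(unit, '', ['public'], string_ref, 'name');

const find = thing.addMethod(unit, thing, '', ['public'], string_ref, 'find');
find.addParameter(unit, [], string_ref, 'filter');

// array and generic refs build on existing refs
const string_array_ref = unit.addArrayRefType(string_ref, 1);               // String[]
const list_ref = unit.addRefType('java.util', 'List');
const list_of_string_ref = unit.addGenericRefType(list_ref, [string_ref]); // List<String>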
498
langserver/java/parser.js
Normal file
@@ -0,0 +1,498 @@
|
||||
const Annotation = require('./parsetypes/annotation');
|
||||
const Declaration = require('./parsetypes/declaration');
|
||||
const FMCDeclaration = require('./parsetypes/fmc');
|
||||
const ImportDeclaration = require('./parsetypes/import');
|
||||
const PackageDeclaration = require('./parsetypes/package');
|
||||
const ParameterDeclaration = require('./parsetypes/parameter');
|
||||
const ParseProblem = require('./parsetypes/parse-problem');
|
||||
const ParseResult = require('./parsetypes/parse-result');
|
||||
const ParseSyntaxError = require('./parsetypes/parse-error');
|
||||
const ProblemSeverity = require('./parsetypes/problem-severity');
|
||||
const Token = require('./parsetypes/token');
|
||||
const TypeDeclaration = require('./parsetypes/type');
|
||||
const TypeIdent = require('./parsetypes/typeident');
|
||||
const TypeParameters = require('./parsetypes/type-parameters');
|
||||
/**
|
||||
* @typedef {import('./parsetypes/modifier')} Modifier
|
||||
*/
|
||||
|
||||
|
||||
/**
|
||||
* @param {Token[]} tokens
|
||||
* @param {number} idx
|
||||
*/
|
||||
function findToken(tokens, idx) {
|
||||
return tokens.find(t => t.simplified_text_idx === idx);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} simplified
|
||||
* @param {number} lastIndex
|
||||
*/
|
||||
function parseToBracketEnd(simplified, lastIndex) {
|
||||
// parse until close bracket
|
||||
let re = /[()]/g, balance = 1;
|
||||
const start = re.lastIndex = lastIndex;
|
||||
for (let m; m = re.exec(simplified);) {
|
||||
if (m[0] === '(') balance++;
|
||||
else if (--balance === 0) {
|
||||
re.lastIndex++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return {
|
||||
start,
|
||||
end: re.lastIndex,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} simplified
|
||||
* @param {Token[]} tokens
|
||||
* @param {{start: number, end: number}} simplified_range
|
||||
* @param {*[]} invalids
|
||||
*/
|
||||
function parseParameters(simplified, tokens, simplified_range, invalids) {
|
||||
const decls = [
|
||||
/[ X]+/g,
|
||||
/@ *W( *\. *W)*( *\()?/g,
|
||||
/M/g,
|
||||
/W(?: *\. *W)*(?: *<.*?>)?(?: *\[ *\])*(?: +|( *\.\.\. *))W(?: *\[ *\])*( *,)?/g, // parameter decl
|
||||
/(\)|$)/g, // end of params
|
||||
];
|
||||
const parameters = [];
|
||||
/** @type {Modifier[]} */
|
||||
const modifiers = [];
|
||||
let lastIndex = simplified_range.start;
|
||||
for(;;) {
|
||||
/** @type {{idx:number, d: RegExp, m:RegExpMatchArray}} */
|
||||
let best_match = null, next_best = null;
|
||||
decls.find((d,idx) => {
|
||||
d.lastIndex = lastIndex;
|
||||
const m = d.exec(simplified);
|
||||
if (!m) return;
|
||||
if (m.index === lastIndex) {
|
||||
best_match = {idx, d, m};
|
||||
return true;
|
||||
}
|
||||
if (idx === 0) {
|
||||
return;
|
||||
}
|
||||
if (!next_best || m.index < next_best.m.index) {
|
||||
next_best = {idx, d, m};
|
||||
}
|
||||
});
|
||||
if (!best_match) {
|
||||
const errorToken = findToken(tokens, lastIndex);
|
||||
const error = new ParseSyntaxError(null, modifiers.splice(0), errorToken);
|
||||
invalids.push(error);
|
||||
best_match = next_best;
|
||||
if (!next_best) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
lastIndex = best_match.d.lastIndex;
|
||||
|
||||
if (best_match.idx === 1) {
|
||||
// annotation
|
||||
const at = findToken(tokens, best_match.m.index);
|
||||
const name = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
|
||||
const annotation = new Annotation(at, name);
|
||||
modifiers.push(annotation);
|
||||
if (best_match.m[0].endsWith('(')) {
|
||||
lastIndex = parseToBracketEnd(simplified, lastIndex).end;
|
||||
}
|
||||
}
|
||||
else if (best_match.idx === 2) {
|
||||
// modifier
|
||||
const modifier = findToken(tokens, best_match.m.index);
|
||||
modifiers.push(modifier);
|
||||
}
|
||||
else if (best_match.idx === 3) {
|
||||
// parameter
|
||||
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
|
||||
const varargs = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].indexOf('...')) : null;
|
||||
const comma = best_match.m[2] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf(',')) : null;
|
||||
const typetokens = [];
|
||||
const first_type_token = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
|
||||
for (let t = first_type_token, i = tokens.indexOf(t); t !== name; t = tokens[++i]) {
|
||||
if (t.simplified_text !== ' ')
|
||||
typetokens.push(t);
|
||||
}
|
||||
const param = new ParameterDeclaration(modifiers.splice(0), new TypeIdent(typetokens), varargs, name, comma);
|
||||
parameters.push(param);
|
||||
} else if (best_match.idx === 4) {
|
||||
// end of parameters
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return parameters;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Token[]} typelist_tokens
|
||||
*/
|
||||
function parseTypeIdentList(typelist_tokens) {
|
||||
// split the typelist into typetoken chunks, separated by commas
|
||||
let typeargs_balance = 0, array_balance = 0;
|
||||
/** @type {Token[][]} */
|
||||
let types = [[]];
|
||||
typelist_tokens.forEach(t => {
|
||||
switch(t.text) {
|
||||
case ' ':
|
||||
if (types[0].length === 0) {
|
||||
return;
|
||||
}
|
||||
break;
|
||||
case ',':
|
||||
if (typeargs_balance <= 0 && array_balance <= 0) {
|
||||
while (types[0][types[0].length - 1].text === ' ') {
|
||||
types[0].pop();
|
||||
}
|
||||
typeargs_balance = array_balance = 0;
|
||||
types.unshift([]);
|
||||
return;
|
||||
}
|
||||
break;
|
||||
case '<':
|
||||
typeargs_balance++;
|
||||
break;
|
||||
case '>':
|
||||
typeargs_balance--;
|
||||
break;
|
||||
case ']':
|
||||
array_balance++;
|
||||
break;
|
||||
case '[':
|
||||
array_balance--;
|
||||
break;
|
||||
}
|
||||
types[0].push(t);
|
||||
});
|
||||
|
||||
// remove any blank entries (start comma or sequential commas)
|
||||
return types.filter(t => t.length).reverse().map(tokens => new TypeIdent(tokens));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} source
|
||||
*/
|
||||
function parse(source) {
|
||||
const re = /(\/\*[\d\D]*?\*\/)|(\/\*)|(\*\/)|((?:\/\/.*)|(?:\s+))|(".*?")|('.'?)|\b(package|import|class|enum|interface|extends|implements|throws)\b|\b(public|private|protected|static|final|abstract|native|volatile|transient|synchronized|strictfp)\b|(\.{3}|[@{}()<>,;?*\[\].])|\b(super|new)\b|\b([A-Za-z_]\w*)|(\d[\w.]*)/g;
|
||||
|
||||
let source_idx = 0, simplified_text_idx = 0;
|
||||
/** @type {Token[]} */
|
||||
let tokens = [];
|
||||
function mapSimplified(
|
||||
_,
|
||||
mlc,
|
||||
unterminated_mlc,
|
||||
mlc_end,
|
||||
slc_ws,
|
||||
string,
|
||||
char,
|
||||
decl_keyword,
|
||||
modifier,
|
||||
symbol,
|
||||
kw,
|
||||
word
|
||||
/* number, */
|
||||
) {
|
||||
if (mlc) return 'X';//mlc.replace(/[^\n]+/g, '') || ' ';
|
||||
if (unterminated_mlc) return ' ';
|
||||
if (mlc_end) return ' ';
|
||||
if (slc_ws) return ' '; //slc_ws.replace(/[^\n]+/g, '').replace(/ +/,' ') || ' ';
|
||||
if (string) return 'S';
|
||||
if (char) return 'C';
|
||||
if (decl_keyword) return decl_keyword;
|
||||
if (modifier) return 'M';
|
||||
if (symbol) return symbol;
|
||||
if (kw) return kw;
|
||||
if (word) return 'W';
|
||||
return 'N';
|
||||
|
||||
}
|
||||
const simplified = source.replace(re, (...args) => {
|
||||
let text = args[0];
|
||||
let next_idx = source.indexOf(text, source_idx);
|
||||
|
||||
simplified_text_idx += (next_idx - source_idx);
|
||||
source_idx = next_idx;
|
||||
|
||||
const simplified_text = mapSimplified.apply(null, args);
|
||||
tokens.push(new Token(source_idx, text, simplified_text, simplified_text_idx));
|
||||
|
||||
source_idx += text.length;
|
||||
simplified_text_idx += simplified_text.length;
|
||||
|
||||
return simplified_text;
|
||||
});
|
||||
|
||||
// console.log(simplified);
|
||||
|
||||
const decls = [
|
||||
/ +/g,
|
||||
/package +W(?: *\. *W)*( *;)?/g,
|
||||
/import +(M +)?W(?: *\. *W)*( *\.\*)?( *;)?/g,
|
||||
/@ *W( *\. *W)*( *\()?/g,
|
||||
/M/g,
|
||||
/(class|enum|interface|@ *interface) +W(.+?(?= *[a-z{]))/g, // type declaration
|
||||
/(implements|extends|throws) +W(.+?(?= *[a-z{]))/g, // decl
|
||||
/W(?: *\. *W)*(?: *<.*?>)?(?: *\[ *\])* +W(?: *\[ *\])*( *[=;(,])?/g, // field/method
|
||||
/W *\(/g, // constructor
|
||||
/[{}]/g, // scope
|
||||
/X/g, // multi-line comment
|
||||
/<.*?>(?= *[WM@])/g, // type variables
|
||||
/$/g, // end of file
|
||||
]
|
||||
let lastIndex = 0;
|
||||
let loc = ['base'];
|
||||
let package_decl = null;
|
||||
let imports = [];
|
||||
let modifiers = [];
|
||||
let types = [];
|
||||
let invalids = [];
|
||||
let lastMLC = null;
|
||||
/** @type {TypeDeclaration[]} */
|
||||
let type_stack = [null];
|
||||
|
||||
for(;;) {
|
||||
/** @type {{idx:number, d: RegExp, m:RegExpMatchArray}} */
|
||||
let best_match = null, next_best = null;
|
||||
decls.find((d,idx) => {
|
||||
d.lastIndex = lastIndex;
|
||||
const m = d.exec(simplified);
|
||||
if (!m) return;
|
||||
if (m.index === lastIndex) {
|
||||
best_match = {idx, d, m};
|
||||
return true;
|
||||
}
|
||||
if (idx === 0) {
|
||||
return;
|
||||
}
|
||||
if (!next_best || m.index < next_best.m.index) {
|
||||
next_best = {idx, d, m};
|
||||
}
|
||||
});
|
||||
if (!best_match) {
|
||||
const errorToken = findToken(tokens, lastIndex);
|
||||
const error = new ParseSyntaxError(lastMLC, modifiers.splice(0), errorToken);
|
||||
invalids.push(error);
|
||||
lastMLC = null;
|
||||
console.log(simplified.slice(lastIndex, lastIndex + 100));
|
||||
best_match = next_best;
|
||||
if (!next_best) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
lastIndex = best_match.d.lastIndex;
|
||||
|
||||
function parseToExpressionEnd() {
|
||||
// parse expression
|
||||
let re = /[(){};]/g, balance = [0,0];
|
||||
re.lastIndex = lastIndex;
|
||||
for (let m; m = re.exec(simplified);) {
|
||||
if (m[0] === '{') balance[0]++;
|
||||
else if (m[0] === '(') balance[1]++;
|
||||
else if (m[0] === '}') balance[0]--;
|
||||
else if (m[0] === ')') balance[1]--;
|
||||
else if (balance[0] <= 0 && balance[1] <= 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// console.log(simplified.slice(lastIndex, re.lastIndex));
|
||||
lastIndex = re.lastIndex;
|
||||
}
|
||||
|
||||
if (best_match.idx === 1) {
|
||||
// package - map all the name parts
|
||||
const nameparts = [];
|
||||
for (let m, re=/W/g; m = re.exec(best_match.m[0]); ) {
|
||||
const ident = findToken(tokens, best_match.m.index + m.index);
|
||||
nameparts.push(ident);
|
||||
}
|
||||
const semicolon = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].length - 1) : null;
|
||||
if (!package_decl) {
|
||||
package_decl = new PackageDeclaration(lastMLC, modifiers.splice(0), nameparts, semicolon);
|
||||
}
|
||||
lastMLC = null;
|
||||
}
|
||||
if (best_match.idx === 2) {
|
||||
// import - map all the name parts
|
||||
const nameparts = [];
|
||||
for (let m, re=/W/g; m = re.exec(best_match.m[0]); ) {
|
||||
const ident = findToken(tokens, best_match.m.index + m.index);
|
||||
nameparts.push(ident);
|
||||
}
|
||||
const static = best_match.m[1] ? findToken(tokens, best_match.m.index + best_match.m[0].indexOf('M')) : null;
|
||||
const asterisk = best_match.m[2] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('*')) : null
|
||||
const semicolon = best_match.m[3] ? findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf(';')) : null;
|
||||
let import_decl = new ImportDeclaration(lastMLC, modifiers.splice(0), nameparts, static, asterisk, semicolon);
|
||||
imports.push(import_decl);
|
||||
lastMLC = null;
|
||||
}
|
||||
if (best_match.idx === 3) {
|
||||
// annotation
|
||||
const at = findToken(tokens, best_match.m.index);
|
||||
const name = findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W'));
|
||||
const annotation = new Annotation(at, name);
|
||||
modifiers.push(annotation);
|
||||
if (best_match.m[0].endsWith('(')) {
|
||||
lastIndex = parseToBracketEnd(simplified, lastIndex).end;
|
||||
}
|
||||
}
|
||||
if (best_match.idx === 4) {
|
||||
// modifier
|
||||
const modifier = findToken(tokens, best_match.m.index);
|
||||
modifiers.push(modifier);
|
||||
}
|
||||
|
||||
if (best_match.idx === 5) {
|
||||
// type declaration
|
||||
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
|
||||
/** @type {'class'|'interface'|'enum'|'@interface'} */
|
||||
// @ts-ignore
|
||||
const kind = best_match.m[1].replace(/ /g, '');
|
||||
const type = new TypeDeclaration(type_stack[0], lastMLC, modifiers.splice(0), kind, name);
|
||||
lastMLC = null;
|
||||
types.push(type);
|
||||
type_stack.unshift(type);
|
||||
loc.unshift('typedecl');
|
||||
}
|
||||
|
||||
if (best_match.idx === 6) {
|
||||
// extends/implements/throws
|
||||
const decl_kw = findToken(tokens, best_match.m.index);
|
||||
const startidx = tokens.indexOf(findToken(tokens, best_match.m.index + best_match.m[0].indexOf('W')));
|
||||
const endidx = tokens.indexOf(findToken(tokens,best_match.m.index + best_match.m[0].length - 1));
|
||||
const typelist = parseTypeIdentList(tokens.slice(startidx, endidx + 1));
|
||||
switch(decl_kw.text) {
|
||||
case 'throws':
|
||||
break;
|
||||
case 'extends':
|
||||
case 'implements':
|
||||
if (loc[0] === 'typedecl') {
|
||||
type_stack[0].super_declarations.push({ decl_kw, typelist });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (best_match.idx === 7) {
|
||||
// field or method
|
||||
const name = findToken(tokens, best_match.m.index + best_match.m[0].lastIndexOf('W'));
|
||||
const typetokens = [];
|
||||
for (let t = findToken(tokens, best_match.m.index), i = tokens.indexOf(t); t !== name; t = tokens[++i]) {
|
||||
if (t.simplified_text !== ' ')
|
||||
typetokens.push(t);
|
||||
}
|
||||
let parameters, equals_comma_sc = null;
|
||||
switch (best_match.m[0].slice(-1)) {
|
||||
case '(':
|
||||
// method
|
||||
let params_source_range = parseToBracketEnd(simplified, lastIndex);
|
||||
lastIndex = params_source_range.end;
|
||||
parameters = parseParameters(simplified, tokens, params_source_range, invalids);
|
||||
break;
|
||||
case '=':
|
||||
// initialised field
|
||||
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
|
||||
parseToExpressionEnd();
|
||||
break;
|
||||
case ',':
|
||||
// multi-declaration field
|
||||
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
|
||||
throw new Error('not implemented');
|
||||
case ';':
|
||||
// single field
|
||||
equals_comma_sc = findToken(tokens, best_match.m.index + best_match.m[0].length);
|
||||
break;
|
||||
default:
|
||||
// invalid - but treat as a single field
|
||||
break;
|
||||
}
|
||||
if (type_stack[0]) {
|
||||
const fmc = new FMCDeclaration(type_stack[0], lastMLC, modifiers.splice(0), best_match.m[0].endsWith('(') ? 'method' : 'field', name, new TypeIdent(typetokens), equals_comma_sc, parameters);
|
||||
type_stack[0].declarations.push(fmc);
|
||||
}
|
||||
lastMLC = null;
|
||||
}
|
||||
|
||||
if (best_match.idx === 8) {
|
||||
// constructor (if the name matches the type)
|
||||
let params_source_range = parseToBracketEnd(simplified, lastIndex);
|
||||
lastIndex = params_source_range.end;
|
||||
const parameters = parseParameters(simplified, tokens, params_source_range, invalids);
|
||||
const name = findToken(tokens, best_match.m.index);
|
||||
if (type_stack[0] && name.text === type_stack[0].name.text) {
|
||||
const fmc = new FMCDeclaration(type_stack[0], lastMLC, modifiers.splice(0), 'constructor', name, null, null, parameters);
|
||||
type_stack[0].declarations.push(fmc);
|
||||
} else {
|
||||
invalids.push(new ParseSyntaxError(lastMLC, modifiers.splice(0), name));
|
||||
}
|
||||
lastMLC = null;
|
||||
}
|
||||
|
||||
if (best_match.idx === 9) {
|
||||
// open/close scope
|
||||
if (best_match.m[0] === '{') {
|
||||
if (loc[0] === 'typedecl') loc[0] = 'typebody';
|
||||
else if (loc[0] === 'typebody') {
|
||||
// static initer / method body
|
||||
let re = /[{}]/g, balance = 1;
|
||||
re.lastIndex = lastIndex;
|
||||
for (let m; m = re.exec(simplified);) {
|
||||
if (m[0] === '{') balance++;
|
||||
else if (--balance === 0) {
|
||||
re.lastIndex++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
lastIndex = re.lastIndex;
|
||||
}
|
||||
} else {
|
||||
// end scope
|
||||
if (/^type/.test(loc[0])) {
|
||||
loc.shift();
|
||||
type_stack.shift();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (best_match.idx === 10) {
|
||||
// mlc
|
||||
lastMLC = findToken(tokens, best_match.m.index);
|
||||
}
|
||||
|
||||
if (best_match.idx === 11) {
|
||||
// type parameters
|
||||
const open = findToken(tokens, best_match.m.index);
|
||||
const close = findToken(tokens, best_match.m.index + best_match.m[0].length - 1);
|
||||
modifiers.push(new TypeParameters(open, close));
|
||||
}
|
||||
|
||||
if (best_match.idx === 12) {
|
||||
// end of file
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return new ParseResult(package_decl, imports, types, invalids);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Annotation,
|
||||
Declaration,
|
||||
FMCDeclaration,
|
||||
ImportDeclaration,
|
||||
PackageDeclaration,
|
||||
parse,
|
||||
ParseProblem,
|
||||
ParseResult,
|
||||
ProblemSeverity,
|
||||
Token,
|
||||
TypeDeclaration,
|
||||
TypeParameters,
|
||||
}
|
||||
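To make the token-simplification step above concrete: a hand-worked sketch (not captured output) of the single-character alphabet the decls regexes match against (W identifier, M modifier keyword, S/C string/char literal, X multi-line comment, keywords and symbols kept as-is), followed by a minimal parse() call.

// Source:   package com.example;
//           public class Greeter {
//               private String name;
//           }
// Simplified (approximately): "package W.W; M class W { M W W; } "
//  - "package W.W;" should be consumed by the package regex
//  - "class W"      by the type-declaration regex
//  - "M W W;"       by the modifier and field/method regexes
//  - "{" / "}"      by the scope regex
const { parse } = require('./parser');

const result = parse(
    'package com.example;\n' +
    'public class Greeter {\n' +
    '    private String name;\n' +
    '}\n'
);
console.log(result.package.dottedName());        // 'com.example'
console.log(result.types.map(t => t.name.text)); // ['Greeter']
console.log(result.invalids.length);             // expected: 0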
16
langserver/java/parsetypes/annotation.js
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
|
||||
* @typedef {import('./token')} Token
|
||||
*/
|
||||
|
||||
class Annotation {
|
||||
/**
|
||||
* @param {Token} at
|
||||
* @param {Token} name
|
||||
*/
|
||||
constructor(at, name) {
|
||||
this.at = at;
|
||||
this.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Annotation;
|
||||
49
langserver/java/parsetypes/declaration.js
Normal file
@@ -0,0 +1,49 @@
|
||||
const Token = require('./token');
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./type')} TypeDeclaration
|
||||
*/
|
||||
|
||||
/**
|
||||
* Base class for Java declarations.
|
||||
*/
|
||||
class Declaration {
|
||||
/**
|
||||
* @param {TypeDeclaration} owner_type the type this declaration belongs to (if any)
|
||||
* @param {Token} docs JavaDocs associated with the declaration
|
||||
* @param {Modifier[]} modifiers annotations, modifier keywords and type parameters
|
||||
*/
|
||||
constructor(owner_type, docs, modifiers) {
|
||||
this.owner_type = owner_type;
|
||||
this.docs = docs;
|
||||
this.modifiers = modifiers;
|
||||
}
|
||||
|
||||
/**
|
||||
* returns the raw JavaDoc string or an empty string if no doc is present
|
||||
*/
|
||||
getDocString() {
|
||||
return this.docs ? this.docs.text : '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the raw access modifier text values
|
||||
* @returns {string[]}
|
||||
*/
|
||||
getAccessModifierValues() {
|
||||
// @ts-ignore
|
||||
return this.modifiers.filter(m => m instanceof Token).map(t => t.text);
|
||||
}
|
||||
|
||||
/**
|
||||
* Finds the token matching the specified modifier
|
||||
* @param {string} name
|
||||
* @returns {Token}
|
||||
*/
|
||||
findModifier(name) {
|
||||
// @ts-ignore
|
||||
return this.modifiers.find(m => (m instanceof Token) && (m.text === name));
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Declaration;
|
||||
90
langserver/java/parsetypes/fmc.js
Normal file
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./parameter')} ParameterDeclaration
|
||||
* @typedef {import('./token')} Token
|
||||
* @typedef {import('./type')} TypeDeclaration
|
||||
* @typedef {import('./typeident')} TypeIdent
|
||||
*/
|
||||
const Declaration = require('./declaration');
|
||||
const ParseProblem = require('./parse-problem');
|
||||
const ProblemSeverity = require('./problem-severity');
|
||||
|
||||
/**
|
||||
* Field, method or constructor declaration
|
||||
*/
|
||||
class FMCDeclaration extends Declaration {
|
||||
/**
|
||||
*
|
||||
* @param {TypeDeclaration} owner_type
|
||||
* @param {Token} docs
|
||||
* @param {Modifier[]} modifiers
|
||||
* @param {'field'|'method'|'constructor'} kind
|
||||
* @param {Token} name
|
||||
* @param {TypeIdent} type
|
||||
* @param {Token} equals_comma_sc
|
||||
* @param {ParameterDeclaration[]} parameters
|
||||
*/
|
||||
constructor(owner_type, docs, modifiers, kind, name, type, equals_comma_sc, parameters) {
|
||||
super(owner_type, docs, modifiers);
|
||||
this.kind = kind;
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.equals_comma_sc = equals_comma_sc;
|
||||
this.parameters = parameters || [];
|
||||
}
|
||||
|
||||
validate() {
|
||||
const checkDuplicateParameterNames = () => {
|
||||
const done = new Set();
|
||||
return this.parameters
|
||||
.filter(p => {
|
||||
if (done.has(p.name.text)) {
|
||||
return true;
|
||||
}
|
||||
done.add(p.name.text);
|
||||
})
|
||||
.map(p =>
|
||||
new ParseProblem(p.name, `Duplicate parameter name: '${p.name.text}'`, ProblemSeverity.Error)
|
||||
);
|
||||
};
|
||||
const checkParameterCommas = () => {
|
||||
const last_param_idx = this.parameters.length - 1;
|
||||
return this.parameters.map((p, idx) => {
|
||||
if ((idx < last_param_idx) && !p.comma) {
|
||||
return new ParseProblem(p.lastToken(), 'Missing comma', ProblemSeverity.Error);
|
||||
}
|
||||
else if ((idx === last_param_idx) && p.comma) {
|
||||
return ParseProblem.syntaxError(p.comma);
|
||||
}
|
||||
});
|
||||
}
|
||||
const checkFieldSemicolon = () => {
|
||||
if (this.kind === 'field') {
|
||||
if (!this.equals_comma_sc) {
|
||||
return new ParseProblem(this.name, `Missing operator or semicolon`, ProblemSeverity.Error);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
const checkVarargsIsLastParameter = () => {
|
||||
return this.parameters
|
||||
.slice(0, -1)
|
||||
.filter(p => p.varargs)
|
||||
.map(p =>
|
||||
new ParseProblem(p.varargs, 'A variable arity parameter must be declared last', ProblemSeverity.Error)
|
||||
);
|
||||
};
|
||||
const problems = [
|
||||
...ParseProblem.checkAccessModifiers(this.modifiers, this.kind),
|
||||
...ParseProblem.checkDuplicateModifiers(this.modifiers),
|
||||
...ParseProblem.checkConflictingModifiers(this.modifiers),
|
||||
...checkParameterCommas(),
|
||||
...checkDuplicateParameterNames(),
|
||||
...checkVarargsIsLastParameter(),
|
||||
checkFieldSemicolon(),
|
||||
];
|
||||
return problems;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = FMCDeclaration;
|
||||
68
langserver/java/parsetypes/import.js
Normal file
@@ -0,0 +1,68 @@
|
||||
const Declaration = require('./declaration');
|
||||
const ParseProblem = require('./parse-problem');
|
||||
const Token = require('./token');
|
||||
const TypeParameters = require('./type-parameters');
|
||||
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
*/
|
||||
|
||||
class ImportDeclaration extends Declaration {
|
||||
/**
|
||||
* @param {Token} docs
|
||||
* @param {Modifier[]} modifiers
|
||||
* @param {Token[]} nameparts
|
||||
* @param {Token} static_
|
||||
* @param {Token} asterisk
|
||||
* @param {Token} semicolon
|
||||
*/
|
||||
constructor(docs, modifiers, nameparts, static_, asterisk, semicolon) {
|
||||
super(null, docs, modifiers);
|
||||
this.nameparts = nameparts;
|
||||
this.static_ = static_;
|
||||
this.asterisk = asterisk;
|
||||
this.semicolon = semicolon;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the dotted portion of the import declaration (excluding any demand-load part)
|
||||
*/
|
||||
getDottedName() {
|
||||
return this.nameparts.map(x => x.text).join('.');
|
||||
}
|
||||
|
||||
lastToken() {
|
||||
return this.semicolon || this.asterisk || this.nameparts.slice(-1)[0];
|
||||
}
|
||||
|
||||
validate() {
|
||||
const checkModifierIsStatic = () => {
|
||||
if (this.static_ && this.static_.text !== 'static') {
|
||||
return ParseProblem.syntaxError(this.static_);
|
||||
}
|
||||
}
|
||||
|
||||
const checkNoInvalidModifiers = () => {
|
||||
return this.modifiers.map(modifier => {
|
||||
if (modifier instanceof Token) {
|
||||
return ParseProblem.syntaxError(modifier);
|
||||
}
|
||||
if (modifier instanceof TypeParameters) {
|
||||
return ParseProblem.syntaxError(modifier.open);
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/** @type {ParseProblem[]} */
|
||||
const problems = [
|
||||
checkModifierIsStatic(),
|
||||
...ParseProblem.checkNonKeywordIdents(this.nameparts),
|
||||
ParseProblem.checkSemicolon(this),
|
||||
...checkNoInvalidModifiers(),
|
||||
];
|
||||
|
||||
return problems;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ImportDeclaration;
|
||||
15
langserver/java/parsetypes/modifier.js
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
|
||||
* @typedef {import('./annotation')} Annotation
|
||||
* @typedef {import('./type-parameters')} TypeParameters
|
||||
* @typedef {import('./token')} Token
|
||||
*
|
||||
* Each Modifier is one of
|
||||
* - a token representing a modifier keyword (e.g public, static, etc)
|
||||
* - an Annotation (eg. @Override)
|
||||
* - or a TypeParameters section (eg <T extends Object>)
|
||||
* These can typically appear in any order before a declaration
|
||||
*
|
||||
* @typedef {Token|Annotation|TypeParameters} Modifier
|
||||
*/
|
||||
|
||||
module.exports = {}
|
||||
39
langserver/java/parsetypes/package.js
Normal file
@@ -0,0 +1,39 @@
|
||||
const Declaration = require('./declaration');
|
||||
const ParseProblem = require('./parse-problem');
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./token')} Token
|
||||
*/
|
||||
|
||||
class PackageDeclaration extends Declaration {
|
||||
/**
|
||||
* @param {Token} docs
|
||||
* @param {Modifier[]} modifiers
|
||||
* @param {Token[]} nameparts
|
||||
* @param {Token} semicolon
|
||||
*/
|
||||
constructor(docs, modifiers, nameparts, semicolon) {
|
||||
super(null, docs, modifiers);
|
||||
this.nameparts = nameparts;
|
||||
this.semicolon = semicolon;
|
||||
}
|
||||
|
||||
dottedName() {
|
||||
return this.nameparts.map(t => t.text).join('.');
|
||||
}
|
||||
|
||||
lastToken() {
|
||||
return this.semicolon || this.nameparts.slice(-1)[0];
|
||||
}
|
||||
|
||||
validate() {
|
||||
/** @type {ParseProblem[]} */
|
||||
const problems = [
|
||||
ParseProblem.checkSemicolon(this),
|
||||
...ParseProblem.checkNonKeywordIdents(this.nameparts),
|
||||
];
|
||||
return problems;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = PackageDeclaration;
|
||||
33
langserver/java/parsetypes/parameter.js
Normal file
@@ -0,0 +1,33 @@
|
||||
const Declaration = require('./declaration');
|
||||
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./typeident')} TypeIdent
|
||||
* @typedef {import('./token')} Token
|
||||
*/
|
||||
|
||||
/**
|
||||
* A single parameter declaration
|
||||
*/
|
||||
class ParameterDeclaration extends Declaration {
|
||||
/**
|
||||
* @param {Modifier[]} modifiers
|
||||
* @param {TypeIdent} type
|
||||
* @param {Token} varargs
|
||||
* @param {Token} name
|
||||
* @param {Token} comma
|
||||
*/
|
||||
constructor(modifiers, type, varargs, name, comma) {
|
||||
super(null, null, modifiers);
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.varargs = varargs;
|
||||
this.comma = comma;
|
||||
}
|
||||
|
||||
lastToken() {
|
||||
return this.comma || this.name;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ParameterDeclaration;
|
||||
29
langserver/java/parsetypes/parse-error.js
Normal file
@@ -0,0 +1,29 @@
|
||||
const Declaration = require('./declaration');
|
||||
const ParseProblem = require('./parse-problem');
|
||||
/**
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./token')} Token
|
||||
*/
|
||||
|
||||
class ParseSyntaxError extends Declaration {
|
||||
/**
|
||||
* @param {Token} docs
|
||||
* @param {Modifier[]} modifiers
|
||||
* @param {Token} errorToken
|
||||
*/
|
||||
constructor(docs, modifiers, errorToken) {
|
||||
super(null, docs, modifiers);
|
||||
this.errorToken = errorToken;
|
||||
}
|
||||
|
||||
validate() {
|
||||
if (!this.errorToken) {
|
||||
return [];
|
||||
}
|
||||
return [
|
||||
ParseProblem.syntaxError(this.errorToken),
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ParseSyntaxError;
|
||||
134
langserver/java/parsetypes/parse-problem.js
Normal file
@@ -0,0 +1,134 @@
|
||||
const ProblemSeverity = require('./problem-severity');
|
||||
const Token = require('./token');
|
||||
|
||||
/**
|
||||
* @typedef {import('./import')} ImportDeclaration
|
||||
* @typedef {import('./modifier')} Modifier
|
||||
* @typedef {import('./package')} PackageDeclaration
|
||||
* @typedef {import('./problem-severity').Severity} Severity
|
||||
*/
|
||||
|
||||
|
||||
class ParseProblem {
|
||||
/**
|
||||
* @param {Token|Token[]} token
|
||||
* @param {string} message
|
||||
* @param {Severity} severity
|
||||
*/
|
||||
constructor(token, message, severity) {
|
||||
this.startIdx = (Array.isArray(token) ? token[0] : token).source_idx;
|
||||
const lastToken = (Array.isArray(token) ? token[token.length - 1] : token);
|
||||
this.endIdx = lastToken.source_idx + lastToken.text.length;
|
||||
this.message = message;
|
||||
this.severity = severity;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Modifier[]} mods
|
||||
*/
|
||||
static checkDuplicateModifiers(mods) {
|
||||
const done = new Set();
|
||||
const res = [];
|
||||
for (let mod of mods) {
|
||||
if (mod instanceof Token) {
|
||||
if (done.has(mod.text)) {
|
||||
res.push(new ParseProblem(mod, `Duplicate modifier: ${mod.text}`, ProblemSeverity.Error));
|
||||
}
|
||||
done.add(mod.text);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
static checkConflictingModifiers(mods) {
|
||||
const modmap = new Map();
|
||||
let res = [];
|
||||
mods.filter(m => m instanceof Token).forEach(m => modmap.set(m.text, m));
|
||||
const names = [...modmap.keys()];
|
||||
const visibilities = names.filter(m => /^(public|private|protected)$/.test(m));
|
||||
if (visibilities.length > 1) {
|
||||
const visnames = visibilities.map(m => `'${m}'`).join(', ').replace(/, (?='\w+'$)/, ' and ');
|
||||
res = visibilities.map(m => new ParseProblem(modmap.get(m), `Conflicting modifiers: ${visnames}`, ProblemSeverity.Error));
|
||||
}
|
||||
if (names.includes('abstract')) {
|
||||
if (names.includes('final')) {
|
||||
res.push(new ParseProblem(modmap.get('final'), `Declarations cannot be both 'abstract' and 'final'`, ProblemSeverity.Error));
|
||||
}
|
||||
if (names.includes('native')) {
|
||||
res.push(new ParseProblem(modmap.get('native'), `Declarations cannot be both 'abstract' and 'native'`, ProblemSeverity.Error));
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Modifier[]} mods
|
||||
* @param {'class'|'interface'|'enum'|'@interface'|'field'|'method'|'constructor'|'initializer'} decl_kind
|
||||
*/
|
||||
static checkAccessModifiers(mods, decl_kind) {
|
||||
let valid_mods = /^$/;
|
||||
switch (decl_kind) {
|
||||
case 'class': valid_mods = /^(public|final|abstract|strictfp)$/; break;
|
||||
case 'interface': valid_mods = /^(public|abstract|strictfp)$/; break;
|
||||
case '@interface': valid_mods = /^(public)$/; break;
|
||||
case 'enum': valid_mods = /^(public|final)$/; break;
|
||||
case 'field': valid_mods = /^(public|private|protected|static|final|volatile|transient)$/; break;
|
||||
case 'method': valid_mods = /^(public|private|protected|static|final|abstract|native|strictfp|synchronized)$/; break;
|
||||
case 'constructor': valid_mods = /^(public|protected|native)$/; break;
|
||||
case 'initializer': valid_mods = /^(static)$/; break;
|
||||
}
|
||||
const problems = [];
|
||||
for (let mod of mods) {
|
||||
if (mod instanceof Token) {
|
||||
if (!valid_mods.test(mod.text)) {
|
||||
problems.push(new ParseProblem(mod, `'${mod.text}' is not a valid modifier for a ${decl_kind} declaration`, ProblemSeverity.Warning));
|
||||
}
|
||||
const redundant = (mod.text === 'abstract' && decl_kind === 'interface')
|
||||
|| (mod.text === 'final' && decl_kind === 'enum');
|
||||
if (redundant) {
|
||||
problems.push(new ParseProblem(mod, `'${mod.text}' is redundant for a ${decl_kind} declaration`, ProblemSeverity.Hint));
|
||||
}
|
||||
}
|
||||
}
|
||||
return problems;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PackageDeclaration|ImportDeclaration} o
|
||||
*/
|
||||
static checkSemicolon(o) {
|
||||
if (!o.semicolon) {
|
||||
const lastToken = o.lastToken();
|
||||
return new ParseProblem(lastToken, 'Missing operator or semicolon', ProblemSeverity.Error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Token[]} tokens
|
||||
*/
|
||||
static checkNonKeywordIdents(tokens) {
|
||||
const res = [];
|
||||
const KEYWORDS = /^(abstract|assert|break|case|catch|class|const|continue|default|do|else|enum|extends|final|finally|for|goto|if|implements|import|interface|native|new|package|private|protected|public|return|static|strictfp|super|switch|synchronized|throw|throws|transient|try|volatile|while)$/;
|
||||
const PRIMITIVE_TYPE_KEYWORDS = /^(int|boolean|byte|char|double|float|long|short|void)$/
|
||||
const LITERAL_VALUE_KEYWORDS = /^(this|true|false|null)$/;
|
||||
const OPERATOR_KEYWORDS = /^(instanceof)$/;
|
||||
for (let token of tokens) {
|
||||
let iskw = KEYWORDS.test(token.text) || PRIMITIVE_TYPE_KEYWORDS.test(token.text) || LITERAL_VALUE_KEYWORDS.test(token.text) || OPERATOR_KEYWORDS.test(token.text);
|
||||
if (iskw) {
|
||||
const problem = new ParseProblem(token, `'${token.text}' is a keyword and cannot be used as an identifier`, ProblemSeverity.Error);
|
||||
res.push(problem);
|
||||
}
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {Token} token
|
||||
*/
|
||||
static syntaxError(token) {
|
||||
if (!token) return null;
|
||||
return new ParseProblem(token, 'Unsupported, invalid or incomplete declaration', ProblemSeverity.Error);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ParseProblem;
|
||||
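A small sketch (not part of the commit) showing how the validate() methods and ParseProblem combine: parsing an import with a missing semicolon and collecting the resulting problems. Require paths assume the langserver/java directory; severity values map to vscode DiagnosticSeverity as noted in problem-severity.js.

const { parse } = require('./parser');

const result = parse('import static java.util.List\n'); // note: missing semicolon
const problems = [
    ...result.imports.map(imp => imp.validate()).flat(),
    ...result.invalids.map(inv => inv.validate()).flat(),
].filter(p => p != null);

problems.forEach(p => {
    // startIdx/endIdx are character offsets into the original source
    console.log(`severity ${p.severity}: ${p.message} @ ${p.startIdx}-${p.endIdx}`);
});
// e.g. severity 1: Missing operator or semicolon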
24
langserver/java/parsetypes/parse-result.js
Normal file
@@ -0,0 +1,24 @@
|
||||
/**
|
||||
* @typedef {import('./import')} ImportDeclaration
|
||||
* @typedef {import('./package')} PackageDeclaration
|
||||
* @typedef {import('./parse-error')} ParseSyntaxError
|
||||
* @typedef {import('./type')} TypeDeclaration
|
||||
*/
|
||||
|
||||
class ParseResult {
|
||||
/**
|
||||
*
|
||||
* @param {PackageDeclaration} package_decl
|
||||
* @param {ImportDeclaration[]} imports
|
||||
* @param {TypeDeclaration[]} types
|
||||
* @param {ParseSyntaxError[]} invalids
|
||||
*/
|
||||
constructor(package_decl, imports, types, invalids) {
|
||||
this.package = package_decl;
|
||||
this.imports = imports;
|
||||
this.types = types;
|
||||
this.invalids = invalids;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = ParseResult;
|
||||
8
langserver/java/parsetypes/problem-severity.js
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* @typedef {1|2|3|4} Severity
|
||||
* @type {{ Error:1, Warning:2, Information:3, Hint:4 }}
|
||||
* these match the vscode DiagnosticSeverity values
|
||||
*/
|
||||
const ProblemSeverity = { Error:1, Warning:2, Information:3, Hint:4 };
|
||||
|
||||
module.exports = ProblemSeverity;
|
||||
45
langserver/java/parsetypes/resolved-import.js
Normal file
@@ -0,0 +1,45 @@
/**
 * @typedef {import('./import')} ImportDeclaration
 */

/**
 * Class representing a resolved import.
 *
 * Each instance holds an array of types that would be resolved by the specified import.
 * Each type is mapped to an MTI which lists the implementation details of the type (fields, methods, etc).
 *
 */
class ResolvedImport {
    /**
     * @param {ImportDeclaration} import_decl
     * @param {RegExpMatchArray} matches
     * @param {Map<string,*>} typemap
     * @param {'owner-package'|'import'|'implicit-import'} import_kind
     */
    constructor(import_decl, matches, typemap, import_kind) {
        /**
         * The associated import declaration.
         * - this value is null for owner-package and implicit-imports
         */
        this.import = import_decl;

        /**
         * Array of fully qualified type names in JRE format resolved in this import
         */
        this.fullyQualifiedNames = Array.from(matches);

        /**
         * The map of fully-qualified type names to MTIs
         */
        this.types = new Map(matches.map(name => [name, typemap.get(name)]));

        /**
         * What kind of import this is:
         * - `"owner-package"`: types that are implicitly imported from the same package as the declared module
         * - `"import"`: types that are included via an import declaration specified in the module
         * - `"implicit-import"`: types that are included without any explicit import (`java.lang.*` for example)
         */
        this.import_kind = import_kind;
    }
}

module.exports = ResolvedImport;
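A small illustrative sketch (not part of the committed sources) showing how the matched names are mapped through the typemap; the plain string array standing in for a RegExpMatchArray and the stub MTI object are assumptions.

const ResolvedImport = require('./resolved-import');

// stub typemap entry standing in for a real MTI
const typemap = new Map([['java.lang.String', { package: 'java.lang', name: 'String' }]]);

// implicit java.lang.* import: there is no import declaration, so import_decl is null
const ri = new ResolvedImport(null, ['java.lang.String'], typemap, 'implicit-import');

ri.fullyQualifiedNames;           // ['java.lang.String']
ri.types.get('java.lang.String'); // the stub MTI from the typemap
ri.import_kind;                   // 'implicit-import'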
105
langserver/java/parsetypes/resolved-type.js
Normal file
@@ -0,0 +1,105 @@
/**
 * @typedef {import('./token')} Token
 * @typedef {import('./type')} TypeDeclaration
 */

/**
 * Class representing a parsed and resolved type
 *
 * Each `ResolvedType` consists of a linked set of parsed `TypeParts` and an array dimensions count.
 * Each `TypePart` is a single dotted type with optional type arguments.
 *
 * When parsing, the first type part matches all dotted idents up to the first type with arguments - after
 * that, there is a single type part for each further enclosed type.
 *
 * Examples:
 *
 *   int -> one TypePart, arrdims = 0
 *   int[][] -> one TypePart, arrdims = 2
 *   List<String> -> one type part with one typeargs entry
 *   List<String>.InnerType -> two type parts (List<String> / InnerType)
 *   List<String>.InnerType.AnotherInner -> three type parts (List<String> / InnerType / AnotherInner)
 *   java.util.List<String>.InnerType<Object>.AnotherInner -> three type parts (java.util.List<String> / InnerType<Object> / AnotherInner)
 *   java.util.List.InnerType.AnotherInner -> one type part
 *
 * The reason for the non-obvious splitting is that the first part of the type could incorporate a package name - we
 * cannot tell which parts of the name are packages and which are types/enclosed types until we try to resolve it.
 * But type arguments are only allowed on types, so any qualifiers that appear after type arguments can only be a type and
 * so we split on each single identifier.
 *
 */
class ResolvedType {

    static TypePart = class TypePart {
        /**
         * The list of type arguments
         * @type {ResolvedType[]}
         */
        typeargs = null;

        /**
         * The outer type if this is an enclosed generic type
         * @type {ResolvedType.TypePart}
         */
        outer = null;
        inner = null;

        /**
         * @param {ResolvedType} owner
         * @param {string} name
         * @param {ResolvedType.TypePart} outer
         */
        constructor(owner, name, outer) {
            this.owner = owner;
            this.name = name;
            this.outer = outer;
        }

        get label() {
            return this.name + (this.typeargs ? `<${this.typeargs.map(arg => arg.label).join(',')}>` : '');
        }
    }

    /** @type {ResolvedType.TypePart[]} */
    parts = [];

    /**
     * number of array dimensions for this type
     */
    arrdims = 0;

    /**
     * Error reason if parsing failed.
     */
    error = '';

    /**
     * The resolved MTIs that match this type. This will be an empty array if the type cannot be found.
     * @type {import('../mti').Type[]}
     */
    mtis = [];

    /**
     * During parsing, add a new type part
     * @param {string} [name]
     * @param {ResolvedType.TypePart} [outer]
     */
    addTypePart(name = '', outer = null) {
        const p = new ResolvedType.TypePart(this, name, outer);
        this.parts.push(p);
        return p;
    }

    getDottedRawType() {
        // most types will only have one part
        if (this.parts.length === 1)
            return this.parts[0].name;
        return this.parts.map(p => p.name).join('.');
    }

    get label() {
        return this.parts.map(p => p.label).join('.') + '[]'.repeat(this.arrdims);
    }
}

module.exports = ResolvedType;
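A small illustrative sketch (not part of the committed sources): it hand-builds the parts that the parser would produce for `java.util.List<String>.Inner`, to show how `label` and `getDottedRawType()` derive from the parts. The construction is a simplified approximation and does not wire up the `inner`/`outer` links the real parser maintains.

const ResolvedType = require('./resolved-type');

const rt = new ResolvedType();
rt.addTypePart('java.util.List');        // first part absorbs package + top-level type

const arg = new ResolvedType();
arg.addTypePart('String');
rt.parts[0].typeargs = [arg];            // List<String>

rt.addTypePart('Inner', rt.parts[0]);    // enclosed type following the type arguments

rt.label;               // 'java.util.List<String>.Inner'
rt.getDottedRawType();  // 'java.util.List.Inner'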
17
langserver/java/parsetypes/token.js
Normal file
@@ -0,0 +1,17 @@
class Token {
    /**
     *
     * @param {number} source_idx
     * @param {string} text
     * @param {string} simplified_text
     * @param {number} simplified_text_idx
     */
    constructor(source_idx, text, simplified_text, simplified_text_idx) {
        this.source_idx = source_idx;
        this.text = text;
        this.simplified_text = simplified_text;
        this.simplified_text_idx = simplified_text_idx;
    }
}

module.exports = Token;
17
langserver/java/parsetypes/type-parameters.js
Normal file
@@ -0,0 +1,17 @@
/**
 * @typedef {import('./token')} Token
 */

class TypeParameters {
    /**
     *
     * @param {Token} open
     * @param {Token} close
     */
    constructor(open, close) {
        this.open = open;
        this.close = close;
    }
}

module.exports = TypeParameters;
229
langserver/java/parsetypes/type.js
Normal file
@@ -0,0 +1,229 @@
const Declaration = require('./declaration');
const ParseProblem = require('./parse-problem');
const ProblemSeverity = require('./problem-severity');
const ResolvedImport = require('../import-resolver').ResolvedImport;
const { resolveTypeIdents } = require('../type-resolver');
const Token = require('./token');

/**
 * @typedef {import('./import')} ImportDeclaration
 * @typedef {import('./fmc')} FMCDeclaration
 * @typedef {import('./modifier')} Modifier
 * @typedef {import('./parameter')} ParameterDeclaration
 * @typedef {import('./typeident')} TypeIdent
 */

/**
 * Represents a single Java type (class, interface, enum or @-interface) declaration
 */
class TypeDeclaration extends Declaration {
    /**
     *
     * @param {TypeDeclaration} owner_type
     * @param {Token} docs
     * @param {Modifier[]} modifiers
     * @param {'class'|'interface'|'enum'|'@interface'} kind
     * @param {Token} name
     */
    constructor(owner_type, docs, modifiers, kind, name) {
        super(owner_type, docs, modifiers);
        this.kind = kind;
        this.name = name;
        /** @type {FMCDeclaration[]} */
        this.declarations = [];
        /** @type {{decl_kw:Token, typelist:TypeIdent[]}[]} */
        this.super_declarations = [];
    }

    /**
     * returns the $-qualified name of this type (excluding package)
     */
    qualifiedName() {
        if (!this.owner_type) {
            // top-level type
            return this.name.text;
        }
        const parts = [];
        for (let t = this; t;) {
            parts.unshift(t.name.text);
            // @ts-ignore
            t = t.owner_type;
        }
        return parts.join('$');
    }

    qualifiedDottedName() {
        return this.qualifiedName().replace(/[$]/g, '.');
    }

    validate() {
        const checkSuperDeclarations = () => {
            const res = {
                extends: [],
                implements: [],
                first: this.super_declarations[0],
            };
            const problems = [];
            this.super_declarations.forEach((sd) => res[sd.decl_kw.text].push(sd));
            for (let i = 1; i < res.extends.length; i++) {
                problems.push(new ParseProblem(res.extends[i].decl_kw, `Types cannot have multiple 'extends' declarations`, ProblemSeverity.Error));
            }
            for (let i = 1; i < res.implements.length; i++) {
                problems.push(new ParseProblem(res.implements[i].decl_kw, `Types cannot have multiple 'implements' declarations`, ProblemSeverity.Error));
            }
            if (res.extends.length > 0 && res.implements.length > 0 && res.first.decl_kw.text !== 'extends') {
                problems.push(new ParseProblem(res.extends[0].decl_kw, `'extends' declaration must appear before 'implements'`, ProblemSeverity.Error));
            }
            if (this.kind === 'class' && res.extends.length === 1 && res.extends[0].typelist.length > 1) {
                problems.push(new ParseProblem(res.extends[0].decl_kw, `Class types cannot extend from multiple super types`, ProblemSeverity.Error));
            }
            return problems;
        };
        const checkDuplicateFieldNames = () => {
            // get list of fields, sorted by name
            const fields = this.declarations
                .filter((d) => d.kind === 'field')
                .slice()
                .sort((a, b) => a.name.text.localeCompare(b.name.text));
            const probs = [];
            let name = '';
            fields.forEach((decl, idx, arr) => {
                const next = arr[idx + 1];
                if ((next && decl.name.text === next.name.text) || decl.name.text === name) {
                    probs.push(new ParseProblem(decl.name, `Duplicate field name: '${decl.name.text}'`, ProblemSeverity.Error));
                }
                name = decl.name.text;
            });
            return probs;
        };
        let problems = [
            ...ParseProblem.checkDuplicateModifiers(this.modifiers),
            ...ParseProblem.checkConflictingModifiers(this.modifiers),
            ...ParseProblem.checkAccessModifiers(this.modifiers, this.kind),
            ...ParseProblem.checkNonKeywordIdents([this.name]),
            ...ParseProblem.checkNonKeywordIdents(this.declarations.map((d) => d.name)),
            ...checkDuplicateFieldNames(),
            ...checkSuperDeclarations(),
            ...this.declarations.reduce((probs, d) => {
                return [...probs, ...d.validate()];
            }, []),
        ];
        return problems;
    }

    /**
     * @param {string} package_name
     * @param {ResolvedImport[]} imports
     * @param {Map<string,*>} typemap
     */
    validateTypes(package_name, imports, typemap) {
        const problems = [];
        const fqtypename = package_name ? `${package_name}.${this.qualifiedName()}` : this.qualifiedName();

        /** @type {TypeIdent[]} */
        let typeidents = [];

        // check extends
        this.super_declarations.filter(sd => sd.decl_kw.text === 'extends').forEach(sd => {
            sd.typelist.forEach(typeident => typeidents.push(typeident));
        })
        const resolved_extends = resolveTypeIdents(typeidents, package_name, imports, typemap);
        resolved_extends.forEach((rt,i) => {
            checkResolvedType(rt, typeidents[i]);
            if (this.kind === 'class' && rt.mtis.length === 1) {
                // class extend type must be a class
                if (rt.mtis[0].typeKind !== 'class') {
                    problems.push(new ParseProblem(typeidents[i].tokens, `Class '${this.name.text}' cannot extend from ${rt.mtis[0].typeKind} '${rt.label}'; the specified type must be a non-final class.`, ProblemSeverity.Error));
                }
                // class extend type cannot be final
                else if (rt.mtis[0].hasModifier('final')) {
                    problems.push(new ParseProblem(typeidents[i].tokens, `Class '${this.name.text}' cannot extend from final class '${rt.mtis[0].fullyDottedRawName}'.`, ProblemSeverity.Error));
                }
            }
        });

        // check implements
        typeidents = [];
        this.super_declarations.filter(sd => sd.decl_kw.text === 'implements').forEach(sd => {
            sd.typelist.forEach(typeident => typeidents.push(typeident));
            if (this.kind !== 'class' && this.kind !== 'enum') {
                problems.push(new ParseProblem(sd.decl_kw, `implements declarations are not permitted for ${this.kind} types`, ProblemSeverity.Error));
            }
        })
        const resolved_implements = resolveTypeIdents(typeidents, package_name, imports, typemap);
        resolved_implements.forEach((rt,i) => {
            checkResolvedType(rt, typeidents[i]);
            if (/class|enum/.test(this.kind) && rt.mtis.length === 1) {
                // class implements types must be interfaces
                if (rt.mtis[0].typeKind !== 'interface') {
                    problems.push(new ParseProblem(typeidents[i].tokens, `Type '${this.name.text}' cannot implement ${rt.mtis[0].typeKind} type '${rt.mtis[0].fullyDottedRawName}'; the specified type must be an interface.`, ProblemSeverity.Error));
                }
                else if (!this.findModifier('abstract')) {
                    // if the class is not abstract, it must implement all the methods in the interface
                    // - we can't check this until the MTI for the class is complete
                    const unimplemented_methods = rt.mtis[0].methods.filter(m => true);
                    unimplemented_methods.forEach(method => {
                        problems.push(new ParseProblem(typeidents[i].tokens, `Type '${this.name.text}' is not abstract and does not implement method '${method.toDeclSource()}' declared in interface '${rt.mtis[0].fullyDottedRawName}'.`, ProblemSeverity.Error));
                    })
                }
            }
        });

        // check field, method-return and parameter types
        typeidents = [];
        this.declarations.forEach((d) => {
            if (d.kind !== 'constructor') {
                typeidents.push(d.type);
            }
            if (d.parameters) {
                d.parameters.forEach((p) => {
                    typeidents.push(p.type);
                });
            }
        });

        const resolved_types = resolveTypeIdents(typeidents, fqtypename, imports, typemap);
        // warn about missing and ambiguous types
        function checkResolvedType(rt, typeident) {
            if (rt.error) {
                problems.push(new ParseProblem(typeident.tokens, rt.error, ProblemSeverity.Error));
                return;
            }
            if (rt.mtis.length === 0) {
                problems.push(new ParseProblem(typeident.tokens, `Type not found: ${rt.label}`, ProblemSeverity.Error));
                return;
            }
            if (rt.mtis.length > 1) {
                const names = rt.mtis.map(mti => mti.fullyDottedRawName).join(`' or '`);
                problems.push(new ParseProblem(typeident.tokens, `Ambiguous type: ${rt.label} - could be '${names}'.`, ProblemSeverity.Error));
                return;
            }
            rt.mtis.forEach(mti => {
                // void arrays are illegal
                if (mti.name.startsWith('void[')) {
                    problems.push(new ParseProblem(typeident.tokens, `primitive void arrays are not a valid type.`, ProblemSeverity.Error));
                }
            })
        }
        resolved_types.forEach((rt,i) => {
            checkResolvedType(rt, typeidents[i]);

            // check any type arguments
            rt.parts.filter(p => p.typeargs).forEach(p => {
                p.typeargs.forEach(typearg => {
                    checkResolvedType(typearg, typeidents[i]);
                    // check type arguments are not primitives (primitive arrays are ok)
                    if (typearg.mtis.length === 1) {
                        if (typearg.mtis[0].typeKind === 'primitive') {
                            problems.push(new ParseProblem(typeidents[i].tokens, `Type arguments cannot be primitive types.`, ProblemSeverity.Error));
                        }
                    }
                })
            });

        });
        return problems;
    }
}

module.exports = TypeDeclaration;
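A brief illustrative sketch (not part of the committed sources) of the $-qualified naming, assuming the Declaration base class stores its first constructor argument as owner_type (which qualifiedName() relies on); the Token arguments use placeholder simplified-text values.

const TypeDeclaration = require('./type');
const Token = require('./token');

const outer = new TypeDeclaration(null, null, [], 'class', new Token(0, 'Outer', 'W', 0));
const inner = new TypeDeclaration(outer, null, [], 'interface', new Token(0, 'Inner', 'W', 0));

inner.qualifiedName();        // 'Outer$Inner'
inner.qualifiedDottedName();  // 'Outer.Inner'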
24
langserver/java/parsetypes/typeident.js
Normal file
@@ -0,0 +1,24 @@
/**
 * @typedef {import('./token')} Token
 * @typedef {import('./resolved-type')} ResolvedType
 */

/**
 * Class to represent a declared type in methods, fields, parameters and variables
 */
class TypeIdent {
    /**
     * @param {Token[]} tokens
     */
    constructor(tokens) {
        this.tokens = tokens;
        /** @type {ResolvedType} */
        this.resolved = null;
    }

    lastToken() {
        return this.tokens[this.tokens.length - 1];
    }
}

module.exports = TypeIdent;
264
langserver/java/type-resolver.js
Normal file
@@ -0,0 +1,264 @@
const { ResolvedImport } = require('./import-resolver');
const MTI = require('./mti');
const ResolvedType = require('./parsetypes/resolved-type');

/**
 * Parse a type into its various components
 * @param {string} label
 * @returns {{type:ResolvedType, error:string}}
 */
function parse_type(label) {
    const type = new ResolvedType();
    let re = /([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)|(\.[a-zA-Z_]\w*)|[<,>]|((?:\[\])+)|( +)|./g;
    let parts = [type.addTypePart()];
    for (let m; m = re.exec(label);) {
        if (m[4]) {
            // ignore ws
            continue;
        }
        if (!parts[0].name) {
            if (m[1]) {
                parts[0].name = m[1];
                continue;
            }
            return { type, error: 'Missing type identifier' };
        }
        if (m[0] === '<') {
            if (!parts[0].typeargs && !parts[0].owner.arrdims) {
                // start of type arguments - start a new type
                const t = new ResolvedType();
                parts[0].typeargs = [t];
                parts.unshift(t.addTypePart());
                continue;
            }
            return { type, error: `Unexpected '<' character` };
        }
        if (m[0] === ',') {
            if (parts[1] && parts[1].typeargs) {
                // type argument separator - replace the type on the stack
                const t = new ResolvedType();
                parts[1].typeargs.push(t);
                parts[0] = t.addTypePart();
                continue;
            }
            return { type, error: `Unexpected ',' character` };
        }
        if (m[0] === '>') {
            if (parts[1] && parts[1].typeargs) {
                // end of type arguments
                parts.shift();
                continue;
            }
            return { type, error: `Unexpected '>' character` };
        }
        if (m[2]) {
            if (parts[0].typeargs || parts[0].outer) {
                // post-type-args enclosed type
                parts[0] = parts[0].inner = parts[0].owner.addTypePart(m[2].slice(1), parts[0]);
                continue;
            }
            return { type, error: `Unexpected '.' character` };
        }
        if (m[3]) {
            parts[0].owner.arrdims = m[3].length / 2;
            continue;
        }
        return { type, error: `Invalid type` };
    }

    if (parts.length !== 1) {
        // one or more missing >
        return { type, error: `Missing >` };
    }

    return { type, error: '' };
}


/**
 * Construct a regex to search for an enclosed type in the current and outer scopes of a given type
 *
 * @param {string} fully_qualified_scope the JRE name (a.b.X$Y) of the current type scope
 * @param {string} dotted_raw_typename the dotted name of the type we are searching for
 */
function createTypeScopeRegex(fully_qualified_scope, dotted_raw_typename) {
    // split the type name across enclosed type boundaries
    const scopes = fully_qualified_scope.split('$');

    // the first scope is the dotted package name and top-level type - we need to escape the package-qualifier dots for regex
    scopes[0] = scopes[0].replace(/\./g,'[.]');

    // if the typename we are searching represents an enclosed type, the type-qualifier dots must be replaced with $
    const enclosed_raw_typename = dotted_raw_typename.replace(/\./g,'[$]');

    // build up the list of possible type matches based upon each outer scope of the type
    const enclosed_type_regexes = [];
    while (scopes.length) {
        enclosed_type_regexes.push(`${scopes.join('[$]')}[$]${enclosed_raw_typename}`);
        scopes.pop();
    }
    // the final regex is an exact match of possible type names, sorted from inner scope to outer (top-level) scope
    return new RegExp(`^(${enclosed_type_regexes.join('|')})$`);
}

/**
 * Locate MTIs that match a type.
 * @param {string} typename The type to resolve
 * @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
 * @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
 * @param {Map<string,MTI.Type>} typemap the global list of types
 */
function resolveType(typename, fully_qualified_scope, resolved_imports, typemap) {
    const { type, error } = parse_type(typename);
    if (error) {
        // don't try to find the type if the parsing failed
        type.error = error;
        return type;
    }

    // locate the MTIs for the type and type arguments
    resolveCompleteType(type, fully_qualified_scope, resolved_imports, typemap);
    return type;
}

/**
 *
 * @param {ResolvedType} type
 * @param {string} fully_qualified_scope
 * @param {ResolvedImport[]} resolved_imports
 * @param {Map<string,MTI.Type>} typemap
 */
function resolveCompleteType(type, fully_qualified_scope, resolved_imports, typemap) {

    type.mtis = findTypeMTIs(type.getDottedRawType(), type.arrdims, fully_qualified_scope, resolved_imports, typemap);

    // resolve type arguments
    type.parts.filter(p => p.typeargs).forEach(p => {
        p.typeargs.forEach(typearg => {
            resolveCompleteType(typearg, fully_qualified_scope, resolved_imports, typemap);
        })
    })
}


/**
 * @param {string} dotted_raw_typename
 * @param {number} arraydims
 * @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
 * @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
 * @param {Map<string,MTI.Type>} typemap
 */
function findTypeMTIs(dotted_raw_typename, arraydims, fully_qualified_scope, resolved_imports, typemap) {
    let mtis = findRawTypeMTIs(dotted_raw_typename, fully_qualified_scope, resolved_imports, typemap);

    if (arraydims > 0) {
        // convert matches to array MTIs
        mtis.forEach((mti,idx,arr) => {
            arr[idx] = MTI.makeArrayType(mti, arraydims);
        })
    }

    return mtis;
}

/**
 * Match a dotted type name to one or more MTIs
 * @param {string} dotted_raw_typename
 * @param {string} fully_qualified_scope The fully-qualified JRE name of the current type scope.
 * @param {ResolvedImport[]} resolved_imports The list of types resolved from the imports
 * @param {Map<string,MTI.Type>} typemap
 */
function findRawTypeMTIs(dotted_raw_typename, fully_qualified_scope, resolved_imports, typemap) {

    // first check if it's a simple primitive
    if (/^(int|char|boolean|void|long|byte|short|float|double)$/.test(dotted_raw_typename)) {
        // return the primitive type
        return [MTI.fromPrimitive(dotted_raw_typename)];
    }

    // create a regex to search for the type name
    // - the first search is for exact type matches inside the current type scope (and any parent type scopes)
    let search = createTypeScopeRegex(fully_qualified_scope, dotted_raw_typename);
    let matched_types =
        resolved_imports.map(ri => ({
            ri,
            mtis: ri.fullyQualifiedNames.filter(fqn => search.test(fqn)).map(fqn => ri.types.get(fqn))
        }))
        .filter(x => x.mtis.length);

    if (!matched_types.length) {
        // if the type was not found in the current type scope, construct a new search for the imported types.
        // - since we don't know if the type name includes package qualifiers or not, this regex allows for implicit
        //   package prefixes (todo - need to figure out static type imports)
        search = new RegExp(`^(.+?[.])?${dotted_raw_typename.replace(/\./g,'[.$]')}$`);

        // search the imports for the type
        matched_types =
            resolved_imports.map(ri => ({
                ri,
                mtis: ri.fullyQualifiedNames.filter(fqn => search.test(fqn)).map(fqn => ri.types.get(fqn))
            }))
            .filter(x => x.mtis.length);
    }

    // if the type matches multiple import entries, exact imports take priority over demand-load imports
    let exact_import_matches = matched_types.filter(x => x.ri.import && !x.ri.import.asterisk);
    if (exact_import_matches.length) {
        if (exact_import_matches.length < matched_types.length) {
            matched_types = exact_import_matches;
        }
    }

    if (!matched_types.length) {
        // if the type doesn't match any import, the final option is a fully qualified match across all types in all libraries
        search = new RegExp(`^${dotted_raw_typename.replace(/\./g,'[.$]')}$`);
        for (let typename of typemap.keys()) {
            if (search.test(typename)) {
                matched_types = [{
                    ri: null,
                    mtis: [typemap.get(typename)]
                }];
                break;
            }
        }
    }

    // at this point, we should (hopefully) have a single matched type
    // - if the matched_types array is empty, the type is not found
    // - if the matched_type array has more than one entry, the type matches types across multiple imports
    // - if the matched_type array has one entry and multiple MTIs, the type matches multiple types in a single import
    return matched_types.reduce((mtis,mt) => [...mtis, ...mt.mtis], []);
}

/**
 * Converts an array of type name strings to resolved types
 * @param {string[]} types
 * @param {string} fully_qualified_scope the JRE name of the type scope we are resolving in
 * @param {ResolvedImport[]} resolved_imports the list of resolved imports (and types associated with them)
 * @param {Map<string,MTI.Type>} typemap
 */
function resolveTypes(types, fully_qualified_scope, resolved_imports, typemap) {
    return types.map(typename => resolveType(typename, fully_qualified_scope, resolved_imports, typemap));
}

/**
 * Converts an array of TypeIdent instances to resolved types
 * @param {import('./parsetypes/typeident')[]} types
 * @param {string} fully_qualified_scope the JRE name of the type scope we are resolving in
 * @param {ResolvedImport[]} resolved_imports the list of resolved imports (and types associated with them)
 * @param {Map<string,MTI.Type>} typemap
 */
function resolveTypeIdents(types, fully_qualified_scope, resolved_imports, typemap) {
    const names = types.map(typeident =>
        typeident.tokens.map(token => token.text).join('')
    );
    return resolveTypes(names, fully_qualified_scope, resolved_imports, typemap);
}


module.exports = {
    parse_type,
    resolveTypes,
    resolveTypeIdents,
    ResolvedType,
}
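A short usage sketch (not part of the committed sources) of the exported parse_type, tracing the splitting rules described in resolved-type.js; the expected values follow from the grammar above.

const { parse_type } = require('./type-resolver');

const { type, error } = parse_type('java.util.List<String>.Inner[]');

error;                         // ''
type.arrdims;                  // 1
type.parts.map(p => p.label);  // ['java.util.List<String>', 'Inner']
type.label;                    // 'java.util.List<String>.Inner[]'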