allow multiple source files to be used in parsing

Dave Holoway
2020-06-26 10:32:32 +01:00
parent 175ce1d644
commit 7038bf7090
4 changed files with 97 additions and 59 deletions

View File

@@ -193,50 +193,59 @@ function extractSourceTypes(tokens, typemap) {
 }
 /**
- * @param {string} source
+ * @param {{uri:string, content:string, version:number}[]} docs
+ * @param {SourceUnit[]} cached_units
  * @param {Map<string,CEIType>} typemap
+ * @returns {SourceUnit[]}
  */
-function parse(source, typemap) {
-    const unit = new SourceUnit();
-    /** @type {ParseProblem[]} */
-    let problems = [];
-    let tokens, timers = new Set();
+function parse(docs, cached_units, typemap) {
+    const timers = new Set();
     const time = name => (timers.add(name), console.time(name));
     const timeEnd = name => (timers.delete(name), console.timeEnd(name));
-    try {
     time('tokenize');
-    tokens = new TokenList(unit.tokens = tokenize(source));
-    problems = tokens.problems;
+    const sources = docs.reduce((arr, doc) => {
+        try {
+            const unit = new SourceUnit();
+            unit.uri = doc.uri;
+            const tokens = new TokenList(unit.tokens = tokenize(doc.content));
+            arr.push({ unit, tokens });
+        } catch(err) {
+        }
+        return arr;
+    }, [])
     timeEnd('tokenize');
+    // add the cached types to the type map
+    cached_units.forEach(unit => {
+        unit.types.forEach(t => typemap.set(t.shortSignature, t));
+    })
     // in order to resolve types as we parse, we must extract the set of source types first
-    const source_types = extractSourceTypes(tokens, typemap);
+    sources.forEach(source => {
+        const source_types = extractSourceTypes(source.tokens, typemap);
         // add them to the type map
         source_types.forEach(t => typemap.set(t.shortSignature, t));
+    })
+    // parse all the tokenized sources
     time('parse');
-    parseUnit(tokens, unit, typemap);
+    sources.forEach(source => {
+        try {
+            parseUnit(source.tokens, source.unit, typemap);
+            // once all the types have been parsed, resolve any field initialisers
+            // const ri = new ResolveInfo(typemap, tokens.problems);
+            // unit.types.forEach(t => {
+            //     t.fields.filter(f => f.init).forEach(f => checkAssignment(ri, f.type, f.init));
+            // });
+        } catch (err) {
+            addproblem(source.tokens, ParseProblem.Error(source.tokens.current, `Parse failed: ${err.message}`));
+        }
+    });
     timeEnd('parse');
-    // once all the types have been parsed, resolve any field initialisers
-    const ri = new ResolveInfo(typemap, tokens.problems);
-    unit.types.forEach(t => {
-        t.fields.filter(f => f.init).forEach(f => checkAssignment(ri, f.type, f.init));
-    });
-    } catch(err) {
-        timers.forEach(timeEnd);
-        if (tokens && tokens.current) {
-            addproblem(tokens, ParseProblem.Error(tokens.current, `Parse failed: ${err.message}`));
-        } else {
-            console.log(`Parse failed: ${err.message}`);
-        }
-    }
-    return {
-        unit,
-        problems,
-    }
 }
 /**
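For context, a minimal sketch of how the reworked parse entry point might be driven by a caller, assuming androidLibrary is the preloaded platform type map used elsewhere in this change; the document URIs, the sourceA/sourceB contents and the cached_units value here are hypothetical:

    // hypothetical caller: parse two open documents together, passing along
    // units cached from earlier parses so their types stay visible in the typemap
    const typemap = new Map(androidLibrary);
    const docs = [
        { uri: 'file:///workspace/A.java', content: sourceA, version: 1 },
        { uri: 'file:///workspace/B.java', content: sourceB, version: 1 },
    ];
    const units = parse(docs, cached_units, typemap);   // now returns SourceUnit[]
    units.forEach(unit => parseMethodBodies(unit, typemap));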

View File

@@ -555,6 +555,8 @@ class SourceImport {
 }
 class SourceUnit {
+    /** @type {string} */
+    uri = '';
     /** @type {Token[]} */
     tokens = [];
     /** @type {SourcePackage} */
@@ -614,6 +616,13 @@ class SourceUnit {
             method,
         };
     }
+    /**
+     * Return the name of the package this unit belongs to
+     */
+    get packageName() {
+        return (this.package_ && this.package_.name) || '';
+    }
 }
 class SourceArrayType extends ArrayType {

View File

@@ -5,22 +5,22 @@ const { parseBody } = require('./body-parser3');
 /**
  * @param {SourceUnit} unit
- * @param {Map<string, CEIType>} androidLibrary
+ * @param {Map<string, CEIType>} typemap
  */
-function parseMethodBodies(unit, androidLibrary) {
+function parseMethodBodies(unit, typemap) {
     const resolved_types = [
-        ...resolveImports(androidLibrary, [], [], null).resolved,
+        ...resolveImports(typemap, [], [], unit.packageName).resolved,
         ...unit.imports.filter(i => i.resolved).map(i => i.resolved),
     ]
     unit.types.forEach(t => {
         t.initers.forEach(i => {
-            i.parsed = parseBody(i, resolved_types, androidLibrary);
+            i.parsed = parseBody(i, resolved_types, typemap);
         })
         t.constructors.forEach(c => {
-            c.parsed = parseBody(c, resolved_types, androidLibrary);
+            c.parsed = parseBody(c, resolved_types, typemap);
         })
         t.sourceMethods.forEach(m => {
-            m.parsed = parseBody(m, resolved_types, androidLibrary);
+            m.parsed = parseBody(m, resolved_types, typemap);
         })
     })
 }

View File

@@ -111,23 +111,34 @@ const liveParsers = new Map();
 /**
  *
- * @param {string} uri
+ * @param {string[]} uris
  */
-function reparse(uri) {
+function reparse(uris) {
     if (androidLibrary instanceof Promise) {
         return;
     }
-    const doc = liveParsers.get(uri);
-    if (!doc) {
+    const cached_units = [], parsers = [];
+    for (let docinfo of liveParsers.values()) {
+        if (uris.includes(docinfo.uri)) {
+            // make a copy of the content in case doc changes while we're parsing
+            parsers.push({uri: docinfo.uri, content: docinfo.content, version: docinfo.version});
+        } else if (docinfo.parsed) {
+            cached_units.push(docinfo.parsed.unit);
+        }
+    }
+    if (!parsers.length) {
         return;
     }
-    const { content, version } = doc;
     const typemap = new Map(androidLibrary);
-    const result = parse(content, typemap);
-    if (result) {
-        parseMethodBodies(result.unit, typemap);
-    }
-    doc.parsed = new ParsedInfo(uri, content, version, typemap, result.unit, result.problems);
+    const units = parse(parsers, cached_units, typemap);
+    units.forEach(unit => {
+        const parser = parsers.find(p => p.uri === unit.uri);
+        if (!parser) return;
+        const doc = liveParsers.get(unit.uri);
+        if (!doc) return;
+        doc.parsed = new ParsedInfo(doc.uri, parser.content, parser.version, typemap, unit, []);
+        parseMethodBodies(unit, typemap);
+    });
 }
 // Create a simple text document manager. The text document manager
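As a usage note, the call sites below pass a single-element array, but the new reparse signature also supports batching; a hypothetical full refresh of every open document could look like this (liveParsers is keyed by document uri):

    // hypothetical: re-run the parser over every open document in one pass
    reparse([...liveParsers.keys()]);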
@@ -144,7 +155,7 @@ let documents = new TextDocuments({
         // tokenize the file content and build the initial parse state
         connection.console.log(`create parse ${version}`);
         liveParsers.set(uri, new JavaDocInfo(uri, content, version));
-        reparse(uri);
+        reparse([uri]);
         return { uri };
     },
     /**
@@ -173,7 +184,7 @@ let documents = new TextDocuments({
                 docinfo.content = `${docinfo.content.slice(0, start_index)}${change.text}${docinfo.content.slice(end_index)}`;
             }
         });
-        reparse(document.uri);
+        reparse([document.uri]);
         return document;
     },
 });
@@ -650,7 +661,7 @@ connection.onCompletion(
             return [];
         }
         const parsed = docinfo.parsed;
-        const lib = lastCompletionTypeMap = (parsed && parsed.typemap) || androidLibrary;
+        lastCompletionTypeMap = (parsed && parsed.typemap) || androidLibrary;
         let locals = [], sourceTypes = [], show_instances = false;
         if (parsed.unit) {
             const index = indexAt(_textDocumentPosition.position, parsed.content);
@@ -680,17 +691,26 @@ connection.onCompletion(
                     sortText: p.name,
                 }))
             }
-            sourceTypes = parsed.unit.types.map(t => ({
-                label: t.dottedTypeName,
-                kind: typeKindMap[t.typeKind],
-                data: { type:t.shortSignature },
-                sortText: t.dottedTypeName,
-            }))
         }
         if (!defaultCompletionTypes) {
             initDefaultCompletionTypes(androidLibrary);
         }
+        liveParsers.forEach(doc => {
+            if (!doc.parsed) {
+                return;
+            }
+            doc.parsed.unit.types.forEach(
+                t => sourceTypes.push({
+                    label: t.dottedTypeName,
+                    kind: typeKindMap[t.typeKind],
+                    data: { type:t.shortSignature },
+                    sortText: t.dottedTypeName,
+                })
+            )
+        });
         return [
             ...locals,
             ...(show_instances ? defaultCompletionTypes.instances : []),