diff --git a/langserver/java/body-parser3.js b/langserver/java/body-parser3.js
index b7721b7..c845fc4 100644
--- a/langserver/java/body-parser3.js
+++ b/langserver/java/body-parser3.js
@@ -193,50 +193,59 @@ function extractSourceTypes(tokens, typemap) {
 }
 
 /**
- * @param {string} source
+ * @param {{uri:string, content:string, version:number}[]} docs
+ * @param {SourceUnit[]} cached_units
  * @param {Map} typemap
+ * @returns {SourceUnit[]}
  */
-function parse(source, typemap) {
-    const unit = new SourceUnit();
-    /** @type {ParseProblem[]} */
-    let problems = [];
-    let tokens, timers = new Set();
+function parse(docs, cached_units, typemap) {
+
+    const timers = new Set();
     const time = name => (timers.add(name), console.time(name));
     const timeEnd = name => (timers.delete(name), console.timeEnd(name));
-    try {
-        time('tokenize');
-        tokens = new TokenList(unit.tokens = tokenize(source));
-        problems = tokens.problems;
-        timeEnd('tokenize');
-        // in order to resolve types as we parse, we must extract the set of source types first
-        const source_types = extractSourceTypes(tokens, typemap);
+    time('tokenize');
+    const sources = docs.reduce((arr, doc) => {
+        try {
+            const unit = new SourceUnit();
+            unit.uri = doc.uri;
+            const tokens = new TokenList(unit.tokens = tokenize(doc.content));
+            arr.push({ unit, tokens });
+        } catch(err) {
+        }
+        return arr;
+    }, [])
+    timeEnd('tokenize');
+
+    // add the cached types to the type map
+    cached_units.forEach(unit => {
+        unit.types.forEach(t => typemap.set(t.shortSignature, t));
+    })
+
+    // in order to resolve types as we parse, we must extract the set of source types first
+    sources.forEach(source => {
+        const source_types = extractSourceTypes(source.tokens, typemap);
         // add them to the type map
         source_types.forEach(t => typemap.set(t.shortSignature, t));
+    })
 
-        time('parse');
-        parseUnit(tokens, unit, typemap);
-        timeEnd('parse');
-
-        // once all the types have been parsed, resolve any field initialisers
-        const ri = new ResolveInfo(typemap, tokens.problems);
-        unit.types.forEach(t => {
-            t.fields.filter(f => f.init).forEach(f => checkAssignment(ri, f.type, f.init));
-        });
-
-    } catch(err) {
-        timers.forEach(timeEnd);
-        if (tokens && tokens.current) {
-            addproblem(tokens, ParseProblem.Error(tokens.current, `Parse failed: ${err.message}`));
-        } else {
-            console.log(`Parse failed: ${err.message}`);
+    // parse all the tokenized sources
+    time('parse');
+    sources.forEach(source => {
+        try {
+            parseUnit(source.tokens, source.unit, typemap);
+            // once all the types have been parsed, resolve any field initialisers
+            // const ri = new ResolveInfo(typemap, tokens.problems);
+            // unit.types.forEach(t => {
+            //     t.fields.filter(f => f.init).forEach(f => checkAssignment(ri, f.type, f.init));
+            // });
+        } catch (err) {
+            addproblem(source.tokens, ParseProblem.Error(source.tokens.current, `Parse failed: ${err.message}`));
         }
-    }
+    });
+    timeEnd('parse');
 
-    return {
-        unit,
-        problems,
-    }
+    return sources.map(s => s.unit);
 }
 
 /**
diff --git a/langserver/java/source-types.js b/langserver/java/source-types.js
index bc3d609..7d4720d 100644
--- a/langserver/java/source-types.js
+++ b/langserver/java/source-types.js
@@ -555,6 +555,8 @@ class SourceImport {
 }
 
 class SourceUnit {
+    /** @type {string} */
+    uri = '';
     /** @type {Token[]} */
     tokens = [];
     /** @type {SourcePackage} */
@@ -614,6 +616,13 @@ class SourceUnit {
             method,
         };
     }
+
+    /**
+     * Return the name of the package this unit belongs to
+     */
+    get packageName() {
+        return (this.package_ && this.package_.name) || '';
+    }
 }
 
 class SourceArrayType extends ArrayType {
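
Note (not part of the patch): a minimal sketch of how the reworked multi-document parse() above might be driven. The document URIs and contents are made up, and androidLibrary stands in for the preloaded library type map that the server seeds into typemap elsewhere in this change.

    // Hypothetical caller of the new parse(docs, cached_units, typemap) entry point.
    const typemap = new Map(androidLibrary);   // assumption: library types keyed by short signature
    const docs = [
        { uri: 'file:///project/src/A.java', content: 'package example; class A {}', version: 1 },
        { uri: 'file:///project/src/B.java', content: 'package example; class B {}', version: 1 },
    ];
    const cached_units = [];                   // SourceUnits of open-but-unchanged documents
    const units = parse(docs, cached_units, typemap);  // one SourceUnit per tokenizable document
    units.forEach(unit => console.log(unit.uri, unit.packageName));
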
diff --git a/langserver/java/validater.js b/langserver/java/validater.js
index 636df08..dc70294 100644
--- a/langserver/java/validater.js
+++ b/langserver/java/validater.js
@@ -5,22 +5,22 @@ const { parseBody } = require('./body-parser3');
 
 /**
  * @param {SourceUnit} unit
- * @param {Map} androidLibrary
+ * @param {Map} typemap
  */
-function parseMethodBodies(unit, androidLibrary) {
+function parseMethodBodies(unit, typemap) {
     const resolved_types = [
-        ...resolveImports(androidLibrary, [], [], null).resolved,
+        ...resolveImports(typemap, [], [], unit.packageName).resolved,
         ...unit.imports.filter(i => i.resolved).map(i => i.resolved),
     ]
     unit.types.forEach(t => {
         t.initers.forEach(i => {
-            i.parsed = parseBody(i, resolved_types, androidLibrary);
+            i.parsed = parseBody(i, resolved_types, typemap);
         })
         t.constructors.forEach(c => {
-            c.parsed = parseBody(c, resolved_types, androidLibrary);
+            c.parsed = parseBody(c, resolved_types, typemap);
         })
         t.sourceMethods.forEach(m => {
-            m.parsed = parseBody(m, resolved_types, androidLibrary);
+            m.parsed = parseBody(m, resolved_types, typemap);
         })
     })
 }
diff --git a/langserver/server.js b/langserver/server.js
index 7557dfe..c86147b 100644
--- a/langserver/server.js
+++ b/langserver/server.js
@@ -111,23 +111,34 @@ const liveParsers = new Map();
 
 /**
  *
- * @param {string} uri
+ * @param {string[]} uris
  */
-function reparse(uri) {
+function reparse(uris) {
     if (androidLibrary instanceof Promise) {
         return;
     }
-    const doc = liveParsers.get(uri);
-    if (!doc) {
+    const cached_units = [], parsers = [];
+    for (let docinfo of liveParsers.values()) {
+        if (uris.includes(docinfo.uri)) {
+            // make a copy of the content in case doc changes while we're parsing
+            parsers.push({uri: docinfo.uri, content: docinfo.content, version: docinfo.version});
+        } else if (docinfo.parsed) {
+            cached_units.push(docinfo.parsed.unit);
+        }
+    }
+    if (!parsers.length) {
         return;
     }
-    const { content, version } = doc;
     const typemap = new Map(androidLibrary);
-    const result = parse(content, typemap);
-    if (result) {
-        parseMethodBodies(result.unit, typemap);
-    }
-    doc.parsed = new ParsedInfo(uri, content, version, typemap, result.unit, result.problems);
+    const units = parse(parsers, cached_units, typemap);
+    units.forEach(unit => {
+        const parser = parsers.find(p => p.uri === unit.uri);
+        if (!parser) return;
+        const doc = liveParsers.get(unit.uri);
+        if (!doc) return;
+        doc.parsed = new ParsedInfo(doc.uri, parser.content, parser.version, typemap, unit, []);
+        parseMethodBodies(unit, typemap);
+    });
 }
 
 // Create a simple text document manager. The text document manager
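
Note (not part of the patch): a small hedged example of how the array-based reparse() above might be exercised once several documents are open. The URIs are illustrative; JavaDocInfo and liveParsers are the ones declared in server.js.

    // Changed documents are reparsed together; every other open document that has a
    // previous parse contributes its cached SourceUnit to the shared type map.
    liveParsers.set('file:///project/src/A.java', new JavaDocInfo('file:///project/src/A.java', 'class A {}', 1));
    liveParsers.set('file:///project/src/B.java', new JavaDocInfo('file:///project/src/B.java', 'class B {}', 1));
    reparse(['file:///project/src/A.java', 'file:///project/src/B.java']);  // initial parse of both
    reparse(['file:///project/src/A.java']);  // only A is retokenized; B's cached SourceUnit feeds the type map
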
@@ -144,7 +155,7 @@ let documents = new TextDocuments({
         // tokenize the file content and build the initial parse state
         connection.console.log(`create parse ${version}`);
         liveParsers.set(uri, new JavaDocInfo(uri, content, version));
-        reparse(uri);
+        reparse([uri]);
         return { uri };
     },
     /**
@@ -173,7 +184,7 @@ let documents = new TextDocuments({
                 docinfo.content = `${docinfo.content.slice(0, start_index)}${change.text}${docinfo.content.slice(end_index)}`;
             }
         });
-        reparse(document.uri);
+        reparse([document.uri]);
         return document;
     },
 });
@@ -650,7 +661,7 @@ connection.onCompletion(
             return [];
         }
         const parsed = docinfo.parsed;
-        const lib = lastCompletionTypeMap = (parsed && parsed.typemap) || androidLibrary;
+        lastCompletionTypeMap = (parsed && parsed.typemap) || androidLibrary;
         let locals = [], sourceTypes = [], show_instances = false;
         if (parsed.unit) {
             const index = indexAt(_textDocumentPosition.position, parsed.content);
@@ -680,17 +691,26 @@ connection.onCompletion(
                     sortText: p.name,
                 }))
             }
-            sourceTypes = parsed.unit.types.map(t => ({
-                label: t.dottedTypeName,
-                kind: typeKindMap[t.typeKind],
-                data: { type:t.shortSignature },
-                sortText: t.dottedTypeName,
-            }))
         }
 
         if (!defaultCompletionTypes) {
             initDefaultCompletionTypes(androidLibrary);
         }
+
+        liveParsers.forEach(doc => {
+            if (!doc.parsed) {
+                return;
+            }
+            doc.parsed.unit.types.forEach(
+                t => sourceTypes.push({
+                    label: t.dottedTypeName,
+                    kind: typeKindMap[t.typeKind],
+                    data: { type:t.shortSignature },
+                    sortText: t.dottedTypeName,
+                })
+            )
+        });
+
         return [
             ...locals,
             ...(show_instances ? defaultCompletionTypes.instances : []),