Remove sources for all packages except SpaServices.Extensions, since that's all this OOB release branch needs to build

Steve Sanderson
2017-11-16 10:19:04 +00:00
parent f2175e6c23
commit dcbe4b1c33
116 changed files with 0 additions and 9534 deletions

View File

@@ -8,12 +8,6 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{27304DDE-AFB
src\Directory.Build.props = src\Directory.Build.props
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices", "src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj", "{66B77203-1469-41DF-92F2-2BE6900BD36F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices.Sockets", "src\Microsoft.AspNetCore.NodeServices.Sockets\Microsoft.AspNetCore.NodeServices.Sockets.csproj", "{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.SpaServices", "src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj", "{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-22C8-4526-BE78-74B24125D37F}" Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-22C8-4526-BE78-74B24125D37F}"
ProjectSection(SolutionItems) = preProject ProjectSection(SolutionItems) = preProject
.gitignore = .gitignore .gitignore = .gitignore
@@ -35,18 +29,6 @@ Global
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.Build.0 = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.Build.0 = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.Build.0 = Release|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -56,9 +38,6 @@ Global
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{D40BD1C4-6A6F-4213-8535-1057F3EB3400} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution

View File

@@ -1,3 +0,0 @@
/bin/
/node_modules/
yarn.lock

View File

@@ -1,524 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
var net = __webpack_require__(3);
var path = __webpack_require__(4);
var readline = __webpack_require__(5);
var ArgsUtil_1 = __webpack_require__(6);
var ExitWhenParentExits_1 = __webpack_require__(7);
var virtualConnectionServer = __webpack_require__(8);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
// Signal to the .NET side when we're ready to accept invocations
var server = net.createServer().on('listening', function () {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', function (connection) {
readline.createInterface(connection, null).on('line', function (line) {
try {
// Get a reference to the function to invoke
var invocation = JSON.parse(line);
var invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
var invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
var hasInvokedCallback_1 = false;
var invocationCallback = function (errorValue, successValue) {
if (hasInvokedCallback_1) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback_1 = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: function () {
hasInvokedCallback_1 = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
}
catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
var useWindowsNamedPipes = /^win/.test(process.platform);
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid));
/***/ },
/* 2 */
/***/ function(module, exports) {
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("net");
/***/ },
/* 4 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("readline");
/***/ },
/* 6 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 7 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ },
/* 8 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var events_1 = __webpack_require__(9);
var VirtualConnection_1 = __webpack_require__(10);
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
var MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
function createInterface(server) {
var emitter = new events_1.EventEmitter();
server.on('connection', function (socket) {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
var childSockets = new VirtualConnectionsCollection(socket, function (virtualConnection) {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
exports.createInterface = createInterface;
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
var VirtualConnectionsCollection = (function () {
function VirtualConnectionsCollection(_socket, _onVirtualConnectionCallback) {
var _this = this;
this._socket = _socket;
this._onVirtualConnectionCallback = _onVirtualConnectionCallback;
this._currentFrameHeader = null;
this._virtualConnections = {};
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', function () {
Object.getOwnPropertyNames(_this._virtualConnections).forEach(function (id) {
// A 'null' frame signals that the connection was closed remotely
_this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, opening virtual connections as needed and dispatching data to them.
*/
VirtualConnectionsCollection.prototype._onIncomingDataAvailable = function () {
var exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
else {
var frameBodyLength = this._currentFrameHeader.bodyLength;
var frameBodyOrNull = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
var headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
}
else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
};
VirtualConnectionsCollection.prototype._onReceivedCompleteFrame = function (header, bodyIfNotEmpty) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
var virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
};
VirtualConnectionsCollection.prototype._getOrOpenVirtualConnection = function (header) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
}
else {
// It's a new one
return this._openVirtualConnection(header);
}
};
VirtualConnectionsCollection.prototype._openVirtualConnection = function (header) {
var _this = this;
var beginWriteCallback = function (data, writeCompletedCallback) {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
_this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
var newVirtualConnection = new VirtualConnection_1.VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', function () {
// The virtual connection was closed remotely. Clean up locally.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', function () {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
_this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
};
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
VirtualConnectionsCollection.prototype._readNextFrameHeader = function () {
var headerBuf = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
var connectionIdBinary = headerBuf.slice(0, 8);
var connectionIdString = connectionIdBinary.toString('hex');
var bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary: connectionIdBinary, connectionIdString: connectionIdString, bodyLength: bodyLength };
}
else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
};
VirtualConnectionsCollection.prototype._sendFrame = function (connectionIdBinary, data, callback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
var hasInvokedCallback = false;
var finalCallback = callback && (function (error) {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
var notFinalCallback = callback && (function (error) {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
var bytesSent = 0;
do {
var nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
var isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
};
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
VirtualConnectionsCollection.prototype._sendInt32LE = function (value, callback) {
var buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
};
VirtualConnectionsCollection.prototype._onVirtualConnectionWasClosed = function (id) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
};
return VirtualConnectionsCollection;
}());
/***/ },
/* 9 */
/***/ function(module, exports) {
module.exports = require("events");
/***/ },
/* 10 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var stream_1 = __webpack_require__(11);
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
var VirtualConnection = (function (_super) {
__extends(VirtualConnection, _super);
function VirtualConnection(_beginWriteCallback) {
var _this = _super.call(this) || this;
_this._beginWriteCallback = _beginWriteCallback;
_this._flowing = false;
_this._receivedDataQueue = [];
return _this;
}
VirtualConnection.prototype._read = function () {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
var nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
};
VirtualConnection.prototype._write = function (chunk, encodingIfString, callback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk, encodingIfString);
}
this._beginWriteCallback(chunk, callback);
};
VirtualConnection.prototype.onReceivedData = function (dataOrNullToSignalEOF) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
}
else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
};
return VirtualConnection;
}(stream_1.Duplex));
exports.VirtualConnection = VirtualConnection;
/***/ },
/* 11 */
/***/ function(module, exports) {
module.exports = require("stream");
/***/ }
/******/ ])));
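
For reference, a minimal illustration (example string only, not part of the original sources) of the newline encoding performed by the stdout/stderr interception module above: linebreaks inside a single write become the __ns_newline__ marker, while the trailing delimiter is preserved, so the .NET ILogger receives one multi-line log message rather than several fragments.

// TypeScript sketch; the regex and marker token are the ones defined in the module above,
// and the input string is a hypothetical example.
const findInternalNewlinesRegex = /\n(?!$)/g;
const encoded = 'line1\nline2\nline3\n'.replace(findInternalNewlinesRegex, '__ns_newline__');
// encoded === 'line1__ns_newline__line2__ns_newline__line3\n'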

View File

@@ -1,26 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>Socket-based RPC for Microsoft.AspNetCore.NodeServices.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Remove="node_modules\**\*" />
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Threading.Tasks.Dataflow" Version="$(SystemThreadingTasksDataflowPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>

View File

@@ -1,40 +0,0 @@
using System.IO;
using System.IO.Pipes;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class NamedPipeConnection : StreamConnection
{
private bool _disposedValue = false;
private NamedPipeClientStream _namedPipeClientStream;
#pragma warning disable 1998 // Because in the NET451 code path, there's nothing to await
public override async Task<Stream> Open(string address)
{
_namedPipeClientStream = new NamedPipeClientStream(
".",
address,
PipeDirection.InOut,
PipeOptions.Asynchronous);
await _namedPipeClientStream.ConnectAsync().ConfigureAwait(false);
return _namedPipeClientStream;
}
#pragma warning restore 1998
public override void Dispose()
{
if (!_disposedValue)
{
if (_namedPipeClientStream != null)
{
_namedPipeClientStream.Dispose();
}
_disposedValue = true;
}
}
}
}

View File

@@ -1,26 +0,0 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal abstract class StreamConnection : IDisposable
{
public abstract Task<Stream> Open(string address);
public abstract void Dispose();
public static StreamConnection Create()
{
var useNamedPipes = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows);
if (useNamedPipes)
{
return new NamedPipeConnection();
}
else
{
return new UnixDomainSocketConnection();
}
}
}
}
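
For context, a minimal sketch (TypeScript, with a hypothetical address value) of the Node-side listeners that the two StreamConnection implementations connect to; the platform switch mirrors the one in Create() above.

import * as net from 'net';

const address = 'pni-example'; // hypothetical; the real value is a GUID-based string chosen by the .NET side
const listenPath = process.platform === 'win32'
    ? '\\\\.\\pipe\\' + address    // paired with NamedPipeConnection / NamedPipeClientStream(".", address, ...)
    : '/tmp/' + address;           // paired with UnixDomainSocketConnection / UnixDomainSocketEndPoint("/tmp/" + address)
net.createServer().listen(listenPath);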

View File

@@ -1,40 +0,0 @@
using System.IO;
using System.Net.Sockets;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class UnixDomainSocketConnection : StreamConnection
{
private bool _disposedValue = false;
private NetworkStream _networkStream;
private Socket _socket;
public override async Task<Stream> Open(string address)
{
var endPoint = new UnixDomainSocketEndPoint("/tmp/" + address);
_socket = new Socket(endPoint.AddressFamily, SocketType.Stream, ProtocolType.Unspecified);
await _socket.ConnectAsync(endPoint).ConfigureAwait(false);
_networkStream = new NetworkStream(_socket);
return _networkStream;
}
public override void Dispose()
{
if (!_disposedValue)
{
if (_networkStream != null)
{
_networkStream.Dispose();
}
if (_socket != null)
{
_socket.Dispose();
}
_disposedValue = true;
}
}
}
}

View File

@@ -1,86 +0,0 @@
using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
// From System.IO.Pipes/src/System/Net/Sockets/UnixDomainSocketEndPoint.cs (an internal class in System.IO.Pipes)
internal sealed class UnixDomainSocketEndPoint : EndPoint
{
private const AddressFamily EndPointAddressFamily = AddressFamily.Unix;
private static readonly Encoding s_pathEncoding = Encoding.UTF8;
private static readonly int s_nativePathOffset = 2; // = offsetof(struct sockaddr_un, sun_path). It's the same on Linux and OSX
private static readonly int s_nativePathLength = 91; // sockaddr_un.sun_path at http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_un.h.html, -1 for terminator
private static readonly int s_nativeAddressSize = s_nativePathOffset + s_nativePathLength;
private readonly string _path;
private readonly byte[] _encodedPath;
public UnixDomainSocketEndPoint(string path)
{
if (path == null)
{
throw new ArgumentNullException(nameof(path));
}
_path = path;
_encodedPath = s_pathEncoding.GetBytes(_path);
if (path.Length == 0 || _encodedPath.Length > s_nativePathLength)
{
throw new ArgumentOutOfRangeException(nameof(path));
}
}
internal UnixDomainSocketEndPoint(SocketAddress socketAddress)
{
if (socketAddress == null)
{
throw new ArgumentNullException(nameof(socketAddress));
}
if (socketAddress.Family != EndPointAddressFamily ||
socketAddress.Size > s_nativeAddressSize)
{
throw new ArgumentOutOfRangeException(nameof(socketAddress));
}
if (socketAddress.Size > s_nativePathOffset)
{
_encodedPath = new byte[socketAddress.Size - s_nativePathOffset];
for (int i = 0; i < _encodedPath.Length; i++)
{
_encodedPath[i] = socketAddress[s_nativePathOffset + i];
}
_path = s_pathEncoding.GetString(_encodedPath, 0, _encodedPath.Length);
}
else
{
_encodedPath = Array.Empty<byte>();
_path = string.Empty;
}
}
public override SocketAddress Serialize()
{
var result = new SocketAddress(AddressFamily.Unix, s_nativeAddressSize);
for (int index = 0; index < _encodedPath.Length; index++)
{
result[s_nativePathOffset + index] = _encodedPath[index];
}
result[s_nativePathOffset + _encodedPath.Length] = 0; // path must be null-terminated
return result;
}
public override EndPoint Create(SocketAddress socketAddress) => new UnixDomainSocketEndPoint(socketAddress);
public override AddressFamily AddressFamily => EndPointAddressFamily;
public override string ToString() => _path;
}
}

View File

@@ -1,241 +0,0 @@
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections;
using Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses a lightweight binary streaming protocol
/// to perform RPC invocations. The physical transport is Named Pipes on Windows, or Domain Sockets on Linux/Mac.
/// For details on the binary streaming protocol, see
/// Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections.VirtualConnectionClient.
/// The advantage versus using HTTP for RPC is that this is faster (not surprisingly - there's much less overhead
/// because we don't need most of the functionality of HTTP).
///
/// The address of the pipe/socket is selected randomly here on the .NET side and sent to the child process as a
/// command-line argument (the address space is wide enough that there's no real risk of a clash, unlike when
/// selecting TCP port numbers).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class SocketNodeInstance : OutOfProcessNodeInstance
{
private readonly static JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly static int streamBufferSize = 16 * 1024;
private readonly static UTF8Encoding utf8EncodingWithoutBom = new UTF8Encoding(false);
private readonly SemaphoreSlim _connectionCreationSemaphore = new SemaphoreSlim(1);
private bool _connectionHasFailed;
private StreamConnection _physicalConnection;
private string _socketAddress;
private VirtualConnectionClient _virtualConnectionClient;
public SocketNodeInstance(NodeServicesOptions options, string socketAddress)
: base(
EmbeddedResourceReader.Read(
typeof(SocketNodeInstance),
"/Content/Node/entrypoint-socket.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeNewCommandLineOptions(socketAddress),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_socketAddress = socketAddress;
}
protected override async Task<T> InvokeExportAsync<T>(NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
if (_connectionHasFailed)
{
// _connectionHasFailed implies a protocol-level error. The old instance is no longer of any use.
var allowConnectionDraining = false;
// This special exception type forces NodeServicesImpl to restart the Node instance
throw new NodeInvocationException(
"The SocketNodeInstance socket connection failed. See logs to identify the reason.",
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: allowConnectionDraining);
}
if (_virtualConnectionClient == null)
{
// Although we could pass the cancellationToken into EnsureVirtualConnectionClientCreated and
// have it signal cancellations upstream, that would be a bad thing to do, because all callers
// wait for the same connection task. There's no reason why the first caller should have the
// special ability to cancel the connection process in a way that would affect subsequent
// callers. So, each caller just independently stops awaiting connection if that call is cancelled.
await ThrowOnCancellation(EnsureVirtualConnectionClientCreated(), cancellationToken);
}
// For each invocation, we open a new virtual connection. This gives an API equivalent to opening a new
// physical connection to the child process, but without the overhead of doing so, because it's really
// just multiplexed into the existing physical connection stream.
bool shouldDisposeVirtualConnection = true;
Stream virtualConnection = null;
try
{
virtualConnection = _virtualConnectionClient.OpenVirtualConnection();
// Send request
WriteJsonLine(virtualConnection, invocationInfo);
// Determine what kind of response format is expected
if (typeof(T) == typeof(Stream))
{
// Pass through streamed binary response
// It is up to the consumer to dispose this stream, so don't do so here
shouldDisposeVirtualConnection = false;
return (T)(object)virtualConnection;
}
else
{
// Parse and return non-streamed JSON response
var response = await ReadJsonAsync<RpcJsonResponse<T>>(virtualConnection, cancellationToken);
if (response.ErrorMessage != null)
{
throw new NodeInvocationException(response.ErrorMessage, response.ErrorDetails);
}
return response.Result;
}
}
finally
{
if (shouldDisposeVirtualConnection)
{
virtualConnection.Dispose();
}
}
}
private async Task EnsureVirtualConnectionClientCreated()
{
// Asynchronous equivalent to a 'lock(...) { ... }'
await _connectionCreationSemaphore.WaitAsync();
try
{
if (_virtualConnectionClient == null)
{
_physicalConnection = StreamConnection.Create();
var connection = await _physicalConnection.Open(_socketAddress);
_virtualConnectionClient = new VirtualConnectionClient(connection);
_virtualConnectionClient.OnError += (ex) =>
{
// This callback is fired only if there's a protocol-level failure (e.g., child process disconnected
// unexpectedly). It does *not* fire when RPC calls return errors. Since there's been a protocol-level
// failure, this Node instance is no longer usable and should be discarded.
_connectionHasFailed = true;
OutputLogger.LogError(0, ex, ex.Message);
};
}
}
finally
{
_connectionCreationSemaphore.Release();
}
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (_virtualConnectionClient != null)
{
_virtualConnectionClient.Dispose();
_virtualConnectionClient = null;
}
if (_physicalConnection != null)
{
_physicalConnection.Dispose();
_physicalConnection = null;
}
}
base.Dispose(disposing);
}
private static void WriteJsonLine(Stream stream, object serializableObject)
{
using (var streamWriter = new StreamWriter(stream, utf8EncodingWithoutBom, streamBufferSize, true))
using (var jsonWriter = new JsonTextWriter(streamWriter))
{
jsonWriter.CloseOutput = false;
jsonWriter.AutoCompleteOnClose = false;
var serializer = JsonSerializer.Create(jsonSerializerSettings);
serializer.Serialize(jsonWriter, serializableObject);
jsonWriter.Flush();
streamWriter.WriteLine();
streamWriter.Flush();
}
}
private static async Task<T> ReadJsonAsync<T>(Stream stream, CancellationToken cancellationToken)
{
var json = Encoding.UTF8.GetString(await ReadAllBytesAsync(stream, cancellationToken));
return JsonConvert.DeserializeObject<T>(json, jsonSerializerSettings);
}
private static async Task<byte[]> ReadAllBytesAsync(Stream input, CancellationToken cancellationToken)
{
byte[] buffer = new byte[streamBufferSize];
using (var ms = new MemoryStream())
{
int read;
while ((read = await input.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
private static string MakeNewCommandLineOptions(string listenAddress)
{
return $"--listenAddress {listenAddress}";
}
private static Task ThrowOnCancellation(Task task, CancellationToken cancellationToken)
{
return task.IsCompleted
? task // If the task is already completed, no need to wrap it in a further layer of task
: task.ContinueWith(
_ => {}, // If the task completes, allow execution to continue
cancellationToken,
TaskContinuationOptions.ExecuteSynchronously,
TaskScheduler.Default);
}
#pragma warning disable 649 // These properties are populated via JSON deserialization
private class RpcJsonResponse<TResult>
{
public TResult Result { get; set; }
public string ErrorMessage { get; set; }
public string ErrorDetails { get; set; }
}
#pragma warning restore 649
}
}
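
For reference, a small sketch (example values only, not from the original sources) of the JSON the Node entrypoint writes back on the virtual connection, which ReadJsonAsync&lt;RpcJsonResponse&lt;T&gt;&gt; above deserializes; the property names are camelCase on the wire and map onto the PascalCase Result/ErrorMessage/ErrorDetails properties.

// TypeScript sketch with hypothetical payloads matching the success and failure paths of the entrypoint.
const successResponse = JSON.stringify({ result: 3, errorMessage: null, errorDetails: null });
const failureResponse = JSON.stringify({ errorMessage: 'boom', errorDetails: 'Error: boom\n    at <stack frames>' });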

View File

@@ -1,21 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over binary sockets (on Windows, this is
/// implemented as named pipes; on other platforms it uses domain sockets).
/// </summary>
public static void UseSocketHosting(this NodeServicesOptions options)
{
var pipeName = "pni-" + Guid.NewGuid().ToString("D"); // Arbitrary non-clashing string
options.NodeInstanceFactory = () => new SocketNodeInstance(options, pipeName);
}
}
}

View File

@@ -1,79 +0,0 @@
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
import '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/OverrideStdOutputs';
import * as net from 'net';
import * as path from 'path';
import * as readline from 'readline';
import { Duplex } from 'stream';
import { parseArgs } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ArgsUtil';
import { exitWhenParentExits } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ExitWhenParentExits';
import * as virtualConnectionServer from './VirtualConnections/VirtualConnectionServer';
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
const dynamicRequire: (name: string) => any = eval('require');
// Signal to the .NET side when we're ready to accept invocations
const server = net.createServer().on('listening', () => {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', (connection: Duplex) => {
readline.createInterface(connection, null).on('line', line => {
try {
// Get a reference to the function to invoke
const invocation = JSON.parse(line) as RpcInvocation;
const invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
const invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
let hasInvokedCallback = false;
const invocationCallback = (errorValue, successValue) => {
if (hasInvokedCallback) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: (): Duplex => {
hasInvokedCallback = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
} catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
const useWindowsNamedPipes = /^win/.test(process.platform);
const parsedArgs = parseArgs(process.argv);
const listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
interface RpcInvocation {
moduleName: string;
exportedFunctionName: string;
args: any[];
}
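
To illustrate the invocation convention above, here is a minimal sketch (hypothetical module name and values) of a user module that such an RPC call resolves: the exported function receives the result callback first, followed by the args supplied from .NET, and reports either a value or an error through it.

// Hypothetical module, e.g. invoked with {"moduleName":"./addNumbers","exportedFunctionName":null,"args":[1,2]}.
module.exports = function addNumbers(callback: (error: any, result?: any) => void, x: number, y: number) {
    callback(null, x + y); // produces the non-streamed JSON response { result: 3 }
};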

View File

@@ -1,43 +0,0 @@
import { Duplex } from 'stream';
export type EndWriteCallback = (error?: any) => void;
export type BeginWriteCallback = (data: Buffer, callback: EndWriteCallback) => void;
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
export class VirtualConnection extends Duplex {
private _flowing = false;
private _receivedDataQueue: Buffer[] = [];
constructor(private _beginWriteCallback: BeginWriteCallback) {
super();
}
public _read() {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
const nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
}
public _write(chunk: Buffer | string, encodingIfString: string, callback: EndWriteCallback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk as string, encodingIfString);
}
this._beginWriteCallback(chunk as Buffer, callback);
}
public onReceivedData(dataOrNullToSignalEOF: Buffer) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
} else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
}
}
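
A short usage sketch (assumed relative import path and example data, not part of the original sources) of the Duplex behaviour above: frame bodies arriving from the socket are handed to onReceivedData, writes are funnelled through the supplied BeginWriteCallback, and a null chunk surfaces as the stream's normal 'end' event.

import { VirtualConnection } from './VirtualConnection'; // path assumed

const conn = new VirtualConnection((data, done) => {
    // In the real server this callback frames 'data' and writes it to the physical socket.
    console.log('would send ' + data.length + ' bytes');
    done();
});
conn.on('data', chunk => console.log('received:', chunk.toString()));
conn.on('end', () => console.log('remote closed the virtual connection'));
conn.onReceivedData(Buffer.from('hello')); // queued or pushed, depending on whether the stream is flowing
conn.onReceivedData(null);                 // null signals remote EOF
conn.write('reply');                       // goes through the BeginWriteCallback above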

View File

@@ -1,199 +0,0 @@
import { Server, Socket } from 'net';
import { EventEmitter } from 'events';
import { Duplex } from 'stream';
import { VirtualConnection, EndWriteCallback } from './VirtualConnection';
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
const MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
export function createInterface(server: Server): EventEmitter {
const emitter = new EventEmitter();
server.on('connection', (socket: Socket) => {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
const childSockets = new VirtualConnectionsCollection(socket, virtualConnection => {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
class VirtualConnectionsCollection {
private _currentFrameHeader: FrameHeader = null;
private _virtualConnections: { [id: string]: VirtualConnection } = {};
constructor(private _socket: Socket, private _onVirtualConnectionCallback: (virtualConnection: Duplex) => void) {
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', () => {
Object.getOwnPropertyNames(this._virtualConnections).forEach(id => {
// A 'null' frame signals that the connection was closed remotely
this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, opening virtual connections as needed and dispatching data to them.
*/
private _onIncomingDataAvailable() {
let exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
} else {
const frameBodyLength = this._currentFrameHeader.bodyLength;
const frameBodyOrNull: Buffer = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
const headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
} else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
}
private _onReceivedCompleteFrame(header: FrameHeader, bodyIfNotEmpty: Buffer) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
const virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
}
private _getOrOpenVirtualConnection(header: FrameHeader) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
} else {
// It's a new one
return this._openVirtualConnection(header);
}
}
private _openVirtualConnection(header: FrameHeader) {
const beginWriteCallback = (data, writeCompletedCallback) => {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
const newVirtualConnection = new VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', () => {
// The virtual connection was closed remotely. Clean up locally.
this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', () => {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
this._onVirtualConnectionWasClosed(header.connectionIdString);
this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
}
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
private _readNextFrameHeader(): FrameHeader {
const headerBuf: Buffer = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
const connectionIdBinary = headerBuf.slice(0, 8);
const connectionIdString = connectionIdBinary.toString('hex');
const bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary, connectionIdString, bodyLength };
} else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
}
private _sendFrame(connectionIdBinary: Buffer, data: Buffer, callback?: EndWriteCallback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
let hasInvokedCallback = false;
const finalCallback: EndWriteCallback = callback && (error => {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
const notFinalCallback: EndWriteCallback = callback && (error => {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
let bytesSent = 0;
do {
const nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
const isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
}
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
private _sendInt32LE(value: number, callback?: EndWriteCallback) {
const buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
}
private _onVirtualConnectionWasClosed(id: string) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
}
}
interface FrameHeader {
connectionIdBinary: Buffer;
connectionIdString: string;
bodyLength: number;
}
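
To make the framing concrete, a small sketch (example values only, not from the original sources) of a single frame in the layout that _readNextFrameHeader and _sendFrame above assume, and that the .NET VirtualConnectionClient shares: an 8-byte connection id, a little-endian Int32 body length, then the body.

// TypeScript sketch; the connection id and body are hypothetical example values.
function buildFrame(connectionIdBinary: Buffer, body: Buffer): Buffer {
    const lengthBuf = Buffer.alloc(4);
    lengthBuf.writeInt32LE(body.length, 0); // must not exceed MaxFrameBodyLength (16 * 1024)
    return Buffer.concat([connectionIdBinary, lengthBuf, body]);
}

const frame = buildFrame(Buffer.from('0000000000000001', 'hex'), Buffer.from('{"result":3}'));
// frame.slice(0, 8) is the connection id, frame.readInt32LE(8) === 12, and the remainder is the body.
// A zero-length body is the close-virtual-connection signal.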

View File

@@ -1,11 +0,0 @@
{
"compilerOptions": {
"target": "es3",
"module": "commonjs",
"moduleResolution": "node",
"types": ["node"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -1,150 +0,0 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A virtual read/write connection, typically to a remote process. Multiple virtual connections can be
/// multiplexed over a single physical connection (e.g., a named pipe, domain socket, or TCP socket).
/// </summary>
internal class VirtualConnection : Stream
{
private readonly static Task CompletedTask = Task.CompletedTask;
private VirtualConnectionClient _host;
private readonly BufferBlock<byte[]> _receivedDataQueue = new BufferBlock<byte[]>();
private ArraySegment<byte> _receivedDataNotYetUsed;
private bool _wasClosedByRemote;
private bool _isDisposed;
public VirtualConnection(long id, VirtualConnectionClient host)
{
Id = id;
_host = host;
}
public long Id { get; }
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return true; } }
public override long Length
{
get { throw new NotImplementedException(); }
}
public override long Position
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public override void Flush()
{
// We're auto-flushing, so this is a no-op.
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
return 0;
}
var bytesRead = 0;
while (true)
{
// Pull as many applicable bytes as we can out of receivedDataNotYetUsed, then update its offset/length
int bytesToExtract = Math.Min(count - bytesRead, _receivedDataNotYetUsed.Count);
if (bytesToExtract > 0)
{
Buffer.BlockCopy(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset, buffer, bytesRead, bytesToExtract);
_receivedDataNotYetUsed = new ArraySegment<byte>(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset + bytesToExtract, _receivedDataNotYetUsed.Count - bytesToExtract);
bytesRead += bytesToExtract;
}
// If we've completely filled the output buffer, we're done
if (bytesRead == count)
{
return bytesRead;
}
// We haven't yet filled the output buffer, so we must have exhausted receivedDataNotYetUsed instead.
// We want to get the next block of data from the underlying queue.
byte[] nextReceivedBlock;
if (bytesRead > 0)
{
if (!_receivedDataQueue.TryReceive(null, out nextReceivedBlock))
{
// No more data is available synchronously, and we already have some data, so we can stop now
return bytesRead;
}
}
else
{
// Since we don't yet have anything, wait for the underlying source
nextReceivedBlock = await _receivedDataQueue.ReceiveAsync(cancellationToken);
}
if (nextReceivedBlock.Length == 0)
{
// A zero-length block signals that the remote regards this virtual connection as closed
_wasClosedByRemote = true;
return bytesRead;
}
else
{
// We got some more data, so can continue trying to fill the output buffer
_receivedDataNotYetUsed = new ArraySegment<byte>(nextReceivedBlock, 0, nextReceivedBlock.Length);
}
}
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
throw new InvalidOperationException("The connection was already closed by the remote party");
}
return count > 0 ? _host.WriteAsync(Id, buffer, offset, count, cancellationToken) : CompletedTask;
}
public override int Read(byte[] buffer, int offset, int count)
{
return ReadAsync(buffer, offset, count, CancellationToken.None).Result;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
WriteAsync(buffer, offset, count, CancellationToken.None).Wait();
}
protected override void Dispose(bool disposing)
{
if (disposing && !_isDisposed)
{
_isDisposed = true;
_host.CloseInnerStream(Id, _wasClosedByRemote);
}
}
public async Task AddDataToQueue(byte[] data)
{
await _receivedDataQueue.SendAsync(data);
}
}
}

View File

@@ -1,238 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A callback that will be invoked if the <see cref="VirtualConnectionClient"/> encounters a read error.
/// </summary>
/// <param name="ex"></param>
public delegate void VirtualConnectionReadErrorHandler(Exception ex);
/// <summary>
/// Wraps an underlying physical read/write stream (e.g., named pipes, domain sockets, or TCP sockets) and
/// exposes an API for making 'virtual connections', which act as independent read/write streams.
/// Traffic over these virtual connections is multiplexed over the underlying physical stream. This is useful
/// for fast stream-based inter-process communication because it avoids the overhead of opening a new physical
/// connection each time a new communication channel is needed.
/// </summary>
internal class VirtualConnectionClient : IDisposable
{
internal const int MaxFrameBodyLength = 16 * 1024;
public event VirtualConnectionReadErrorHandler OnError;
private Stream _underlyingTransport;
private Dictionary<long, VirtualConnection> _activeInnerStreams;
private long _nextInnerStreamId;
private readonly SemaphoreSlim _streamWriterSemaphore = new SemaphoreSlim(1);
private readonly object _readControlLock = new object();
private Exception _readLoopExitedWithException;
private readonly CancellationTokenSource _disposalCancellationToken = new CancellationTokenSource();
private bool _disposedValue = false;
public VirtualConnectionClient(Stream underlyingTransport)
{
_underlyingTransport = underlyingTransport;
_activeInnerStreams = new Dictionary<long, VirtualConnection>();
RunReadLoop();
}
public Stream OpenVirtualConnection()
{
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
var id = Interlocked.Increment(ref _nextInnerStreamId);
var newInnerStream = new VirtualConnection(id, this);
lock (_activeInnerStreams)
{
_activeInnerStreams.Add(id, newInnerStream);
}
return newInnerStream;
}
// It's async void because nothing waits for it to finish (it continues indefinitely). It signals any errors via
// a separate channel.
private async void RunReadLoop()
{
try
{
while (!_disposalCancellationToken.IsCancellationRequested)
{
var remoteIsStillConnected = await ProcessNextFrameAsync();
if (!remoteIsStillConnected)
{
CloseAllActiveStreams();
}
}
}
catch (Exception ex)
{
// Not all underlying transports correctly honor cancellation tokens. For example,
// DomainSocketStreamTransport's ReadAsync ignores them, so we only know to stop
// the read loop when the underlying stream is disposed and then it throws ObjectDisposedException.
if (!(ex is TaskCanceledException || ex is ObjectDisposedException))
{
_readLoopExitedWithException = ex;
var evt = OnError;
if (evt != null)
{
evt(ex);
}
}
}
}
private async Task<bool> ProcessNextFrameAsync()
{
// First read frame header
var frameHeaderBuffer = await ReadExactLength(12);
if (frameHeaderBuffer == null)
{
return false; // Underlying stream was closed
}
// Parse frame header, then read the frame body
long streamId = BitConverter.ToInt64(frameHeaderBuffer, 0);
int frameBodyLength = BitConverter.ToInt32(frameHeaderBuffer, 8);
if (frameBodyLength < 0 || frameBodyLength > MaxFrameBodyLength)
{
throw new InvalidDataException("Illegal frame length: " + frameBodyLength);
}
var frameBody = await ReadExactLength(frameBodyLength);
if (frameBody == null)
{
return false; // Underlying stream was closed
}
// Dispatch the frame to the relevant inner stream
VirtualConnection innerStream;
lock (_activeInnerStreams)
{
_activeInnerStreams.TryGetValue(streamId, out innerStream);
}
if (innerStream != null)
{
await innerStream.AddDataToQueue(frameBody);
}
return true;
}
private async Task<byte[]> ReadExactLength(int lengthToRead) {
byte[] buffer = new byte[lengthToRead];
var totalBytesRead = 0;
var ct = _disposalCancellationToken.Token;
while (totalBytesRead < lengthToRead)
{
var chunkLengthRead = await _underlyingTransport.ReadAsync(buffer, totalBytesRead, lengthToRead - totalBytesRead, ct);
if (chunkLengthRead == 0)
{
// Underlying stream was closed
return null;
}
totalBytesRead += chunkLengthRead;
}
return buffer;
}
private void CloseAllActiveStreams()
{
IList<VirtualConnection> innerStreamsCopy;
// Only hold the lock while cloning the list of inner streams. Release the lock before
// actually disposing them, because each 'dispose' call will try to take another lock
// so it can remove that inner stream from activeInnerStreams.
lock (_activeInnerStreams)
{
innerStreamsCopy = _activeInnerStreams.Values.ToList();
}
foreach (var stream in innerStreamsCopy)
{
stream.Dispose();
}
}
public void Dispose()
{
if (!_disposedValue)
{
_disposedValue = true;
_disposalCancellationToken.Cancel(); // Stops the read loop
CloseAllActiveStreams();
}
}
public async Task WriteAsync(long innerStreamId, byte[] data, int offset, int count, CancellationToken cancellationToken)
{
// In case the amount of data to be sent exceeds the max frame length, split it into separate frames
// Note that we always send at least one frame, even if it's empty, because the zero-length frame is the signal to close a virtual connection
// (hence 'do..while' instead of just 'while').
int bytesWritten = 0;
do {
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
// Hold the write lock only for the time taken to send a single frame, not all frames, to allow large sends to proceed in parallel
await _streamWriterSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
// Write stream ID, then length prefix, then chunk payload, then flush
var nextChunkBodyLength = Math.Min(MaxFrameBodyLength, count - bytesWritten);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(innerStreamId), 0, 8, cancellationToken).ConfigureAwait(false);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(nextChunkBodyLength), 0, 4, cancellationToken).ConfigureAwait(false);
if (nextChunkBodyLength > 0)
{
await _underlyingTransport.WriteAsync(data, offset + bytesWritten, nextChunkBodyLength, cancellationToken).ConfigureAwait(false);
bytesWritten += nextChunkBodyLength;
}
await _underlyingTransport.FlushAsync(cancellationToken).ConfigureAwait(false);
}
finally
{
_streamWriterSemaphore.Release();
}
} while (bytesWritten < count);
}
public void CloseInnerStream(long innerStreamId, bool isAlreadyClosedRemotely)
{
lock (_activeInnerStreams)
{
if (_activeInnerStreams.ContainsKey(innerStreamId))
{
_activeInnerStreams.Remove(innerStreamId);
}
}
if (!isAlreadyClosedRemotely) {
// Also notify the remote that this inner stream is closed
WriteAsync(innerStreamId, new byte[0], 0, 0, new CancellationToken()).Wait();
}
}
private void ThrowIfReadLoopFailed()
{
if (_readLoopExitedWithException != null)
{
throw new AggregateException("The connection failed - see InnerException for details.", _readLoopExitedWithException);
}
}
}
}
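As a reference for the framing that WriteAsync and ProcessNextFrameAsync agree on, the sketch below (illustrative only, not from the deleted sources; the helper name is made up) composes a single frame: an 8-byte virtual connection id and a 4-byte body length in BitConverter's native byte order, followed by at most MaxFrameBodyLength body bytes, where a zero-length body signals that the virtual connection is closed.

using System;

internal static class FrameFormatSketch
{
    internal const int MaxFrameBodyLength = 16 * 1024; // mirrors VirtualConnectionClient.MaxFrameBodyLength

    // Builds one frame: [8-byte connection id][4-byte body length][body]. A zero-length body means "close".
    internal static byte[] BuildFrame(long virtualConnectionId, byte[] body)
    {
        if (body.Length > MaxFrameBodyLength)
        {
            throw new ArgumentException("Body too large for a single frame; WriteAsync splits such payloads.");
        }
        var frame = new byte[8 + 4 + body.Length];
        Buffer.BlockCopy(BitConverter.GetBytes(virtualConnectionId), 0, frame, 0, 8);
        Buffer.BlockCopy(BitConverter.GetBytes(body.Length), 0, frame, 8, 4);
        Buffer.BlockCopy(body, 0, frame, 12, body.Length);
        return frame;
    }
}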

View File

@@ -1,18 +0,0 @@
{
"name": "nodeservices.sockets",
"version": "1.0.0",
"description": "This is not really an NPM package and will not be published. This file exists only to reference compilation tools.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "./node_modules/.bin/webpack"
},
"author": "Microsoft",
"license": "Apache-2.0",
"devDependencies": {
"@types/node": "^6.0.42",
"ts-loader": "^0.8.2",
"typescript": "^2.0.0",
"webpack": "^1.13.1"
}
}

View File

@@ -1,20 +0,0 @@
module.exports = {
target: 'node',
externals: ['fs', 'net', 'events', 'readline', 'stream'],
resolve: {
extensions: [ '.ts' ]
},
module: {
loaders: [
{ test: /\.ts$/, loader: 'ts-loader' },
]
},
entry: {
'entrypoint-socket': ['./TypeScript/SocketNodeInstanceEntryPoint'],
},
output: {
libraryTarget: 'commonjs',
path: './Content/Node',
filename: '[name].js'
}
};

View File

@@ -1,3 +0,0 @@
/bin/
/node_modules/
yarn.lock

View File

@@ -1,25 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Supplies INodeServices instances.
/// </summary>
public static class NodeServicesFactory
{
/// <summary>
/// Create an <see cref="INodeServices"/> instance according to the supplied options.
/// </summary>
/// <param name="options">Options for creating the <see cref="INodeServices"/> instance.</param>
/// <returns>An <see cref="INodeServices"/> instance.</returns>
public static INodeServices CreateNodeServices(NodeServicesOptions options)
{
if (options == null)
{
throw new ArgumentNullException(nameof (options));
}
return new NodeServicesImpl(options.NodeInstanceFactory);
}
}
}
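A hedged sketch of using this factory outside of ASP.NET Core dependency injection. The empty service provider and the option values are illustrative; as the NodeServicesOptions constructor in the next file shows, it falls back to the current directory and a console logger when no hosting environment or logger factory is registered.

using System.IO;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.Extensions.DependencyInjection;

internal static class NodeServicesFactorySketch
{
    internal static INodeServices CreateDefaultInstance()
    {
        var serviceProvider = new ServiceCollection().BuildServiceProvider();
        var options = new NodeServicesOptions(serviceProvider)
        {
            ProjectPath = Directory.GetCurrentDirectory(), // where Node.js modules are resolved from
            InvocationTimeoutMilliseconds = 30000
        };
        return NodeServicesFactory.CreateNodeServices(options);
    }
}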

View File

@@ -1,114 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Logging.Console;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Describes options used to configure an <see cref="INodeServices"/> instance.
/// </summary>
public class NodeServicesOptions
{
internal const string TimeoutConfigPropertyName = nameof(InvocationTimeoutMilliseconds);
private const int DefaultInvocationTimeoutMilliseconds = 60 * 1000;
private const string LogCategoryName = "Microsoft.AspNetCore.NodeServices";
private static readonly string[] DefaultWatchFileExtensions = { ".js", ".jsx", ".ts", ".tsx", ".json", ".html" };
/// <summary>
/// Creates a new instance of <see cref="NodeServicesOptions"/>.
/// </summary>
/// <param name="serviceProvider">The <see cref="IServiceProvider"/>.</param>
public NodeServicesOptions(IServiceProvider serviceProvider)
{
if (serviceProvider == null)
{
throw new ArgumentNullException(nameof (serviceProvider));
}
EnvironmentVariables = new Dictionary<string, string>();
InvocationTimeoutMilliseconds = DefaultInvocationTimeoutMilliseconds;
WatchFileExtensions = (string[])DefaultWatchFileExtensions.Clone();
var hostEnv = serviceProvider.GetService<IHostingEnvironment>();
if (hostEnv != null)
{
// In an ASP.NET environment, we can use the IHostingEnvironment data to auto-populate a few
// things that you'd otherwise have to specify manually
ProjectPath = hostEnv.ContentRootPath;
EnvironmentVariables["NODE_ENV"] = hostEnv.IsDevelopment() ? "development" : "production"; // De-facto standard values for Node
}
else
{
ProjectPath = Directory.GetCurrentDirectory();
}
var applicationLifetime = serviceProvider.GetService<IApplicationLifetime>();
if (applicationLifetime != null)
{
ApplicationStoppingToken = applicationLifetime.ApplicationStopping;
}
// If the DI system gives us a logger, use it. Otherwise, set up a default one.
var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
NodeInstanceOutputLogger = loggerFactory != null
? loggerFactory.CreateLogger(LogCategoryName)
: new ConsoleLogger(LogCategoryName, null, false);
// By default, we use this package's built-in out-of-process-via-HTTP hosting/transport
this.UseHttpHosting();
}
/// <summary>
/// Specifies how to construct Node.js instances. An <see cref="INodeInstance"/> encapsulates all details about
/// how Node.js instances are launched and communicated with. A new <see cref="INodeInstance"/> will be created
/// automatically if the previous instance has terminated (e.g., because a source file changed).
/// </summary>
public Func<INodeInstance> NodeInstanceFactory { get; set; }
/// <summary>
/// If set, overrides the path to the root of your application. This path is used when locating Node.js modules relative to your project.
/// </summary>
public string ProjectPath { get; set; }
/// <summary>
/// If set, the Node.js instance should restart when any matching file on disk within your project changes.
/// </summary>
public string[] WatchFileExtensions { get; set; }
/// <summary>
/// The Node.js instance's stdout/stderr will be redirected to this <see cref="ILogger"/>.
/// </summary>
public ILogger NodeInstanceOutputLogger { get; set; }
/// <summary>
/// If true, the Node.js instance will accept incoming V8 debugger connections (e.g., from node-inspector).
/// </summary>
public bool LaunchWithDebugging { get; set; }
/// <summary>
/// If <see cref="LaunchWithDebugging"/> is true, the Node.js instance will listen for V8 debugger connections on this port.
/// </summary>
public int DebuggingPort { get; set; }
/// <summary>
/// If set, starts the Node.js instance with the specified environment variables.
/// </summary>
public IDictionary<string, string> EnvironmentVariables { get; set; }
/// <summary>
/// Specifies the maximum duration, in milliseconds, that your .NET code should wait for Node.js RPC calls to return.
/// </summary>
public int InvocationTimeoutMilliseconds { get; set; }
/// <summary>
/// A token that indicates when the host application is stopping.
/// </summary>
public CancellationToken ApplicationStoppingToken { get; set; }
}
}

View File

@@ -1,41 +0,0 @@
using System;
using Microsoft.AspNetCore.NodeServices;
namespace Microsoft.Extensions.DependencyInjection
{
/// <summary>
/// Extension methods for setting up NodeServices in an <see cref="IServiceCollection" />.
/// </summary>
public static class NodeServicesServiceCollectionExtensions
{
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection)
=> AddNodeServices(serviceCollection, _ => {});
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
/// <param name="setupAction">A callback that will be invoked to populate the <see cref="NodeServicesOptions"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection, Action<NodeServicesOptions> setupAction)
{
if (setupAction == null)
{
throw new ArgumentNullException(nameof (setupAction));
}
serviceCollection.AddSingleton(typeof(INodeServices), serviceProvider =>
{
// First we let NodeServicesOptions take its defaults from the IServiceProvider,
// then we let the developer override those options
var options = new NodeServicesOptions(serviceProvider);
setupAction(options);
return NodeServicesFactory.CreateNodeServices(options);
});
}
}
}
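A hedged sketch of the typical registration in an ASP.NET Core Startup class. The lambda runs after NodeServicesOptions has taken its defaults from the service provider, so only overrides need to be specified; the values shown here are illustrative.

using Microsoft.Extensions.DependencyInjection;

public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddNodeServices(options =>
        {
            options.InvocationTimeoutMilliseconds = 120000; // allow slower Node invocations
            options.LaunchWithDebugging = false;
        });
    }
}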

View File

@@ -1,361 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
__webpack_require__(4);
var http = __webpack_require__(5);
var path = __webpack_require__(3);
var ArgsUtil_1 = __webpack_require__(6);
var ExitWhenParentExits_1 = __webpack_require__(7);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
var server = http.createServer(function (req, res) {
readRequestBodyAsJson(req, function (bodyJson) {
var hasSentResult = false;
var callback = function (errorValue, successValue) {
if (!hasSentResult) {
hasSentResult = true;
if (errorValue) {
respondWithError(res, errorValue);
}
else if (typeof successValue !== 'string') {
// Arbitrary object/number/etc - JSON-serialize it
var successValueJson = void 0;
try {
successValueJson = JSON.stringify(successValue);
}
catch (ex) {
// JSON serialization error - pass it back to .NET
respondWithError(res, ex);
return;
}
res.setHeader('Content-Type', 'application/json');
res.end(successValueJson);
}
else {
// String - can bypass JSON-serialization altogether
res.setHeader('Content-Type', 'text/plain');
res.end(successValue);
}
}
};
// Support streamed responses
Object.defineProperty(callback, 'stream', {
enumerable: true,
get: function () {
if (!hasSentResult) {
hasSentResult = true;
res.setHeader('Content-Type', 'application/octet-stream');
}
return res;
}
});
try {
var resolvedPath = path.resolve(process.cwd(), bodyJson.moduleName);
var invokedModule = dynamicRequire(resolvedPath);
var func = bodyJson.exportedFunctionName ? invokedModule[bodyJson.exportedFunctionName] : invokedModule;
if (!func) {
throw new Error('The module "' + resolvedPath + '" has no export named "' + bodyJson.exportedFunctionName + '"');
}
func.apply(null, [callback].concat(bodyJson.args));
}
catch (synchronousException) {
callback(synchronousException, null);
}
});
});
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var requestedPortOrZero = parsedArgs.port || 0; // 0 means 'let the OS decide'
server.listen(requestedPortOrZero, 'localhost', function () {
// Signal to HttpNodeHost which port it should make its HTTP connections on
console.log('[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port ' + server.address().port + '\]');
// Signal to the NodeServices base class that we're ready to accept invocations
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
function readRequestBodyAsJson(request, callback) {
var requestBodyAsString = '';
request.on('data', function (chunk) { requestBodyAsString += chunk; });
request.on('end', function () { callback(JSON.parse(requestBodyAsString)); });
}
function respondWithError(res, errorValue) {
res.statusCode = 500;
res.end(JSON.stringify({
errorMessage: errorValue.message || errorValue,
errorDetails: errorValue.stack || null
}));
}
/***/ },
/* 2 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var path = __webpack_require__(3);
var startsWith = function (str, prefix) { return str.substring(0, prefix.length) === prefix; };
var appRootDir = process.cwd();
function patchedLStat(pathToStatLong, fsReqWrap) {
try {
// If the lstat completes without errors, we don't modify its behavior at all
return origLStat.apply(this, arguments);
}
catch (ex) {
var shouldOverrideError = startsWith(ex.message, 'EPERM') // It's a permissions error
&& typeof appRootDirLong === 'string'
&& startsWith(appRootDirLong, pathToStatLong) // ... for an ancestor directory
&& ex.stack.indexOf('Object.realpathSync ') >= 0; // ... during symlink resolution
if (shouldOverrideError) {
// Fake the result to give the same result as an 'lstat' on the app root dir.
// This stops Node failing to load modules just because it doesn't know whether
// ancestor directories are symlinks or not. If there's a genuine file
// permissions issue, it will still surface later when Node actually
// tries to read the file.
return origLStat.call(this, appRootDir, fsReqWrap);
}
else {
// In any other case, preserve the original error
throw ex;
}
}
}
;
// It's only necessary to apply this workaround on Windows
var appRootDirLong = null;
var origLStat = null;
if (/^win/.test(process.platform)) {
try {
// Get the app's root dir in Node's internal "long" format (e.g., \\?\C:\dir\subdir)
appRootDirLong = path._makeLong(appRootDir);
// Actually apply the patch, being as defensive as possible
var bindingFs = process.binding('fs');
origLStat = bindingFs.lstat;
if (typeof origLStat === 'function') {
bindingFs.lstat = patchedLStat;
}
}
catch (ex) {
}
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 4 */
/***/ function(module, exports) {
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("http");
/***/ },
/* 6 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 7 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid, ignoreSigint) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
if (ignoreSigint) {
// Pressing ctrl+c in the terminal sends a SIGINT to all processes in the foreground process tree.
// By default, the Node process would then exit before the .NET process, because ASP.NET implements
// a delayed shutdown to allow ongoing requests to complete.
//
// This is problematic, because if Node exits first, the CopyToAsync code in ConditionalProxyMiddleware
// will experience a read fault and log a huge load of errors. Fortunately, since the Node process is
// already set up to shut itself down if it detects the .NET process is terminated, all we have to do is
// ignore the SIGINT. The Node process will then terminate automatically after the .NET process does.
//
// A better solution would be to have WebpackDevMiddleware listen for SIGINT and gracefully close any
// ongoing EventSource connections before letting the Node process exit, independently of the .NET
// process exiting. However, doing this well in general is very nontrivial (see all the discussion at
// https://github.com/nodejs/node/issues/2642).
process.on('SIGINT', function () {
console.log('Received SIGINT. Waiting for .NET process to exit...');
});
}
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ }
/******/ ])));
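To make the stdout/stderr interception above concrete: the compiled entry point replaces internal line breaks with the __ns_newline__ marker, and the .NET side converts them back into real newlines before logging (see UnencodeNewlines in OutOfProcessNodeInstance further down). A hedged round-trip with made-up content:

using System;

internal static class NewlineMarkerSketch
{
    internal static void Demo()
    {
        // What a multi-line Node error might look like after the marker substitution (sample text)
        var lineFromNode = "Error: boom__ns_newline__    at doWork (app.js:10:5)";
        var restored = lineFromNode.Replace("__ns_newline__", Environment.NewLine);
        Console.WriteLine(restored); // logged as a single message spanning two lines
    }
}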

View File

@@ -1,151 +0,0 @@
using System;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses HTTP to perform RPC invocations.
///
/// The Node child process starts an HTTP listener on an arbitrary available port (except where a nonzero
/// port number is specified as a constructor parameter), and signals which port was selected using the same
/// input/output-based mechanism that the base class uses to determine when the child process is ready to
/// accept RPC invocations.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class HttpNodeInstance : OutOfProcessNodeInstance
{
private static readonly Regex PortMessageRegex =
new Regex(@"^\[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port (\d+)\]$");
private static readonly JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly HttpClient _client;
private bool _disposed;
private int _portNumber;
public HttpNodeInstance(NodeServicesOptions options, int port = 0)
: base(
EmbeddedResourceReader.Read(
typeof(HttpNodeInstance),
"/Content/Node/entrypoint-http.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeCommandLineOptions(port),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_client = new HttpClient();
_client.Timeout = TimeSpan.FromMilliseconds(options.InvocationTimeoutMilliseconds + 1000);
}
private static string MakeCommandLineOptions(int port)
{
return $"--port {port}";
}
protected override async Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
var payloadJson = JsonConvert.SerializeObject(invocationInfo, jsonSerializerSettings);
var payload = new StringContent(payloadJson, Encoding.UTF8, "application/json");
var response = await _client.PostAsync("http://localhost:" + _portNumber, payload, cancellationToken);
if (!response.IsSuccessStatusCode)
{
// Unfortunately there's no true way to cancel ReadAsStringAsync calls, hence AbandonIfCancelled
var responseJson = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
var responseError = JsonConvert.DeserializeObject<RpcJsonResponse>(responseJson, jsonSerializerSettings);
throw new NodeInvocationException(responseError.ErrorMessage, responseError.ErrorDetails);
}
var responseContentType = response.Content.Headers.ContentType;
switch (responseContentType.MediaType)
{
case "text/plain":
// String responses can skip JSON encoding/decoding
if (typeof(T) != typeof(string))
{
throw new ArgumentException(
"Node module responded with non-JSON string. This cannot be converted to the requested generic type: " +
typeof(T).FullName);
}
var responseString = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return (T)(object)responseString;
case "application/json":
var responseJson = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return JsonConvert.DeserializeObject<T>(responseJson, jsonSerializerSettings);
case "application/octet-stream":
// Streamed responses have to be received as System.IO.Stream instances
if (typeof(T) != typeof(Stream) && typeof(T) != typeof(object))
{
throw new ArgumentException(
"Node module responded with binary stream. This cannot be converted to the requested generic type: " +
typeof(T).FullName + ". Instead you must use the generic type System.IO.Stream.");
}
return (T)(object)(await response.Content.ReadAsStreamAsync().OrThrowOnCancellation(cancellationToken));
default:
throw new InvalidOperationException("Unexpected response content type: " + responseContentType.MediaType);
}
}
protected override void OnOutputDataReceived(string outputData)
{
// Watch for "port selected" messages, and when observed, store the port number
// so we can use it when making HTTP requests. The child process will always send
// one of these messages before it sends a "ready for connections" message.
var match = _portNumber != 0 ? null : PortMessageRegex.Match(outputData);
if (match != null && match.Success)
{
_portNumber = int.Parse(match.Groups[1].Captures[0].Value);
}
else
{
base.OnOutputDataReceived(outputData);
}
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (!_disposed)
{
if (disposing)
{
_client.Dispose();
}
_disposed = true;
}
}
#pragma warning disable 649 // These properties are populated via JSON deserialization
private class RpcJsonResponse
{
public string ErrorMessage { get; set; }
public string ErrorDetails { get; set; }
}
#pragma warning restore 649
}
}
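A hedged illustration of the stdout handshake this class relies on: the entry point prints a "Listening on port" line (the port value below is made up) followed by the generic readiness message, and PortMessageRegex extracts the port used for subsequent HTTP invocations.

using System;
using System.Text.RegularExpressions;

internal static class PortHandshakeSketch
{
    internal static void Demo()
    {
        var portMessageRegex = new Regex(@"^\[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port (\d+)\]$");
        var sampleStdoutLine = "[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port 52341]";
        var match = portMessageRegex.Match(sampleStdoutLine);
        Console.WriteLine(match.Success ? match.Groups[1].Value : "no match"); // prints "52341"
    }
}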

View File

@@ -1,23 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an instance of Node.js to which Remote Procedure Calls (RPC) may be sent.
/// </summary>
public interface INodeInstance : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args);
}
}

View File

@@ -1,55 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an exception caused by invoking Node.js code.
/// </summary>
public class NodeInvocationException : Exception
{
/// <summary>
/// If true, indicates that the invocation failed because the Node.js instance could not be reached. For example,
/// it might have already shut down or previously crashed.
/// </summary>
public bool NodeInstanceUnavailable { get; private set; }
/// <summary>
/// If true, indicates that even though the invocation failed because the Node.js instance could not be reached
/// or needs to be restarted, that Node.js instance may remain alive for a period in order to complete any
/// outstanding requests.
/// </summary>
public bool AllowConnectionDraining { get; private set;}
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
public NodeInvocationException(string message, string details)
: base(message + Environment.NewLine + details)
{
}
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
/// <param name="nodeInstanceUnavailable">Specifies a value for the <see cref="NodeInstanceUnavailable"/> flag.</param>
/// <param name="allowConnectionDraining">Specifies a value for the <see cref="AllowConnectionDraining"/> flag.</param>
public NodeInvocationException(string message, string details, bool nodeInstanceUnavailable, bool allowConnectionDraining)
: this(message, details)
{
// Reject a meaningless combination of flags
if (allowConnectionDraining && !nodeInstanceUnavailable)
{
throw new ArgumentException(
$"The '${ nameof(allowConnectionDraining) }' parameter cannot be true " +
$"unless the '${ nameof(nodeInstanceUnavailable) }' parameter is also true.");
}
NodeInstanceUnavailable = nodeInstanceUnavailable;
AllowConnectionDraining = allowConnectionDraining;
}
}
}

View File

@@ -1,24 +0,0 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Describes an RPC call sent from .NET code to Node.js code.
/// </summary>
public class NodeInvocationInfo
{
/// <summary>
/// Specifies the path to the Node.js module (i.e., .js file) relative to the project root.
/// </summary>
public string ModuleName { get; set; }
/// <summary>
/// If set, specifies the name of CommonJS function export to be invoked.
/// If not set, the Node.js module's default export must itself be a function to be invoked.
/// </summary>
public string ExportedFunctionName { get; set; }
/// <summary>
/// A sequence of JSON-serializable arguments to be passed to the Node.js function being invoked.
/// </summary>
public object[] Args { get; set; }
}
}
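A hedged sketch of what this payload looks like on the wire when HttpNodeInstance serializes it with camel-cased property names (so the Node entry point reads bodyJson.moduleName, bodyJson.exportedFunctionName and bodyJson.args). The module and argument values are illustrative.

using Microsoft.AspNetCore.NodeServices.HostingModels;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;

internal static class InvocationPayloadSketch
{
    internal static string Serialize()
    {
        var invocation = new NodeInvocationInfo
        {
            ModuleName = "./math",
            ExportedFunctionName = "addNumbers",
            Args = new object[] { 1, 2 }
        };
        var settings = new JsonSerializerSettings { ContractResolver = new CamelCasePropertyNamesContractResolver() };
        // Produces: {"moduleName":"./math","exportedFunctionName":"addNumbers","args":[1,2]}
        return JsonConvert.SerializeObject(invocation, settings);
    }
}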

View File

@@ -1,17 +0,0 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over HTTP.
/// </summary>
public static void UseHttpHosting(this NodeServicesOptions options)
{
options.NodeInstanceFactory = () => new HttpNodeInstance(options);
}
}
}

View File

@@ -1,475 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Class responsible for launching a Node child process on the local machine, determining when it is ready to
/// accept invocations, detecting if it dies on its own, and finally terminating it on disposal.
///
/// This abstract base class uses the input/output streams of the child process to perform a simple handshake
/// to determine when the child process is ready to accept invocations. This is agnostic to the mechanism that
/// derived classes use to actually perform the invocations (e.g., they could use HTTP-RPC, or a binary TCP
/// protocol, or any other RPC-type mechanism).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.INodeInstance" />
public abstract class OutOfProcessNodeInstance : INodeInstance
{
/// <summary>
/// The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr is being redirected.
/// </summary>
protected readonly ILogger OutputLogger;
private const string ConnectionEstablishedMessage = "[Microsoft.AspNetCore.NodeServices:Listening]";
private readonly TaskCompletionSource<object> _connectionIsReadySource = new TaskCompletionSource<object>();
private bool _disposed;
private readonly StringAsTempFile _entryPointScript;
private FileSystemWatcher _fileSystemWatcher;
private int _invocationTimeoutMilliseconds;
private bool _launchWithDebugging;
private readonly Process _nodeProcess;
private int? _nodeDebuggingPort;
private bool _nodeProcessNeedsRestart;
private readonly string[] _watchFileExtensions;
/// <summary>
/// Creates a new instance of <see cref="OutOfProcessNodeInstance"/>.
/// </summary>
/// <param name="entryPointScript">The path to the entry point script that the Node instance should load and execute.</param>
/// <param name="projectPath">The root path of the current project. This is used when resolving Node.js module paths relative to the project root.</param>
/// <param name="watchFileExtensions">The filename extensions that should be watched within the project root. The Node instance will automatically shut itself down if any matching file changes.</param>
/// <param name="commandLineArguments">Additional command-line arguments to be passed to the Node.js instance.</param>
/// <param name="applicationStoppingToken">A token that indicates when the host application is stopping.</param>
/// <param name="nodeOutputLogger">The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr (and other log information) should be written.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="invocationTimeoutMilliseconds">The maximum duration, in milliseconds, to wait for RPC calls to complete.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 debugger connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 debugger connections on this port.</param>
public OutOfProcessNodeInstance(
string entryPointScript,
string projectPath,
string[] watchFileExtensions,
string commandLineArguments,
CancellationToken applicationStoppingToken,
ILogger nodeOutputLogger,
IDictionary<string, string> environmentVars,
int invocationTimeoutMilliseconds,
bool launchWithDebugging,
int debuggingPort)
{
if (nodeOutputLogger == null)
{
throw new ArgumentNullException(nameof(nodeOutputLogger));
}
OutputLogger = nodeOutputLogger;
_entryPointScript = new StringAsTempFile(entryPointScript, applicationStoppingToken);
_invocationTimeoutMilliseconds = invocationTimeoutMilliseconds;
_launchWithDebugging = launchWithDebugging;
var startInfo = PrepareNodeProcessStartInfo(_entryPointScript.FileName, projectPath, commandLineArguments,
environmentVars, _launchWithDebugging, debuggingPort);
_nodeProcess = LaunchNodeProcess(startInfo);
_watchFileExtensions = watchFileExtensions;
_fileSystemWatcher = BeginFileWatcher(projectPath);
ConnectToInputOutputStreams();
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
public async Task<T> InvokeExportAsync<T>(
CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args)
{
if (_nodeProcess.HasExited || _nodeProcessNeedsRestart)
{
// This special kind of exception triggers a transparent retry - NodeServicesImpl will launch
// a new Node instance and pass the invocation to that one instead.
// Note that if the Node process is listening for debugger connections, then we need it to shut
// down immediately and not stay open for connection draining (because if it did, the new Node
// instance wouldn't be able to start, because the old one would still hold the debugging port).
var message = _nodeProcess.HasExited
? "The Node process has exited"
: "The Node process needs to restart";
throw new NodeInvocationException(
message,
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: !_launchWithDebugging);
}
// Construct a new cancellation token that combines the supplied token with the configured invocation
// timeout. Technically we could avoid wrapping the cancellationToken if no timeout is configured,
// but that's not really a major use case, since timeouts are enabled by default.
using (var timeoutSource = new CancellationTokenSource())
using (var combinedCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutSource.Token))
{
if (_invocationTimeoutMilliseconds > 0)
{
timeoutSource.CancelAfter(_invocationTimeoutMilliseconds);
}
// By overwriting the supplied cancellation token, we ensure that it isn't accidentally used
// below. We only want to pass through the token that respects timeouts.
cancellationToken = combinedCancellationTokenSource.Token;
var connectionDidSucceed = false;
try
{
// Wait until the connection is established. This will throw if the connection fails to initialize,
// or if cancellation is requested first. Note that we can't really cancel the "establishing connection"
// task because that's shared with all callers, but we can stop waiting for it if this call is cancelled.
await _connectionIsReadySource.Task.OrThrowOnCancellation(cancellationToken);
connectionDidSucceed = true;
return await InvokeExportAsync<T>(new NodeInvocationInfo
{
ModuleName = moduleName,
ExportedFunctionName = exportNameOrNull,
Args = args
}, cancellationToken);
}
catch (TaskCanceledException)
{
if (timeoutSource.IsCancellationRequested)
{
// It was very common for developers to report 'TaskCanceledException' when encountering almost any
// trouble when using NodeServices. Now we have a default invocation timeout, and when it is hit we
// attempt to give a more descriptive exception message.
if (!connectionDidSucceed)
{
// This is very unlikely, but for debugging, it's still useful to differentiate it from the
// case below.
throw new NodeInvocationException(
$"Attempt to connect to Node timed out after {_invocationTimeoutMilliseconds}ms.",
string.Empty);
}
else
{
// Developers encounter this fairly often (if their Node code fails without invoking the callback,
// all that the .NET side knows is that the invocation eventually times out). Previously, this surfaced
// as a TaskCanceledException, but this led to a lot of issue reports. Now we throw the following
// descriptive error.
throw new NodeInvocationException(
$"The Node invocation timed out after {_invocationTimeoutMilliseconds}ms.",
$"You can change the timeout duration by setting the {NodeServicesOptions.TimeoutConfigPropertyName} "
+ $"property on {nameof(NodeServicesOptions)}.\n\n"
+ "The first debugging step is to ensure that your Node.js function always invokes the supplied "
+ "callback (or throws an exception synchronously), even if it encounters an error. Otherwise, "
+ "the .NET code has no way to know that it is finished or has failed."
);
}
}
else
{
throw;
}
}
}
}
/// <summary>
/// Disposes this instance.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="invocationInfo">Specifies the Node.js function to be invoked and arguments to be passed to it.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
protected abstract Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo,
CancellationToken cancellationToken);
/// <summary>
/// Configures a <see cref="ProcessStartInfo"/> instance describing how to launch the Node.js process.
/// </summary>
/// <param name="entryPointFilename">The entrypoint JavaScript file that the Node.js process should execute.</param>
/// <param name="projectPath">The root path of the project. This is used when locating Node.js modules relative to the project root.</param>
/// <param name="commandLineArguments">Command-line arguments to be passed to the Node.js process.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 Inspector connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 Inspector connections on this port.</param>
/// <returns>A <see cref="ProcessStartInfo"/> describing how to launch the Node.js process.</returns>
protected virtual ProcessStartInfo PrepareNodeProcessStartInfo(
string entryPointFilename, string projectPath, string commandLineArguments,
IDictionary<string, string> environmentVars, bool launchWithDebugging, int debuggingPort)
{
// This method is virtual, as it provides a way to override the NODE_PATH or the path to node.exe
string debuggingArgs;
if (launchWithDebugging)
{
debuggingArgs = debuggingPort != default(int) ? $"--inspect={debuggingPort} " : "--inspect ";
_nodeDebuggingPort = debuggingPort;
}
else
{
debuggingArgs = string.Empty;
}
var thisProcessPid = Process.GetCurrentProcess().Id;
var startInfo = new ProcessStartInfo("node")
{
Arguments = $"{debuggingArgs}\"{entryPointFilename}\" --parentPid {thisProcessPid} {commandLineArguments ?? string.Empty}",
UseShellExecute = false,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
WorkingDirectory = projectPath
};
// Append environment vars
if (environmentVars != null)
{
foreach (var envVarKey in environmentVars.Keys)
{
var envVarValue = environmentVars[envVarKey];
if (envVarValue != null)
{
SetEnvironmentVariable(startInfo, envVarKey, envVarValue);
}
}
}
// Append projectPath to NODE_PATH so it can locate node_modules
var existingNodePath = Environment.GetEnvironmentVariable("NODE_PATH") ?? string.Empty;
if (existingNodePath != string.Empty)
{
existingNodePath += Path.PathSeparator;
}
var nodePathValue = existingNodePath + Path.Combine(projectPath, "node_modules");
SetEnvironmentVariable(startInfo, "NODE_PATH", nodePathValue);
return startInfo;
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stdout.
/// </summary>
/// <param name="outputData">The line emitted to the Node.js process's stdout.</param>
protected virtual void OnOutputDataReceived(string outputData)
{
OutputLogger.LogInformation(outputData);
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stderr.
/// </summary>
/// <param name="errorData">The line emitted to the Node.js process's stderr.</param>
protected virtual void OnErrorDataReceived(string errorData)
{
OutputLogger.LogError(errorData);
}
/// <summary>
/// Disposes the instance.
/// </summary>
/// <param name="disposing">True if the object is disposing or false if it is finalizing.</param>
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_entryPointScript.Dispose();
EnsureFileSystemWatcherIsDisposed();
}
// Make sure the Node process is finished
// TODO: Is there a more graceful way to end it? Or does this still let it perform any cleanup?
if (_nodeProcess != null && !_nodeProcess.HasExited)
{
_nodeProcess.Kill();
}
_disposed = true;
}
}
private void EnsureFileSystemWatcherIsDisposed()
{
if (_fileSystemWatcher != null)
{
_fileSystemWatcher.Dispose();
_fileSystemWatcher = null;
}
}
private static void SetEnvironmentVariable(ProcessStartInfo startInfo, string name, string value)
{
startInfo.Environment[name] = value;
}
private static Process LaunchNodeProcess(ProcessStartInfo startInfo)
{
try {
var process = Process.Start(startInfo);
// On Mac at least, a killed child process is left open as a zombie until the parent
// captures its exit code. We don't need the exit code for this process, and don't want
// to use process.WaitForExit() explicitly (we'd have to block the thread until it really
// has exited), but we don't want to leave zombies lying around either. It's sufficient
// to use process.EnableRaisingEvents so that .NET will grab the exit code and let the
// zombie be cleaned away without having to block our thread.
process.EnableRaisingEvents = true;
return process;
} catch (Exception ex) {
var message = "Failed to start Node process. To resolve this:.\n\n"
+ "[1] Ensure that Node.js is installed and can be found in one of the PATH directories.\n"
+ $" Current PATH enviroment variable is: { Environment.GetEnvironmentVariable("PATH") }\n"
+ " Make sure the Node executable is in one of those directories, or update your PATH.\n\n"
+ "[2] See the InnerException for further details of the cause.";
throw new InvalidOperationException(message, ex);
}
}
private static string UnencodeNewlines(string str)
{
if (str != null)
{
// The token here needs to match the const in OverrideStdOutputs.ts.
// See the comment there for why we're doing this.
str = str.Replace("__ns_newline__", Environment.NewLine);
}
return str;
}
private void ConnectToInputOutputStreams()
{
var initializationIsCompleted = false;
_nodeProcess.OutputDataReceived += (sender, evt) =>
{
if (evt.Data == ConnectionEstablishedMessage && !initializationIsCompleted)
{
_connectionIsReadySource.SetResult(null);
initializationIsCompleted = true;
}
else if (evt.Data != null)
{
OnOutputDataReceived(UnencodeNewlines(evt.Data));
}
};
_nodeProcess.ErrorDataReceived += (sender, evt) =>
{
if (evt.Data != null)
{
if (_launchWithDebugging && IsDebuggerMessage(evt.Data))
{
OutputLogger.LogWarning(evt.Data);
}
else
{
OnErrorDataReceived(UnencodeNewlines(evt.Data));
}
}
};
_nodeProcess.BeginOutputReadLine();
_nodeProcess.BeginErrorReadLine();
}
private static bool IsDebuggerMessage(string message)
{
return message.StartsWith("Debugger attached", StringComparison.Ordinal) ||
message.StartsWith("Debugger listening ", StringComparison.Ordinal) ||
message.StartsWith("To start debugging", StringComparison.Ordinal) ||
message.Equals("Warning: This is an experimental feature and could change at any time.", StringComparison.Ordinal) ||
message.Equals("For help see https://nodejs.org/en/docs/inspector", StringComparison.Ordinal) ||
message.Contains("chrome-devtools:");
}
private FileSystemWatcher BeginFileWatcher(string rootDir)
{
if (_watchFileExtensions == null || _watchFileExtensions.Length == 0)
{
// Nothing to watch
return null;
}
var watcher = new FileSystemWatcher(rootDir)
{
IncludeSubdirectories = true,
NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName
};
watcher.Changed += OnFileChanged;
watcher.Created += OnFileChanged;
watcher.Deleted += OnFileChanged;
watcher.Renamed += OnFileRenamed;
watcher.EnableRaisingEvents = true;
return watcher;
}
private void OnFileChanged(object source, FileSystemEventArgs e)
{
if (IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.FullPath);
}
}
private void OnFileRenamed(object source, RenamedEventArgs e)
{
if (IsFilenameBeingWatched(e.OldFullPath) || IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.OldFullPath);
}
}
private bool IsFilenameBeingWatched(string fullPath)
{
if (string.IsNullOrEmpty(fullPath))
{
return false;
}
else
{
var actualExtension = Path.GetExtension(fullPath) ?? string.Empty;
return _watchFileExtensions.Any(actualExtension.Equals);
}
}
private void RestartDueToFileChange(string fullPath)
{
OutputLogger.LogInformation($"Node will restart because file changed: {fullPath}");
_nodeProcessNeedsRestart = true;
// There's no need to watch for any more changes, since we're already restarting, and if the
// restart takes some time (e.g., due to connection draining), we could end up getting duplicate
// notifications.
EnsureFileSystemWatcherIsDisposed();
}
/// <summary>
/// Implements the finalization part of the IDisposable pattern by calling Dispose(false).
/// </summary>
~OutOfProcessNodeInstance()
{
Dispose(false);
}
}
}
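Since PrepareNodeProcessStartInfo is documented as the override point for customizing how Node is launched, here is a hedged sketch of a hypothetical derived hosting model (the class name and binary path are made up) that pins an absolute path to the Node binary while keeping the base class's argument, NODE_PATH and environment-variable handling.

using System.Collections.Generic;
using System.Diagnostics;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.NodeServices.HostingModels;

internal class PinnedNodeInstance : HttpNodeInstance
{
    public PinnedNodeInstance(NodeServicesOptions options) : base(options) { }

    protected override ProcessStartInfo PrepareNodeProcessStartInfo(
        string entryPointFilename, string projectPath, string commandLineArguments,
        IDictionary<string, string> environmentVars, bool launchWithDebugging, int debuggingPort)
    {
        var startInfo = base.PrepareNodeProcessStartInfo(
            entryPointFilename, projectPath, commandLineArguments,
            environmentVars, launchWithDebugging, debuggingPort);
        startInfo.FileName = "/usr/local/bin/node"; // hypothetical pinned Node binary instead of a PATH lookup
        return startInfo;
    }
}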

View File

@@ -1,54 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Represents the ability to invoke code in a Node.js environment. Although the underlying Node.js instance
/// might change over time (e.g., the process might be restarted), the <see cref="INodeServices"/> instance
/// will remain constant.
/// </summary>
public interface INodeServices : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args);
}
}

View File

@@ -1,24 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>Invoke Node.js modules at runtime in ASP.NET Core applications.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Remove="node_modules\**\*" />
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Hosting.Abstractions" Version="$(MicrosoftAspNetCoreHostingAbstractionsPackageVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="$(MicrosoftExtensionsLoggingConsolePackageVersion)" />
<PackageReference Include="Newtonsoft.Json" Version="$(NewtonsoftJsonPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>

View File

@@ -1,165 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Default implementation of INodeServices. This is the primary API surface through which developers
/// make use of this package. It provides simple "InvokeAsync" methods that dispatch calls to the
/// correct Node instance, creating and destroying those instances as needed.
///
/// If a Node instance dies (or none was yet created), this class takes care of creating a new one.
/// If a Node instance signals that it needs to be restarted (e.g., because a file changed), then this
/// class will create a new instance and dispatch future calls to it, while keeping the old instance
/// alive for a defined period so that any in-flight RPC calls can complete. This latter feature is
/// analogous to the "connection draining" feature implemented by HTTP load balancers.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.INodeServices" />
internal class NodeServicesImpl : INodeServices
{
private static TimeSpan ConnectionDrainingTimespan = TimeSpan.FromSeconds(15);
private Func<INodeInstance> _nodeInstanceFactory;
private INodeInstance _currentNodeInstance;
private object _currentNodeInstanceAccessLock = new object();
private Exception _instanceDelayedDisposalException;
internal NodeServicesImpl(Func<INodeInstance> nodeInstanceFactory)
{
_nodeInstanceFactory = nodeInstanceFactory;
}
public Task<T> InvokeAsync<T>(string moduleName, params object[] args)
{
return InvokeExportAsync<T>(moduleName, null, args);
}
public Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args)
{
return InvokeExportAsync<T>(cancellationToken, moduleName, null, args);
}
public Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, CancellationToken.None);
}
public Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, cancellationToken);
}
private async Task<T> InvokeExportWithPossibleRetryAsync<T>(string moduleName, string exportedFunctionName, object[] args, bool allowRetry, CancellationToken cancellationToken)
{
ThrowAnyOutstandingDelayedDisposalException();
var nodeInstance = GetOrCreateCurrentNodeInstance();
try
{
return await nodeInstance.InvokeExportAsync<T>(cancellationToken, moduleName, exportedFunctionName, args);
}
catch (NodeInvocationException ex)
{
// If the Node instance can't complete the invocation because it needs to restart (e.g., because the underlying
// Node process has exited, or a file it depends on has changed), then we make one attempt to restart transparently.
if (allowRetry && ex.NodeInstanceUnavailable)
{
// Perform the retry after clearing away the old instance
// Since disposal is delayed even though the node instance is replaced immediately, this produces the
// "connection draining" feature whereby in-flight RPC calls are given a certain period to complete.
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance == nodeInstance)
{
var disposalDelay = ex.AllowConnectionDraining ? ConnectionDrainingTimespan : TimeSpan.Zero;
DisposeNodeInstance(_currentNodeInstance, disposalDelay);
_currentNodeInstance = null;
}
}
// On the next call, don't allow retries, because we could get into an infinite retry loop, or a long retry
// loop that masks an underlying problem. A newly-created Node instance should be able to accept invocations,
// or something more serious must be wrong.
return await InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ false, cancellationToken);
}
else
{
throw;
}
}
}
public void Dispose()
{
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance != null)
{
DisposeNodeInstance(_currentNodeInstance, delay: TimeSpan.Zero);
_currentNodeInstance = null;
}
}
}
private void DisposeNodeInstance(INodeInstance nodeInstance, TimeSpan delay)
{
if (delay == TimeSpan.Zero)
{
nodeInstance.Dispose();
}
else
{
Task.Run(async () => {
try
{
await Task.Delay(delay);
nodeInstance.Dispose();
}
catch(Exception ex)
{
// Nothing's waiting for the delayed disposal task, so any exceptions in it would
// by default just get ignored. To make these discoverable, capture them here so
// they can be rethrown to the next caller to InvokeExportAsync.
_instanceDelayedDisposalException = ex;
}
});
}
}
private void ThrowAnyOutstandingDelayedDisposalException()
{
if (_instanceDelayedDisposalException != null)
{
var ex = _instanceDelayedDisposalException;
_instanceDelayedDisposalException = null;
throw new AggregateException(
"A previous attempt to dispose a Node instance failed. See InnerException for details.",
ex);
}
}
private INodeInstance GetOrCreateCurrentNodeInstance()
{
var instance = _currentNodeInstance;
if (instance == null)
{
lock (_currentNodeInstanceAccessLock)
{
instance = _currentNodeInstance;
if (instance == null)
{
instance = _currentNodeInstance = CreateNewNodeInstance();
}
}
}
return instance;
}
private INodeInstance CreateNewNodeInstance()
{
return _nodeInstanceFactory();
}
}
}

View File

@@ -1,363 +0,0 @@
# Microsoft.AspNetCore.NodeServices
This NuGet package provides a fast and robust way to invoke Node.js code from a .NET application (typically ASP.NET Core web apps). You can use this whenever you want to use Node/NPM-supplied functionality at runtime in ASP.NET. For example,
* Executing arbitrary JavaScript
* Runtime integration with JavaScript build or packaging tools, e.g., transpiling code via Babel
* Using NPM modules for image resizing, audio compression, language recognition, etc.
* Calling third-party services that supply Node-based APIs but don't yet ship native .NET ones
It is the underlying mechanism supporting the following packages:
* [`Microsoft.AspNetCore.SpaServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices) - builds on NodeServices, adding functionality commonly used in Single Page Applications, such as server-side prerendering, webpack middleware, and integration between server-side and client-side routing.
* [`Microsoft.AspNetCore.AngularServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.AngularServices) and [`Microsoft.AspNetCore.ReactServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.ReactServices) - these build on `SpaServices`, adding helpers specific to Angular and React, such as cache priming and integrating server-side and client-side validation
### Requirements
* [Node.js](https://nodejs.org/en/)
* To check that it's installed and can be found, run `node -v` on a command line
* Note: If you're deploying to an Azure web site, you don't need to do anything here - Node is already installed and available in the server environments
* [.NET](https://dot.net)
* For .NET Core (e.g., ASP.NET Core apps), you need at least 1.0 RC2
* For .NET Framework, you need at least version 4.5.1.
### Installation
For .NET Core apps:
* Add `Microsoft.AspNetCore.NodeServices` to the dependencies list in your `project.json` file
* Run `dotnet restore` (or if you use Visual Studio, just wait a moment - it will restore dependencies automatically)
For .NET Framework apps:
* `nuget install Microsoft.AspNetCore.NodeServices`
### Do you just want to build an ASP.NET Core app with Angular / React / Knockout / etc.?
In that case, you don't need to use NodeServices directly (or install it manually). You can either:
* **Recommended:** Use the `aspnetcore-spa` Yeoman generator to get a ready-to-go starting point using your choice of client-side framework. [Instructions here.](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/)
* Or set up your ASP.NET Core and client-side Angular/React/KO/etc. app manually, and then use the [`Microsoft.AspNetCore.SpaServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices) package to add features like server-side prerendering or Webpack middleware. But really, at least try using the `aspnetcore-spa` generator first.
# Simple usage example
## For ASP.NET Core apps
.NET Core has a built-in dependency injection (DI) system. NodeServices is designed to work with this, so you don't have to manage the creation or disposal of instances.
Enable NodeServices in your application by first adding the following to your `ConfigureServices` method in `Startup.cs`:
```csharp
public void ConfigureServices(IServiceCollection services)
{
// ... all your existing configuration is here ...
// Enable Node Services
services.AddNodeServices();
}
```
Now you can receive an instance of `NodeServices` as an action method parameter to any MVC action, and then use it to make calls into Node.js code, e.g.:
```csharp
public async Task<IActionResult> MyAction([FromServices] INodeServices nodeServices)
{
var result = await nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
return Content("1 + 2 = " + result);
}
```
Of course, you also need to supply the Node.js code you want to invoke. Create a file called `addNumbers.js` at the root of your ASP.NET Core application, and add the following code:
```javascript
module.exports = function (callback, first, second) {
var result = first + second;
callback(/* error */ null, result);
};
```
As you can see, the exported JavaScript function will receive the arguments you pass from .NET (as long as they are JSON-serializable), along with a Node-style callback you can use to send back a result or error when you are ready.
When the `InvokeAsync<T>` method receives the result back from Node, the result will be JSON-deserialized to whatever generic type you specified when calling `InvokeAsync<T>` (e.g., above, that type is `int`). If `InvokeAsync<T>` receives an error from your Node code, it will throw an exception describing that error.
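For instance (a minimal sketch assuming the `addNumbers.js` module above, and catching the general `Exception` type rather than a more specific one), a failed invocation can be handled like any other faulted task:
```csharp
public async Task<IActionResult> MyAction([FromServices] INodeServices nodeServices)
{
    try
    {
        var result = await nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
        return Content("1 + 2 = " + result);
    }
    catch (Exception ex)
    {
        // The exception message describes the error reported by the Node.js code
        return Content("Node invocation failed: " + ex.Message);
    }
}
```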
If you want to put `addNumbers.js` inside a subfolder rather than the root of your app, then also amend the path in the `InvokeAsync` call to match that path.
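For example, with a hypothetical layout where the module lives at `Node/addNumbers.js`:
```csharp
// Hypothetical path: <project root>/Node/addNumbers.js
var result = await nodeServices.InvokeAsync<int>("./Node/addNumbers", 1, 2);
```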
## For non-ASP.NET apps
In other types of .NET Core app, where you don't have ASP.NET supplying an `IServiceCollection` to you, you'll need to instantiate your own DI container. For example, add a reference to the .NET package `Microsoft.Extensions.DependencyInjection`, then construct an `IServiceCollection` and register NodeServices as usual:
```csharp
var services = new ServiceCollection();
services.AddNodeServices(options => {
// Set any properties that you want on 'options' here
});
```
Now you can ask it to supply the shared `INodeServices` instance:
```csharp
var serviceProvider = services.BuildServiceProvider();
var nodeServices = serviceProvider.GetRequiredService<INodeServices>();
```
Or, if you want to obtain a separate (non-shared) `INodeServices` instance:
```csharp
var options = new NodeServicesOptions(serviceProvider) { /* Assign/override any other options here */ };
var nodeServices = NodeServicesFactory.CreateNodeServices(options);
```
Besides this, the usage is the same as described for ASP.NET above, so you can now call `nodeServices.InvokeAsync<T>(...)` etc.
You can dispose the `nodeServices` object whenever you are done with it (and it will shut down the associated Node.js instance), but because these instances are expensive to create, you should retain and reuse them whenever possible. Don't dispose the shared instance returned from `serviceProvider.GetRequiredService` (except perhaps if you know your application is shutting down, although .NET's finalizers will dispose it anyway if the shutdown is graceful).
NodeServices instances are thread-safe - you can call `InvokeAsync<T>` simultaneously from multiple threads. Also, they are smart enough to detect if the associated Node instance has died and will automatically start a new Node instance if needed.
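As a minimal sketch (assuming the `serviceProvider` built above and an async context), a non-shared instance can be disposed deterministically with a `using` block once you're finished with it:
```csharp
var options = new NodeServicesOptions(serviceProvider) { /* Assign/override any other options here */ };
using (var nodeServices = NodeServicesFactory.CreateNodeServices(options))
{
    // Reuse this instance for as many invocations as you need before it's disposed
    var sum = await nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
    Console.WriteLine("1 + 2 = " + sum);
}
```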
# API Reference
### AddNodeServices
**Signatures:**
```csharp
AddNodeServices()
AddNodeServices(Action<NodeServicesOptions> setupAction)
```
This is an extension method on `IServiceCollection`. It registers NodeServices with ASP.NET Core's DI system. Typically you should call this from the `ConfigureServices` method in your `Startup.cs` file.
To access this extension method, you'll need to add the following namespace import to the top of your file, if it isn't already there:
```csharp
using Microsoft.Extensions.DependencyInjection;
```
**Examples**
Using default options:
```csharp
services.AddNodeServices();
```
Or, specifying options:
```csharp
services.AddNodeServices(options =>
{
options.WatchFileExtensions = new[] { ".coffee", ".sass" };
// ... etc. - see other properties below
});
```
**Parameters**
* `setupAction` - type: `Action<NodeServicesOptions>`
* Optional. If not specified, defaults will be used.
* Properties on `NodeServicesOptions`:
* `HostingModel` - a `NodeHostingModel` enum value. See: [hosting models](#hosting-models)
* `ProjectPath` - if specified, controls the working directory used when launching Node instances. This affects, for example, the location that `require` statements resolve relative paths against. If not specified, your application root directory is used.
* `WatchFileExtensions` - if specified, the launched Node instance will watch for changes to any files with these extensions, and auto-restart when any are changed. The default array includes `.js`, `.jsx`, `.ts`, `.tsx`, `.json`, and `.html`.
**Return type**: None. But once you've done this, you can get `NodeServices` instances out of ASP.NET's DI system. Typically it will be a singleton instance.
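For example (a minimal sketch using a hypothetical controller), the shared instance can also be injected via a constructor:
```csharp
public class HomeController : Controller
{
    private readonly INodeServices _nodeServices;
    public HomeController(INodeServices nodeServices)
    {
        _nodeServices = nodeServices;
    }
    public async Task<IActionResult> Index()
    {
        var result = await _nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
        return Content("1 + 2 = " + result);
    }
}
```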
### CreateNodeServices
**Signature:**
```csharp
CreateNodeServices(NodeServicesOptions options)
```
Supplies a new (non-shared) instance of `NodeServices`.
**Example**
```csharp
var options = new NodeServicesOptions(serviceProvider); // Obtains default options from DI config
var nodeServices = NodeServicesFactory.CreateNodeServices(options);
```
**Parameters**
* `options` - type: `NodeServicesOptions`.
* Configures the returned `NodeServices` instance.
* Properties:
* `HostingModel` - a `NodeHostingModel` enum value. See: [hosting models](#hosting-models)
* `ProjectPath` - if specified, controls the working directory used when launching Node instances. This affects, for example, the location that `require` statements resolve relative paths against. If not specified, your application root directory is used.
* `WatchFileExtensions` - if specified, the launched Node instance will watch for changes to any files with these extensions, and auto-restart when any are changed.
**Return type:** `NodeServices`
If you create a `NodeServices` instance this way, you can also dispose it (call `nodeServiceInstance.Dispose();`) and it will shut down the associated Node instance. But because these instances are expensive to create, you should retain and reuse your `NodeServices` object whenever possible. They are thread-safe - you can call `nodeServiceInstance.InvokeAsync<T>(...)` simultaneously from multiple threads.
### InvokeAsync&lt;T&gt;
**Signature:**
```csharp
InvokeAsync<T>(string moduleName, params object[] args)
```
Asynchronously calls a JavaScript function and returns the result, or throws an exception if the result was an error.
**Example 1: Getting a JSON-serializable object from Node (the most common use case)**
```csharp
var result = await myNodeServicesInstance.InvokeAsync<TranspilerResult>(
"./Node/transpile",
pathOfSomeFileToBeTranspiled);
```
... where `TranspilerResult` might be defined as follows:
```csharp
public class TranspilerResult
{
public string Code { get; set; }
public string[] Warnings { get; set; }
}
```
... and the corresponding JavaScript module (in `Node/transpile.js`) could be implemented as follows:
```javascript
module.exports = function (callback, filePath) {
// Invoke some external transpiler (e.g., an NPM module) then:
callback(null, {
code: theTranspiledCodeAsAString,
warnings: someArrayOfStrings
});
};
```
**Example 2: Getting a stream of binary data from Node**
```csharp
var imageStream = await myNodeServicesInstance.InvokeAsync<Stream>(
"./Node/resizeImage",
fullImagePath,
width,
height);
// In an MVC action method, you can pipe the result to the response as follows
return File(imageStream, someContentType);
```
... where the corresponding JavaScript module (in `Node/resizeImage.js`) could be implemented as follows:
```javascript
var sharp = require('sharp'); // A popular image manipulation package on NPM
module.exports = function(result, physicalPath, maxWidth, maxHeight) {
// Invoke the 'sharp' NPM module, and have it pipe the resulting image data back to .NET
sharp(physicalPath)
.resize(maxWidth || null, maxHeight || null)
.pipe(result.stream);
}
```
There's a working image resizing example following this approach [here](https://github.com/aspnet/JavaScriptServices/tree/dev/samples/misc/NodeServicesExamples) - see the [C# code](https://github.com/aspnet/JavaScriptServices/blob/dev/samples/misc/NodeServicesExamples/Controllers/ResizeImage.cs) and the [JavaScript code](https://github.com/aspnet/JavaScriptServices/blob/dev/samples/misc/NodeServicesExamples/Node/resizeImage.js).
**Parameters**
* `moduleName` - type: `string`
* The name of a JavaScript module that Node.js must be able to resolve by calling `require(moduleName)`. This can be a relative path such as `"./Some/Directory/mymodule"`. If you don't specify the `.js` filename extension, Node.js will infer it anyway.
* `params`
* Any set of JSON-serializable objects you want to pass to the exported JavaScript function
**Return type:** `T`, which must be:
* A JSON-serializable .NET type, if your JavaScript code uses the `callback(error, result)` pattern to return an object, as in example 1 above
* Or, the type `System.IO.Stream`, if your JavaScript code writes data to the `result.stream` object (which is a [Node `Duplex` stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex)), as in example 2 above
### InvokeExportAsync&lt;T&gt;
**Signature**
```csharp
InvokeExportAsync<T>(string moduleName, string exportName, params object[] args)
```
This is exactly the same as `InvokeAsync<T>`, except that it also takes an `exportName` parameter. You can use this if you want your JavaScript module to export more than one function.
**Example**
```csharp
var someString = await myNodeServicesInstance.InvokeExportAsync<string>(
"./Node/myNodeApis",
"getMeAString");
var someStringInFrench = await myNodeServicesInstance.InvokeExportAsync<string>(
"./Node/myNodeApis",
"convertLanguage"
someString,
"fr-FR");
```
... where the corresponding JavaScript module (in `Node/myNodeApis.js`) could be implemented as follows:
```javascript
module.exports = {
getMeAString: function (callback) {
callback(null, 'Here is a string');
},
convertLanguage: function (callback, sourceString, targetLanguage) {
// Implementation detail left as an exercise for the reader
doMachineTranslation(sourceString, targetLanguage, function(error, result) {
callback(error, result);
});
}
};
```
**Parameters, return type, etc.** For all other details, see the docs for [`InvokeAsync<T>`](#invokeasynct)
## Hosting models
NodeServices has a pluggable hosting/transport mechanism, because it is an abstraction over various possible ways to invoke Node.js from .NET. This allows more high-level facilities (e.g., for Angular prerendering) to be agnostic to the details of launching Node and communicating with it - those high-level facilities can just trust that *somehow* we can invoke code in Node for them.
Using this abstraction, we could run Node inside the .NET process, in a separate process on the same machine, or even on a different machine altogether. At the time of writing, all the built-in hosting mechanisms work by launching Node as a separate process on the same machine as your .NET code.
**What about Edge.js?**
[Edge.js](http://tjanczuk.github.io/edge/#/) hosts Node.js inside a .NET process, or vice-versa, and lets you interoperate between the two.
NodeServices is not meant to compete with Edge.js. Instead, NodeServices is an abstraction over all possible ways to invoke Node from .NET. Eventually we may offer an in-process Node hosting mechanism via Edge.js, without you needing to change your higher-level code. This can be done when Edge.js supports hosting Node in cross-platform .NET Core processes ([discussion](https://github.com/tjanczuk/edge/issues/279)).
**What about VroomJS?**
People have asked about using [VroomJS](https://github.com/fogzot/vroomjs) as a hosting mechanism. We don't currently plan to implement that, because Vroom only supplies a V8 runtime environment, not a complete Node environment. The difference is that, with a true Node environment, *all* NPM modules and Node code will work exactly as expected, whereas in a Vroom environment, code will only work if it doesn't use any Node primitives, which rules out large portions of the NPM landscape.
### Built-in hosting models
Normally, you can just use the default hosting model, and not worry about it. But if you have some special requirements, you can write your own hosting model, or reference a package that supplies one.
For example, you could use the 'socket' hosting model. It performs RPC between .NET and Node.js using a fast, low-level binary channel rather than the default HTTP transport. To do this, first install the NuGet package `Microsoft.AspNetCore.NodeServices.Sockets`. Then, at the top of your `Startup.cs` file, add:
```csharp
using Microsoft.AspNetCore.NodeServices.Sockets;
```
...then in your `Startup.cs` file's `ConfigureServices` method, you can configure:
```csharp
services.AddNodeServices(options => {
options.UseSocketHosting();
});
```
Now when you run your application, it will use the socket-based hosting and transport mechanism. In the past, the socket transport was faster than HTTP, but since .NET Core 1.1 improved the performance of `HttpClient` there isn't really any speed difference any more, so there's no longer any significant advantage to using `Microsoft.AspNetCore.NodeServices.Sockets`.
### Custom hosting models
If you implement a custom hosting model (by implementing `INodeInstance`), then you can cause it to be used by populating `NodeInstanceFactory` on your options:
```csharp
services.AddNodeServices(options =>
{
options.NodeInstanceFactory = () => new MyCustomNodeInstance();
});
```

View File

@@ -1,94 +0,0 @@
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
import './Util/PatchModuleResolutionLStat';
import './Util/OverrideStdOutputs';
import * as http from 'http';
import * as path from 'path';
import { parseArgs } from './Util/ArgsUtil';
import { exitWhenParentExits } from './Util/ExitWhenParentExits';
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
const dynamicRequire: (name: string) => any = eval('require');
const server = http.createServer((req, res) => {
readRequestBodyAsJson(req, bodyJson => {
let hasSentResult = false;
const callback = (errorValue, successValue) => {
if (!hasSentResult) {
hasSentResult = true;
if (errorValue) {
respondWithError(res, errorValue);
} else if (typeof successValue !== 'string') {
// Arbitrary object/number/etc - JSON-serialize it
let successValueJson: string;
try {
successValueJson = JSON.stringify(successValue);
} catch (ex) {
// JSON serialization error - pass it back to .NET
respondWithError(res, ex);
return;
}
res.setHeader('Content-Type', 'application/json');
res.end(successValueJson);
} else {
// String - can bypass JSON-serialization altogether
res.setHeader('Content-Type', 'text/plain');
res.end(successValue);
}
}
};
// Support streamed responses
Object.defineProperty(callback, 'stream', {
enumerable: true,
get: function() {
if (!hasSentResult) {
hasSentResult = true;
res.setHeader('Content-Type', 'application/octet-stream');
}
return res;
}
});
try {
const resolvedPath = path.resolve(process.cwd(), bodyJson.moduleName);
const invokedModule = dynamicRequire(resolvedPath);
const func = bodyJson.exportedFunctionName ? invokedModule[bodyJson.exportedFunctionName] : invokedModule;
if (!func) {
throw new Error('The module "' + resolvedPath + '" has no export named "' + bodyJson.exportedFunctionName + '"');
}
func.apply(null, [callback].concat(bodyJson.args));
} catch (synchronousException) {
callback(synchronousException, null);
}
});
});
const parsedArgs = parseArgs(process.argv);
const requestedPortOrZero = parsedArgs.port || 0; // 0 means 'let the OS decide'
server.listen(requestedPortOrZero, 'localhost', function () {
// Signal to HttpNodeHost which port it should make its HTTP connections on
console.log('[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port ' + server.address().port + ']');
// Signal to the NodeServices base class that we're ready to accept invocations
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
function readRequestBodyAsJson(request, callback) {
let requestBodyAsString = '';
request.on('data', chunk => { requestBodyAsString += chunk; });
request.on('end', () => { callback(JSON.parse(requestBodyAsString)); });
}
function respondWithError(res: http.ServerResponse, errorValue: any) {
res.statusCode = 500;
res.end(JSON.stringify({
errorMessage: errorValue.message || errorValue,
errorDetails: errorValue.stack || null
}));
}

View File

@@ -1,18 +0,0 @@
export function parseArgs(args: string[]): any {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
const result = {};
let currentKey = null;
args.forEach(arg => {
if (arg.indexOf('--') === 0) {
const argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
} else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}

View File

@@ -1,81 +0,0 @@
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
const pollIntervalMs = 1000;
export function exitWhenParentExits(parentPid: number, ignoreSigint: boolean) {
setInterval(() => {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
if (ignoreSigint) {
// Pressing ctrl+c in the terminal sends a SIGINT to all processes in the foreground process tree.
// By default, the Node process would then exit before the .NET process, because ASP.NET implements
// a delayed shutdown to allow ongoing requests to complete.
//
// This is problematic, because if Node exits first, the CopyToAsync code in ConditionalProxyMiddleware
// will experience a read fault, and logs a huge load of errors. Fortunately, since the Node process is
// already set up to shut itself down if it detects the .NET process is terminated, all we have to do is
// ignore the SIGINT. The Node process will then terminate automatically after the .NET process does.
//
// A better solution would be to have WebpackDevMiddleware listen for SIGINT and gracefully close any
// ongoing EventSource connections before letting the Node process exit, independently of the .NET
// process exiting. However, doing this well in general is very nontrivial (see all the discussion at
// https://github.com/nodejs/node/issues/2642).
process.on('SIGINT', () => {
console.log('Received SIGINT. Waiting for .NET process to exit...');
});
}
}
function processExists(pid: number) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
} catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error(`Attempted to check whether process ${pid} was running, but got a permissions error.`);
}
return false;
}
}

View File

@@ -1,37 +0,0 @@
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
const findInternalNewlinesRegex = /\n(?!$)/g;
const encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream: NodeJS.WritableStream) {
const origWriteFunction = outputStream.write;
outputStream.write = <any>function (value: any) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
const argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
} else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str: string): string {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}

View File

@@ -1,48 +0,0 @@
import * as path from 'path';
const startsWith = (str: string, prefix: string) => str.substring(0, prefix.length) === prefix;
const appRootDir = process.cwd();
function patchedLStat(pathToStatLong: string, fsReqWrap?: any) {
try {
// If the lstat completes without errors, we don't modify its behavior at all
return origLStat.apply(this, arguments);
} catch(ex) {
const shouldOverrideError =
startsWith(ex.message, 'EPERM') // It's a permissions error
&& typeof appRootDirLong === 'string'
&& startsWith(appRootDirLong, pathToStatLong) // ... for an ancestor directory
&& ex.stack.indexOf('Object.realpathSync ') >= 0; // ... during symlink resolution
if (shouldOverrideError) {
// Fake the result to give the same result as an 'lstat' on the app root dir.
// This stops Node failing to load modules just because it doesn't know whether
// ancestor directories are symlinks or not. If there's a genuine file
// permissions issue, it will still surface later when Node actually
// tries to read the file.
return origLStat.call(this, appRootDir, fsReqWrap);
} else {
// In any other case, preserve the original error
throw ex;
}
}
};
// It's only necessary to apply this workaround on Windows
let appRootDirLong: string = null;
let origLStat: Function = null;
if (/^win/.test(process.platform)) {
try {
// Get the app's root dir in Node's internal "long" format (e.g., \\?\C:\dir\subdir)
appRootDirLong = (path as any)._makeLong(appRootDir);
// Actually apply the patch, being as defensive as possible
const bindingFs = (process as any).binding('fs');
origLStat = bindingFs.lstat;
if (typeof origLStat === 'function') {
bindingFs.lstat = patchedLStat;
}
} catch(ex) {
// If some future version of Node throws (e.g., to prevent use of process.binding()),
// don't apply the patch, but still let the application run.
}
}

View File

@@ -1,11 +0,0 @@
{
"compilerOptions": {
"target": "es3",
"module": "commonjs",
"moduleResolution": "node",
"types": ["node"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -1,30 +0,0 @@
using System;
using System.IO;
using System.Reflection;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Contains methods for reading embedded resources.
/// </summary>
public static class EmbeddedResourceReader
{
/// <summary>
/// Reads the specified embedded resource from a given assembly.
/// </summary>
/// <param name="assemblyContainingType">Any <see cref="Type"/> in the assembly whose resource is to be read.</param>
/// <param name="path">The path of the resource to be read.</param>
/// <returns>The contents of the resource.</returns>
public static string Read(Type assemblyContainingType, string path)
{
var asm = assemblyContainingType.GetTypeInfo().Assembly;
var embeddedResourceName = asm.GetName().Name + path.Replace("/", ".");
using (var stream = asm.GetManifestResourceStream(embeddedResourceName))
using (var sr = new StreamReader(stream))
{
return sr.ReadToEnd();
}
}
}
}

View File

@@ -1,82 +0,0 @@
using System;
using System.IO;
using System.Threading;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Makes it easier to pass script files to Node in a way that's sure to clean up after the process exits.
/// </summary>
public sealed class StringAsTempFile : IDisposable
{
private bool _disposedValue;
private bool _hasDeletedTempFile;
private object _fileDeletionLock = new object();
private IDisposable _applicationLifetimeRegistration;
/// <summary>
/// Create a new instance of <see cref="StringAsTempFile"/>.
/// </summary>
/// <param name="content">The contents of the temporary file to be created.</param>
/// <param name="applicationStoppingToken">A token that indicates when the host application is stopping.</param>
public StringAsTempFile(string content, CancellationToken applicationStoppingToken)
{
FileName = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
File.WriteAllText(FileName, content);
// Because .NET finalizers don't reliably run when the process is terminating, also
// add event handlers for other shutdown scenarios.
_applicationLifetimeRegistration = applicationStoppingToken.Register(EnsureTempFileDeleted);
}
/// <summary>
/// Specifies the filename of the temporary file.
/// </summary>
public string FileName { get; }
/// <summary>
/// Disposes the instance and deletes the associated temporary file.
/// </summary>
public void Dispose()
{
DisposeImpl(true);
GC.SuppressFinalize(this);
}
private void DisposeImpl(bool disposing)
{
if (!_disposedValue)
{
if (disposing)
{
// Dispose managed state
_applicationLifetimeRegistration.Dispose();
}
EnsureTempFileDeleted();
_disposedValue = true;
}
}
private void EnsureTempFileDeleted()
{
lock (_fileDeletionLock)
{
if (!_hasDeletedTempFile)
{
File.Delete(FileName);
_hasDeletedTempFile = true;
}
}
}
/// <summary>
/// Implements the finalization part of the IDisposable pattern by calling Dispose(false).
/// </summary>
~StringAsTempFile()
{
DisposeImpl(false);
}
}
}

View File

@@ -1,30 +0,0 @@
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices
{
internal static class TaskExtensions
{
public static Task OrThrowOnCancellation(this Task task, CancellationToken cancellationToken)
{
return task.IsCompleted
? task // If the task is already completed, no need to wrap it in a further layer of task
: task.ContinueWith(
_ => {}, // If the task completes, allow execution to continue
cancellationToken,
TaskContinuationOptions.ExecuteSynchronously,
TaskScheduler.Default);
}
public static Task<T> OrThrowOnCancellation<T>(this Task<T> task, CancellationToken cancellationToken)
{
return task.IsCompleted
? task // If the task is already completed, no need to wrap it in a further layer of task
: task.ContinueWith(
t => t.Result, // If the task completes, pass through its result
cancellationToken,
TaskContinuationOptions.ExecuteSynchronously,
TaskScheduler.Default);
}
}
}

View File

@@ -1,18 +0,0 @@
{
"name": "nodeservices",
"version": "1.0.0",
"description": "This is not really an NPM package and will not be published. This file exists only to reference compilation tools.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "./node_modules/.bin/webpack"
},
"author": "Microsoft",
"license": "Apache-2.0",
"devDependencies": {
"@types/node": "^6.0.42",
"ts-loader": "^0.8.2",
"typescript": "^2.0.0",
"webpack": "^1.13.1"
}
}

View File

@@ -1,20 +0,0 @@
module.exports = {
target: 'node',
externals: ['fs', 'net', 'events', 'readline', 'stream'],
resolve: {
extensions: [ '.ts' ]
},
module: {
loaders: [
{ test: /\.ts$/, loader: 'ts-loader' },
]
},
entry: {
'entrypoint-http': ['./TypeScript/HttpNodeInstanceEntryPoint']
},
output: {
libraryTarget: 'commonjs',
path: './Content/Node',
filename: '[name].js'
}
};

View File

@@ -1,2 +0,0 @@
/bin/
/node_modules/

View File

@@ -1,178 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var path = __webpack_require__(2);
// Separate declaration and export just to add type checking on function signature
exports.renderToString = renderToStringImpl;
// This function is invoked by .NET code (via NodeServices). Its job is to hand off execution to the application's
// prerendering boot function. It can operate in two modes:
// [1] Legacy mode
// This is for backward compatibility with projects created with templates older than the generator version 0.6.0.
// In this mode, we don't really do anything here - we just load the 'aspnet-prerendering' NPM module (which must
// exist in node_modules, and must be v1.x (not v2+)), and pass through all the parameters to it. Code in
// 'aspnet-prerendering' v1.x will locate the boot function and invoke it.
// The drawback to this mode is that, for it to work, you have to deploy node_modules to production.
// [2] Current mode
// This is for projects created with the Yeoman generator 0.6.0+ (or projects manually updated). In this mode,
// we don't invoke 'require' at runtime at all. All our dependencies are bundled into the NuGet package, so you
// don't have to deploy node_modules to production.
// To determine whether we're in mode [1] or [2], the code locates your prerendering boot function, and checks whether
// a certain flag is attached to the function instance.
function renderToStringImpl(callback, applicationBasePath, bootModule, absoluteRequestUrl, requestPathAndQuery, customDataParameter, overrideTimeoutMilliseconds) {
try {
var forceLegacy = isLegacyAspNetPrerendering();
var renderToStringFunc = !forceLegacy && findRenderToStringFunc(applicationBasePath, bootModule);
var isNotLegacyMode = renderToStringFunc && renderToStringFunc['isServerRenderer'];
if (isNotLegacyMode) {
// Current (non-legacy) mode - we invoke the exported function directly (instead of going through aspnet-prerendering)
// It's type-safe to just apply the incoming args to this function, because we already type-checked that it's a RenderToStringFunc,
// just like renderToStringImpl itself is.
renderToStringFunc.apply(null, arguments);
}
else {
// Legacy mode - just hand off execution to 'aspnet-prerendering' v1.x, which must exist in node_modules at runtime
var aspNetPrerenderingV1RenderToString = __webpack_require__(3).renderToString;
if (aspNetPrerenderingV1RenderToString) {
aspNetPrerenderingV1RenderToString(callback, applicationBasePath, bootModule, absoluteRequestUrl, requestPathAndQuery, customDataParameter, overrideTimeoutMilliseconds);
}
else {
callback('If you use aspnet-prerendering >= 2.0.0, you must update your server-side boot module to call createServerRenderer. '
+ 'Either update your boot module code, or revert to aspnet-prerendering version 1.x');
}
}
}
catch (ex) {
// Make sure loading errors are reported back to the .NET part of the app
callback('Prerendering failed because of error: '
+ ex.stack
+ '\nCurrent directory is: '
+ process.cwd());
}
}
;
function findBootModule(applicationBasePath, bootModule) {
var bootModuleNameFullPath = path.resolve(applicationBasePath, bootModule.moduleName);
if (bootModule.webpackConfig) {
// If you're using asp-prerender-webpack-config, you're definitely in legacy mode
return null;
}
else {
return require(bootModuleNameFullPath);
}
}
function findRenderToStringFunc(applicationBasePath, bootModule) {
// First try to load the module
var foundBootModule = findBootModule(applicationBasePath, bootModule);
if (foundBootModule === null) {
return null; // Must be legacy mode
}
// Now try to pick out the function they want us to invoke
var renderToStringFunc;
if (bootModule.exportName) {
// Explicitly-named export
renderToStringFunc = foundBootModule[bootModule.exportName];
}
else if (typeof foundBootModule !== 'function') {
// TypeScript-style default export
renderToStringFunc = foundBootModule.default;
}
else {
// Native default export
renderToStringFunc = foundBootModule;
}
// Validate the result
if (typeof renderToStringFunc !== 'function') {
if (bootModule.exportName) {
throw new Error("The module at " + bootModule.moduleName + " has no function export named " + bootModule.exportName + ".");
}
else {
throw new Error("The module at " + bootModule.moduleName + " does not export a default function, and you have not specified which export to invoke.");
}
}
return renderToStringFunc;
}
function isLegacyAspNetPrerendering() {
var version = getAspNetPrerenderingPackageVersion();
return version && /^1\./.test(version);
}
function getAspNetPrerenderingPackageVersion() {
try {
var packageEntryPoint = require.resolve('aspnet-prerendering');
var packageDir = path.dirname(packageEntryPoint);
var packageJsonPath = path.join(packageDir, 'package.json');
var packageJson = require(packageJsonPath);
return packageJson.version.toString();
}
catch (ex) {
// Implies aspnet-prerendering isn't in node_modules at all (or node_modules itself doesn't exist,
// which will be the case in production based on latest templates).
return null;
}
}
/***/ },
/* 2 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("aspnet-prerendering");
/***/ }
/******/ ])));

View File

@@ -1,86 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(4);
/***/ },
/* 1 */,
/* 2 */,
/* 3 */,
/* 4 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Pass through the invocation to the 'aspnet-webpack' package, verifying that it can be loaded
function createWebpackDevServer(callback) {
var aspNetWebpack;
try {
aspNetWebpack = __webpack_require__(5);
}
catch (ex) {
// Developers sometimes have trouble with badly-configured Node installations, where it's unable
// to find node_modules. Or they accidentally fail to deploy node_modules, or even to run 'npm install'.
// Make sure such errors are reported back to the .NET part of the app.
callback('Webpack dev middleware failed because of an error while loading \'aspnet-webpack\'. Error was: '
+ ex.stack
+ '\nCurrent directory is: '
+ process.cwd());
return;
}
return aspNetWebpack.createWebpackDevServer.apply(this, arguments);
}
exports.createWebpackDevServer = createWebpackDevServer;
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("aspnet-webpack");
/***/ }
/******/ ])));

View File

@@ -1,27 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>Helpers for building single-page applications on ASP.NET MVC Core.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Remove="node_modules\**\*" />
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Mvc.TagHelpers" Version="$(MicrosoftAspNetCoreMvcTagHelpersPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.ViewFeatures" Version="$(MicrosoftAspNetCoreMvcViewFeaturesPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>

View File

@@ -1,49 +0,0 @@
using System.Threading;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.NodeServices;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// Default implementation of a DI service that provides convenient access to
/// server-side prerendering APIs. This is an alternative to prerendering via
/// the asp-prerender-module tag helper.
/// </summary>
internal class DefaultSpaPrerenderer : ISpaPrerenderer
{
private readonly string _applicationBasePath;
private readonly CancellationToken _applicationStoppingToken;
private readonly IHttpContextAccessor _httpContextAccessor;
private readonly INodeServices _nodeServices;
public DefaultSpaPrerenderer(
INodeServices nodeServices,
IApplicationLifetime applicationLifetime,
IHostingEnvironment hostingEnvironment,
IHttpContextAccessor httpContextAccessor)
{
_applicationBasePath = hostingEnvironment.ContentRootPath;
_applicationStoppingToken = applicationLifetime.ApplicationStopping;
_httpContextAccessor = httpContextAccessor;
_nodeServices = nodeServices;
}
public Task<RenderToStringResult> RenderToString(
string moduleName,
string exportName = null,
object customDataParameter = null,
int timeoutMilliseconds = default(int))
{
return Prerenderer.RenderToString(
_applicationBasePath,
_nodeServices,
_applicationStoppingToken,
new JavaScriptModuleExport(moduleName) { ExportName = exportName },
_httpContextAccessor.HttpContext,
customDataParameter,
timeoutMilliseconds);
}
}
}

View File

@@ -1,28 +0,0 @@
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// Represents a service that can perform server-side prerendering for
/// JavaScript-based Single Page Applications. This is an alternative
/// to using the 'asp-prerender-module' tag helper.
/// </summary>
public interface ISpaPrerenderer
{
/// <summary>
/// Invokes JavaScript code to perform server-side prerendering for a
/// Single-Page Application. This is an alternative to using the
/// 'asp-prerender-module' tag helper.
/// </summary>
/// <param name="moduleName">The JavaScript module that exports a prerendering function.</param>
/// <param name="exportName">The name of the export from the JavaScript module, if it is not the default export.</param>
/// <param name="customDataParameter">An optional JSON-serializable object to pass to the JavaScript prerendering function.</param>
/// <param name="timeoutMilliseconds">If specified, the prerendering task will time out after this duration if not already completed.</param>
/// <returns></returns>
Task<RenderToStringResult> RenderToString(
string moduleName,
string exportName = null,
object customDataParameter = null,
int timeoutMilliseconds = default(int));
}
}

View File

@@ -1,30 +0,0 @@
using System;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// Describes how to find the JavaScript code that performs prerendering.
/// </summary>
public class JavaScriptModuleExport
{
/// <summary>
/// Creates a new instance of <see cref="JavaScriptModuleExport"/>.
/// </summary>
/// <param name="moduleName">The path to the JavaScript module containing prerendering code.</param>
public JavaScriptModuleExport(string moduleName)
{
ModuleName = moduleName;
}
/// <summary>
/// Specifies the path to the JavaScript module containing prerendering code.
/// </summary>
public string ModuleName { get; private set; }
/// <summary>
/// If set, specifies the name of the CommonJS export that is the prerendering function to execute.
/// If not set, the JavaScript module's default CommonJS export must itself be the prerendering function.
/// </summary>
public string ExportName { get; set; }
}
}

View File

@@ -1,126 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.Razor.TagHelpers;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// A tag helper for prerendering JavaScript applications on the server.
/// </summary>
[HtmlTargetElement(Attributes = PrerenderModuleAttributeName)]
public class PrerenderTagHelper : TagHelper
{
private const string PrerenderModuleAttributeName = "asp-prerender-module";
private const string PrerenderExportAttributeName = "asp-prerender-export";
private const string PrerenderDataAttributeName = "asp-prerender-data";
private const string PrerenderTimeoutAttributeName = "asp-prerender-timeout";
private static INodeServices _fallbackNodeServices; // Used only if no INodeServices was registered with DI
private readonly string _applicationBasePath;
private readonly CancellationToken _applicationStoppingToken;
private readonly INodeServices _nodeServices;
/// <summary>
/// Creates a new instance of <see cref="PrerenderTagHelper"/>.
/// </summary>
/// <param name="serviceProvider">The <see cref="IServiceProvider"/>.</param>
public PrerenderTagHelper(IServiceProvider serviceProvider)
{
var hostEnv = (IHostingEnvironment) serviceProvider.GetService(typeof(IHostingEnvironment));
_nodeServices = (INodeServices) serviceProvider.GetService(typeof(INodeServices)) ?? _fallbackNodeServices;
_applicationBasePath = hostEnv.ContentRootPath;
var applicationLifetime = (IApplicationLifetime) serviceProvider.GetService(typeof(IApplicationLifetime));
_applicationStoppingToken = applicationLifetime.ApplicationStopping;
// Consider removing the following. Having it means you can get away with not putting app.AddNodeServices()
// in your startup file, but then again it might be confusing that you don't need to.
if (_nodeServices == null)
{
_nodeServices = _fallbackNodeServices = NodeServicesFactory.CreateNodeServices(
new NodeServicesOptions(serviceProvider));
}
}
/// <summary>
/// Specifies the path to the JavaScript module containing prerendering code.
/// </summary>
[HtmlAttributeName(PrerenderModuleAttributeName)]
public string ModuleName { get; set; }
/// <summary>
/// If set, specifies the name of the CommonJS export that is the prerendering function to execute.
/// If not set, the JavaScript module's default CommonJS export must itself be the prerendering function.
/// </summary>
[HtmlAttributeName(PrerenderExportAttributeName)]
public string ExportName { get; set; }
/// <summary>
/// An optional JSON-serializable parameter to be supplied to the prerendering code.
/// </summary>
[HtmlAttributeName(PrerenderDataAttributeName)]
public object CustomDataParameter { get; set; }
/// <summary>
/// The maximum duration to wait for prerendering to complete.
/// </summary>
[HtmlAttributeName(PrerenderTimeoutAttributeName)]
public int TimeoutMillisecondsParameter { get; set; }
/// <summary>
/// The <see cref="ViewContext"/>.
/// </summary>
[HtmlAttributeNotBound]
[ViewContext]
public ViewContext ViewContext { get; set; }
/// <summary>
/// Executes the tag helper to perform server-side prerendering.
/// </summary>
/// <param name="context">The <see cref="TagHelperContext"/>.</param>
/// <param name="output">The <see cref="TagHelperOutput"/>.</param>
/// <returns>A <see cref="Task"/> representing the operation.</returns>
public override async Task ProcessAsync(TagHelperContext context, TagHelperOutput output)
{
var result = await Prerenderer.RenderToString(
_applicationBasePath,
_nodeServices,
_applicationStoppingToken,
new JavaScriptModuleExport(ModuleName)
{
ExportName = ExportName
},
ViewContext.HttpContext,
CustomDataParameter,
TimeoutMillisecondsParameter);
if (!string.IsNullOrEmpty(result.RedirectUrl))
{
// It's a redirection
ViewContext.HttpContext.Response.Redirect(result.RedirectUrl);
return;
}
if (result.StatusCode.HasValue)
{
ViewContext.HttpContext.Response.StatusCode = result.StatusCode.Value;
}
// It's some HTML to inject
output.Content.SetHtmlContent(result.Html);
// Also attach any specified globals to the 'window' object. This is useful for transferring
// general state between server and client.
var globalsScript = result.CreateGlobalsAssignmentScript();
if (!string.IsNullOrEmpty(globalsScript))
{
output.PostElement.SetHtmlContent($"<script>{globalsScript}</script>");
}
}
}
}

View File

@@ -1,103 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// Performs server-side prerendering by invoking code in Node.js.
/// </summary>
public static class Prerenderer
{
private static readonly object CreateNodeScriptLock = new object();
private static StringAsTempFile NodeScript;
internal static Task<RenderToStringResult> RenderToString(
string applicationBasePath,
INodeServices nodeServices,
CancellationToken applicationStoppingToken,
JavaScriptModuleExport bootModule,
HttpContext httpContext,
object customDataParameter,
int timeoutMilliseconds)
{
// We want to pass the original, unencoded incoming URL data through to Node, so that
// server-side code has the same view of the URL as client-side code (on the client,
// location.pathname returns an unencoded string).
// The following logic handles special characters in URL paths in the same way that
// Node and client-side JS does. For example, the path "/a=b%20c" gets passed through
// unchanged (whereas other .NET APIs do change it - Path.Value will return it as
// "/a=b c" and Path.ToString() will return it as "/a%3db%20c")
var requestFeature = httpContext.Features.Get<IHttpRequestFeature>();
var unencodedPathAndQuery = requestFeature.RawTarget;
var request = httpContext.Request;
var unencodedAbsoluteUrl = $"{request.Scheme}://{request.Host}{unencodedPathAndQuery}";
return RenderToString(
applicationBasePath,
nodeServices,
applicationStoppingToken,
bootModule,
unencodedAbsoluteUrl,
unencodedPathAndQuery,
customDataParameter,
timeoutMilliseconds,
request.PathBase.ToString());
}
/// <summary>
/// Performs server-side prerendering by invoking code in Node.js.
/// </summary>
/// <param name="applicationBasePath">The root path to your application. This is used when resolving project-relative paths.</param>
/// <param name="nodeServices">The instance of <see cref="INodeServices"/> that will be used to invoke JavaScript code.</param>
/// <param name="applicationStoppingToken">A token that indicates when the host application is stopping.</param>
/// <param name="bootModule">The path to the JavaScript file containing the prerendering logic.</param>
/// <param name="requestAbsoluteUrl">The URL of the currently-executing HTTP request. This is supplied to the prerendering code.</param>
/// <param name="requestPathAndQuery">The path and query part of the URL of the currently-executing HTTP request. This is supplied to the prerendering code.</param>
/// <param name="customDataParameter">An optional JSON-serializable parameter to be supplied to the prerendering code.</param>
/// <param name="timeoutMilliseconds">The maximum duration to wait for prerendering to complete.</param>
/// <param name="requestPathBase">The PathBase for the currently-executing HTTP request.</param>
/// <returns></returns>
public static Task<RenderToStringResult> RenderToString(
string applicationBasePath,
INodeServices nodeServices,
CancellationToken applicationStoppingToken,
JavaScriptModuleExport bootModule,
string requestAbsoluteUrl,
string requestPathAndQuery,
object customDataParameter,
int timeoutMilliseconds,
string requestPathBase)
{
return nodeServices.InvokeExportAsync<RenderToStringResult>(
GetNodeScriptFilename(applicationStoppingToken),
"renderToString",
applicationBasePath,
bootModule,
requestAbsoluteUrl,
requestPathAndQuery,
customDataParameter,
timeoutMilliseconds,
requestPathBase);
}
private static string GetNodeScriptFilename(CancellationToken applicationStoppingToken)
{
lock(CreateNodeScriptLock)
{
if (NodeScript == null)
{
var script = EmbeddedResourceReader.Read(typeof(Prerenderer), "/Content/Node/prerenderer.js");
NodeScript = new StringAsTempFile(script, applicationStoppingToken); // Will be cleaned up on process exit
}
}
return NodeScript.FileName;
}
}
}

View File

@@ -1,27 +0,0 @@
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.SpaServices.Prerendering;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace Microsoft.Extensions.DependencyInjection
{
/// <summary>
/// Extension methods for setting up prerendering features in an <see cref="IServiceCollection" />.
/// </summary>
public static class PrerenderingServiceCollectionExtensions
{
/// <summary>
/// Configures the dependency injection system to supply an implementation
/// of <see cref="ISpaPrerenderer"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
public static void AddSpaPrerenderer(this IServiceCollection serviceCollection)
{
serviceCollection.TryAddSingleton<IHttpContextAccessor, HttpContextAccessor>();
serviceCollection.AddSingleton<ISpaPrerenderer, DefaultSpaPrerenderer>();
}
}
}

View File

@@ -1,60 +0,0 @@
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System.Text;
namespace Microsoft.AspNetCore.SpaServices.Prerendering
{
/// <summary>
/// Describes the prerendering result returned by JavaScript code.
/// </summary>
public class RenderToStringResult
{
/// <summary>
/// If set, specifies JSON-serializable data that should be added as a set of global JavaScript variables in the document.
/// This can be used to transfer arbitrary data from server-side prerendering code to client-side code (for example, to
/// transfer the state of a Redux store).
/// </summary>
public JObject Globals { get; set; }
/// <summary>
/// The HTML generated by the prerendering logic.
/// </summary>
public string Html { get; set; }
/// <summary>
/// If set, specifies that instead of rendering HTML, the response should be an HTTP redirection to this URL.
/// This can be used if the prerendering code determines that the requested URL would lead to a redirection according
/// to the SPA's routing configuration.
/// </summary>
public string RedirectUrl { get; set; }
/// <summary>
/// If set, specifies the HTTP status code that should be sent back with the server response.
/// </summary>
public int? StatusCode { get; set; }
/// <summary>
/// Constructs a block of JavaScript code that assigns data from the
/// <see cref="Globals"/> property to the global namespace.
/// </summary>
/// <returns>A block of JavaScript code.</returns>
public string CreateGlobalsAssignmentScript()
{
if (Globals == null)
{
return string.Empty;
}
var stringBuilder = new StringBuilder();
foreach (var property in Globals.Properties())
{
stringBuilder.AppendFormat("window.{0} = {1};",
property.Name,
property.Value.ToString(Formatting.None));
}
return stringBuilder.ToString();
}
}
}

View File

@@ -1,822 +0,0 @@
# Microsoft.AspNetCore.SpaServices
If you're building an ASP.NET Core application, and want to use Angular, React, Knockout, or another single-page app (SPA) framework, this NuGet package contains useful infrastructure for you.
This package enables:
* [**Server-side prerendering**](#server-side-prerendering) for *universal* (a.k.a. *isomorphic*) applications, where your Angular / React / etc. components are first rendered on the server, and then transferred to the client where execution continues
* [**Webpack middleware**](#webpack-dev-middleware) so that, during development, any webpack-built resources will be generated on demand, without you having to run webpack manually or compile files to disk
* [**Hot module replacement**](#webpack-hot-module-replacement) so that, during development, your code and markup changes will be pushed to your browser and updated in the running application automatically, without even needing to reload the page
* [**Routing helpers**](#routing-helper-mapspafallbackroute) for integrating server-side routing with client-side routing
Behind the scenes, it uses the [`Microsoft.AspNetCore.NodeServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.NodeServices) package as a fast and robust way to invoke Node.js-hosted code from ASP.NET Core at runtime.
### Requirements
* [Node.js](https://nodejs.org/en/)
* To check that it's installed and can be found, run `node -v` on a command line
* Note: If you're deploying to an Azure web site, you don't need to do anything here - Node is already installed and available in the server environments
* [.NET Core](https://dot.net), version 1.0 RC2 or later
### Installation into existing projects
* Install the `Microsoft.AspNetCore.SpaServices` NuGet package
* Run `dotnet restore` (or if you use Visual Studio, just wait a moment - it will restore dependencies automatically)
* Install supporting NPM packages for the features you'll be using:
* For **server-side prerendering**, install `aspnet-prerendering`
* For **server-side prerendering with Webpack build support**, also install `aspnet-webpack`
* For **webpack dev middleware**, install `aspnet-webpack`
* For **webpack dev middleware with hot module replacement**, also install `webpack-hot-middleware`
* For **webpack dev middleware with React hot module replacement**, also install `aspnet-webpack-react`
For example, run `npm install --save aspnet-prerendering aspnet-webpack` to install `aspnet-prerendering` and `aspnet-webpack`.
### Creating entirely new projects
If you're starting from scratch, you might prefer to use the `aspnetcore-spa` Yeoman generator to get a ready-to-go starting point using your choice of client-side framework. This includes `Microsoft.AspNetCore.SpaServices` along with everything configured for webpack middleware, server-side prerendering, etc.
See: [Getting started with the aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/)
Also, if you want to debug projects created with the aspnetcore-spa generator, see [Debugging your projects](#debugging-your-projects)
## Server-side prerendering
The `SpaServices` package isn't tied to any particular client-side framework, and it doesn't force you to set up your client-side application in any one particular style. So, `SpaServices` doesn't contain hard-coded logic for rendering Angular / React / etc. components.
Instead, what `SpaServices` offers is ASP.NET Core APIs that know how to invoke a JavaScript function that you supply, pass through the context information you'll need for server-side prerendering, and then inject the resulting HTML string into your rendered page. In this document, you'll find examples of setting this up to render Angular and React components.
### 1. Enable the asp-prerender-* tag helpers
Make sure you've installed into your project:
* The `Microsoft.AspNetCore.SpaServices` NuGet package, version 1.1.0-* or later
* The `aspnet-prerendering` NPM package, version 2.0.1 or later
Together these contain the server-side and client-side library code you'll need. Now go to your `Views/_ViewImports.cshtml` file, and add the following line:
@addTagHelper "*, Microsoft.AspNetCore.SpaServices"
### 2. Use asp-prerender-* in a view
Choose a place in one of your MVC views where you want to prerender a SPA component. For example, open `Views/Home/Index.cshtml`, and add markup like the following:
<div id="my-spa" asp-prerender-module="ClientApp/boot-server"></div>
If you run your application now, and browse to whatever page renders the view you just edited, you should get an error similar to the following (assuming you're running in *Development* mode so you can see the error information): *Error: Cannot find module 'some/directory/ClientApp/boot-server'*. You've told the prerendering tag helper to execute code from a JavaScript module called `boot-server`, but haven't yet supplied any such module!
### 3. Supplying JavaScript code to perform prerendering
Create a JavaScript file at the path matching the `asp-prerender-module` value you specified above. In this example, that means creating a folder called `ClientApp` at the root of your project, and creating a file inside it called `boot-server.js`. Try putting the following into it:
```javascript
var prerendering = require('aspnet-prerendering');
module.exports = prerendering.createServerRenderer(function(params) {
return new Promise(function (resolve, reject) {
var result = '<h1>Hello world!</h1>'
+ '<p>Current time in Node is: ' + new Date() + '</p>'
+ '<p>Request path is: ' + params.location.path + '</p>'
+ '<p>Absolute URL is: ' + params.absoluteUrl + '</p>';
resolve({ html: result });
});
});
```
If you try running your app now, you should see the HTML snippet generated by your JavaScript getting injected into your page.
As you can see, your JavaScript code receives context information (such as the URL being requested), and returns a `Promise` so that it can asynchronously supply the markup to be injected into the page. You can put whatever logic you like here, but typically you'll want to execute a component from your Angular / React / etc. application.
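To give a concrete (though deliberately simplified) idea of what that JavaScript function might do, here's a minimal sketch of a `boot-server` module that renders a React component on the server. It assumes `react`, `react-dom`, and `aspnet-prerendering` are installed, and that `./react-app` exports a `HelloMessage` component (like the one shown in the React example later in this document) already compiled to plain JavaScript by your build setup; a real application would typically also handle routing and data loading.

```javascript
// Sketch only: a boot-server module that prerenders a React component.
// Assumes './react-app' exports a HelloMessage component and has already
// been compiled to plain JavaScript (e.g., by the Webpack setup described below).
var React = require('react');
var ReactDOMServer = require('react-dom/server');
var prerendering = require('aspnet-prerendering');
var HelloMessage = require('./react-app').HelloMessage;

module.exports = prerendering.createServerRenderer(function (params) {
    return new Promise(function (resolve, reject) {
        // Render the component to an HTML string, then hand it back to ASP.NET Core
        var html = ReactDOMServer.renderToString(
            React.createElement(HelloMessage, { message: 'World' })
        );
        resolve({ html: html });
    });
});
```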
**Passing data from .NET code into JavaScript code**
If you want to supply additional data to the JavaScript function that performs your prerendering, you can use the `asp-prerender-data` attribute. You can give any value as long as it's JSON-serializable. Bear in mind that it will be serialized and sent as part of the remote procedure call (RPC) to Node.js, so avoid trying to pass massive amounts of data.
For example, in your `cshtml`,
<div id="my-spa" asp-prerender-module="ClientApp/boot-server"
asp-prerender-data="new {
IsGoldUser = true,
Cookies = ViewContext.HttpContext.Request.Cookies
}"></div>
Now in your JavaScript prerendering function, you can access this data by reading `params.data`, e.g.:
```javascript
var prerendering = require('aspnet-prerendering');
module.exports = prerendering.createServerRenderer(function(params) {
return new Promise(function (resolve, reject) {
var result = '<h1>Hello world!</h1>'
+ '<p>Is gold user: ' + params.data.isGoldUser + '</p>'
+ '<p>Number of cookies: ' + params.data.cookies.length + '</p>';
resolve({ html: result });
});
});
```
Notice that the property names are received in JavaScript-style casing (e.g., `isGoldUser`) even though they were sent in C#-style casing (e.g., `IsGoldUser`). This is because of how the JSON serialization is configured by default.
**Passing data from server-side to client-side code**
If, as well as returning HTML, you also want to pass some contextual data from your server-side code to your client-side code, you can supply a `globals` object alongside the initial `html`, e.g.:
```javascript
resolve({
html: result,
globals: {
albumsList: someDataHere,
userData: someMoreDataHere
}
});
```
When the `asp-prerender-*` tag helper emits this result into the document, as well as injecting the `html` string, it will also emit code that populates `window.albumsList` and `window.userData` with JSON-serialized copies of the objects you passed.
This can be useful if, for example, you want to avoid loading the same data twice (once on the server and once on the client).
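For example, your client-side boot code can then read those globals during startup instead of fetching the same data again over HTTP. Here's a minimal sketch, assuming the `albumsList` global shown above; `renderAlbums` is a hypothetical function standing in for your own application code:

```javascript
// Sketch only: client-side boot code that reuses data supplied by the
// server-side prerenderer via the 'globals' mechanism described above.
// Assumes the server resolved { globals: { albumsList: ... } }.
var initialAlbums = window.albumsList || [];

// Initialise the client-side app from the prerendered data instead of
// fetching it again. 'renderAlbums' is a hypothetical placeholder for
// your own rendering logic.
renderAlbums(initialAlbums);
```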
### 4. Enabling webpack build tooling
Of course, rather than writing your `boot-server` module and your entire SPA in plain ES5 JavaScript, it's quite likely that you'll want to write your client-side code in TypeScript or at least ES2015 code. To enable this, you need to set up a build system.
#### Example: Configuring Webpack to build TypeScript
Let's say you want to write your boot module and SPA code in TypeScript, and build it using Webpack. First ensure that `webpack` is installed, along with the libraries needed for TypeScript compilation:
npm install -g webpack
npm install --save ts-loader typescript
Next, create a file `webpack.config.js` at the root of your project, containing:
```javascript
var path = require('path');
module.exports = {
entry: { 'main-server': './ClientApp/boot-server.ts' },
resolve: { extensions: [ '', '.js', '.ts' ] },
output: {
path: path.join(__dirname, './ClientApp/dist'),
filename: '[name].js',
libraryTarget: 'commonjs'
},
module: {
loaders: [
{ test: /\.ts$/, loader: 'ts-loader' }
]
},
target: 'node',
devtool: 'inline-source-map'
};
```
This tells webpack that it should compile `.ts` files using TypeScript, and that when looking for modules by name (e.g., `boot-server`), it should also find files with `.js` and `.ts` extensions.
If you don't already have a `tsconfig.json` file at the root of your project, add one now. Make sure your `tsconfig.json` includes `"es6"` in its `"lib"` array so that TypeScript knows about intrinsics such as `Promise`. Here's an example `tsconfig.json`:
```json
{
"compilerOptions": {
"moduleResolution": "node",
"target": "es5",
"sourceMap": true,
"lib": [ "es6", "dom" ]
},
"exclude": [ "bin", "node_modules" ]
}
```
Now you can delete `ClientApp/boot-server.js`, and in its place, create `ClientApp/boot-server.ts`, containing the TypeScript equivalent of what you had before:
```javascript
import { createServerRenderer } from 'aspnet-prerendering';
export default createServerRenderer(params => {
return new Promise((resolve, reject) => {
const html = `
<h1>Hello world!</h1>
<p>Current time in Node is: ${ new Date() }</p>
<p>Request path is: ${ params.location.path }</p>
<p>Absolute URL is: ${ params.absoluteUrl }</p>`;
resolve({ html });
});
});
```
Finally, run `webpack` on the command line to build `ClientApp/dist/main-server.js`. Then you can tell `SpaServices` to use that file for server-side prerendering. In your MVC view where you use `asp-prerender-module`, update the attribute value:
<div id="my-spa" asp-prerender-module="ClientApp/dist/main-server"></div>
Webpack is a broad and powerful tool and can do far more than just invoke the TypeScript compiler. To learn more, see the [webpack website](https://webpack.github.io/).
### 5(a). Prerendering Angular components
If you're building an Angular application, you can run your components on the server inside your `boot-server.ts` file so they will be injected into the resulting web page.
First install the NPM package `angular2-universal` - this contains infrastructure for executing Angular components inside Node.js:
```
npm install --save angular2-universal
```
Now you can use the [`angular2-universal` APIs](https://github.com/angular/universal) from your `boot-server.ts` TypeScript module to execute your Angular component on the server. The code needed for this is fairly complex, but that's unavoidable because Angular supports so many different ways of being configured, and you need to provide wiring for whatever combination of DI modules you're using.
You can find an example `boot-server.ts` that renders arbitrary Angular components [here](https://github.com/aspnet/JavaScriptServices/blob/dev/templates/AngularSpa/ClientApp/boot-server.ts). If you use this with your own application, you might need to edit the `serverBindings` array to reference any other DI services that your Angular component depends on.
The easiest way to get started with Angular server-side rendering on ASP.NET Core is to use the [aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/), which creates a ready-made working starting point.
### 5(b). Prerendering React components
React components can be executed synchronously on the server quite easily, although asynchronous execution is trickier, as described below.
#### Setting up client-side React code
Let's say you want to write a React component in ES2015 code. You might install the NPM modules `react react-dom babel-loader babel-preset-react babel-preset-es2015`, and then prepare Webpack to build `.jsx` files by creating `webpack.config.js` in your project root, containing:
```javascript
var path = require('path');
module.exports = {
resolve: { extensions: [ '', '.js', '.jsx' ] },
module: {
loaders: [
{ test: /\.jsx?$/, loader: 'babel-loader' }
]
},
entry: {
main: ['./ClientApp/react-app.jsx'],
},
output: {
path: path.join(__dirname, 'wwwroot', 'dist'),
filename: '[name].js'
},
};
```
You will also need a `.babelrc` file in your project root, containing:
```javascript
{
"presets": ["es2015", "react"]
}
```
This is enough to be able to build ES2015 `.jsx` files via Webpack. Now you could implement a simple React component, for example the following at `ClientApp/react-app.jsx`:
```javascript
import * as React from 'react';
export class HelloMessage extends React.Component
{
render() {
return <h1>Hello {this.props.message}!</h1>;
}
}
```
... and the following code to run it in a browser at `ClientApp/boot-client.jsx`:
```javascript
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { HelloMessage } from './react-app';
ReactDOM.render(<HelloMessage message="World" />, document.getElementById('my-spa'));
```
At this stage, run `webpack` on the command line to build `wwwroot/dist/main.js`. Or, to avoid having to do this manually, you could use the `SpaServices` package to [enable Webpack dev middleware](#webpack-dev-middleware).
You can now run your React code on the client by adding the following to one of your MVC views:
<div id="my-spa"></div>
<script src="/dist/main.js"></script>
If you want to enable server-side prerendering too, follow the same process as described under [server-side prerendering](#server-side-prerendering).
#### Realistic React apps and Redux
The above example is extremely simple - it doesn't use `react-router`, and it doesn't load any data asynchronously. Real applications are likely to do both of these.
For an example server-side boot module that knows how to evaluate `react-router` routes and render the correct React component, see [this example](https://github.com/aspnet/JavaScriptServices/blob/dev/templates/ReactReduxSpa/ClientApp/boot-server.tsx).
Supporting asynchronous data loading involves more considerations. Unlike Angular applications that run asynchronously on the server and freely overwrite server-generated markup with client-generated markup, React strictly wants to run synchronously on the server and always produce the same markup on the server as it does on the client.
To make this work, you most likely need some way to know in advance what data your React components will need to use, load it separately from those components, and have some way of transferring information about the loaded data from server to client. If you try to implement this in a generalized way, you'll end up reinventing something like the Flux/Redux pattern.
To avoid inventing your own incomplete version of Flux/Redux, you probably should just use [Redux](https://github.com/reactjs/redux). This is at first a very unfamiliar and tricky-looking abstraction, but does solve all the problems around server-side execution of React apps. To get a working starting point for an ASP.NET Core site with React+Redux on the client (and server-side prerendering), see the [aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
## Webpack dev middleware
If you're using webpack, the webpack dev middleware feature included in `Microsoft.AspNetCore.SpaServices` will streamline your development process. It intercepts requests that would match files built by webpack, and dynamically builds those files on demand. They don't need to be written to disk - they are just held in memory and served directly to the browser.
Benefits:
* You don't have to run `webpack` manually or set up any file watchers
* The browser is always guaranteed to receive up-to-date built output
* The built artifacts are normally served instantly or at least extremely quickly, because internally, an instance of `webpack` stays active and has partial compilation states pre-cached in memory
It lets you work as if the browser natively understands whatever file types you are working with (e.g., TypeScript, SASS), because it's as if there's no build process to wait for.
### Example: A simple Webpack setup that builds TypeScript
**Note:** If you already have Webpack in your project, then you can skip this section.
As a simple example, here's how you can set up Webpack to build TypeScript files. First install the relevant NPM packages by executing this from the root directory of your project:
```
npm install --save typescript ts-loader
```
And if you don't already have it, you'll find it useful to install the `webpack` command-line tool:
```
npm install -g webpack
```
Now add a Webpack configuration file. Create `webpack.config.js` in the root of your project, containing the following:
```javascript
module.exports = {
resolve: {
// For modules referenced with no filename extension, Webpack will consider these extensions
extensions: [ '', '.js', '.ts' ]
},
module: {
loaders: [
// This example only configures Webpack to load .ts files. You can also drop in loaders
// for other file types, e.g., .coffee, .sass, .jsx, ...
{ test: /\.ts$/, loader: 'ts-loader' }
]
},
entry: {
// The loader will follow all chains of reference from this entry point...
main: ['./ClientApp/MyApp.ts']
},
output: {
// ... and emit the built result in this location
path: __dirname + '/wwwroot/dist',
filename: '[name].js'
},
};
```
Now you can put some TypeScript code (minimally, just `console.log('Hello');`) at `ClientApp/MyApp.ts` and then run `webpack` from the command line to build it (and everything it references). The output will be placed in `wwwroot/dist`, so you can load and run it in a browser by adding the following to one of your views (e.g., `Views\Home\Index.cshtml`):
<script src="/dist/main.js"></script>
The Webpack loader, `ts-loader`, follows all chains of reference from `MyApp.ts` and will compile all referenced TypeScript code into your output. If you want, you can create a [`tsconfig.json` file](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html) to control things like whether source maps will be included in the output. If you add other Webpack loaders to your `webpack.config.js`, you can even reference things like SASS from your TypeScript, and then it will get built to CSS and loaded automatically.
So that's enough to build TypeScript. Here's where webpack dev middleware comes in to auto-build your code whenever needed (so you don't need any file watchers or to run `webpack` manually), and optionally hot module replacement (HMR) to push your changes automatically from code editor to browser without even reloading the page.
### Example: A simple Webpack setup that builds LESS
Following on from the preceding example that builds TypeScript, you could extend your Webpack configuration further to support building LESS. There are three major approaches to doing this:
1. **If using Angular, use its native style loader to attach the styles to components**. This is extremely simple and is usually the right choice if you are using Angular. However it only applies to Angular components, not to any other part of the host page, so sometimes you might want to combine this technique with options 2 or 3 below.
2. **Or, use Webpack's style loader to attach the styles at runtime**. The CSS markup will be included in your JavaScript bundles and will be attached to the document dynamically. This has certain benefits during development but isn't recommended in production.
3. **Or, have each build write a standalone `.css` file to disk**. At runtime, load it using a regular `<link rel='stylesheet'>` tag. This is likely to be the approach you'll want for production use (at least for non-Angular applications, such as React applications) as it's the most robust and best-performing option.
If instead of LESS you prefer SASS or another CSS preprocessor, the exact same techniques should work, but of course you'll need to replace the `less-loader` with an equivalent Webpack loader for SASS or your chosen preprocessor.
#### Approach 1: Scoping styles to Angular components
If you are using Angular, this is the easiest way to perform styling. It works with both server and client rendering, supports Hot Module Replacement, and robustly scopes styles to particular components (and optionally, their descendant elements).
This repository's Angular template uses this technique to scope styles to components out of the box. It defines those styles as `.css` files. For example, its components reference `.css` files like this:
```javascript
@Component({
...
styles: [require('./somecomponent.css')]
})
export class SomeComponent { ... }
```
To make this work, the template has Webpack configured to inject the contents of the `.css` file as a string literal in the built file. Here's the configuration that enables this:
```javascript
// This goes into webpack.config.js, in the module loaders array:
{ test: /\.css/, include: /ClientApp/, loader: 'raw-loader' }
```
Now if you want to use LESS instead of plain CSS, you just need to include a LESS loader. Run the following in a command prompt at your project root:
```
npm install --save less-loader less
```
Next, add the following loader configuration to the `loaders` array in `webpack.config.js`:
```javascript
{ test: /\.less/, include: /ClientApp/, loader: 'raw-loader!less-loader' }
```
Notice how this chains `less-loader` (which transforms `.less` syntax into plain CSS) together with the `raw` loader (which turns the result into a string literal). With this in place, you can reference `.less` files from your Angular components in the obvious way:
```javascript
@Component({
...
styles: [require('./somecomponent.less')]
})
export class SomeComponent { ... }
```
... and your styles will be applied in both server-side and client-side rendering.
#### Approach 2: Loading the styles using Webpack and JavaScript
This technique works with any client-side framework (not just Angular), and can also apply styles to the entire document rather than just individual components. It's a little simpler to set up than technique 3, plus it works flawlessly with Hot Module Replacement (HMR). The downside is that it's really only good for development time, because in production you probably don't want users to wait until JavaScript is loaded before styles are applied to the page (this would mean they'd see a 'flash of unstyled content' while the page is being loaded).
First create a `.less` file in your project. For example, create a file at `ClientApp/styles/mystyles.less` containing:
```less
@base: #f938ab;
h1 {
color: @base;
}
```
Reference this file from an `import` or `require` statement in one of your JavaScript or TypeScript files. For example, if you've got a `boot-client.ts` file, add the following near the top:
```javascript
import './styles/mystyles.less';
```
If you try to run the Webpack compiler now (e.g., via `webpack` on the command line), you'll get an error saying it doesn't know how to build `.less` files. So, it's time to install a Webpack loader for LESS (plus related NPM modules). In a command prompt at your project's root directory, run:
```
npm install --save less-loader less
```
Finally, tell Webpack to use this whenever it encounters a `.less` file. In `webpack.config.js`, add to the `loaders` array:
```
{ test: /\.less/, loader: 'style-loader!css-loader!less-loader' }
```
This means that when you `import` or `require` a `.less` file, Webpack first passes it to the LESS compiler to produce CSS, and then the output goes to the CSS and style loaders, which know how to attach it to the page dynamically at runtime.
That's all you need to do! Restart your site and you should see the LESS styles being applied. This technique is compatible with both source maps and Hot Module Replacement (HMR), so you can edit your `.less` files at will and see the changes appearing live in the browser.
#### Approach 3: Building LESS to CSS files on disk
This technique takes a little more work to set up than technique 2, and lacks compatibility with HMR. But it's much better for production use if your styles are applied to the whole page (not just elements constructed via JavaScript), because it loads the CSS independently of JavaScript.
First add a `.less` file into your project. For example, create a file at `ClientApp/styles/mystyles.less` containing:
```less
@base: #f938ab;
h1 {
color: @base;
}
```
Reference this file from an `import` or `require` statement in one of your JavaScript or TypeScript files. For example, if you've got a `boot-client.ts` file, add the following near the top:
```javascript
import './styles/mystyles.less';
```
If you try to run the Webpack compiler now (e.g., via `webpack` on the command line), you'll get an error saying it doesn't know how to build `.less` files. So, it's time to install a Webpack loader for LESS (plus related NPM modules). In a command prompt at your project's root directory, run:
```
npm install --save less less-loader extract-text-webpack-plugin
```
Next, you can extend your Webpack configuration to handle `.less` files. In `webpack.config.js`, at the top, add:
```javascript
var extractStyles = new (require('extract-text-webpack-plugin'))('mystyles.css');
```
This creates a plugin instance that will output text to a file called `mystyles.css`. You can now compile `.less` files and emit the resulting CSS text into that file. To do so, add the following to the `loaders` array in your Webpack configuration:
```javascript
{ test: /\.less$/, loader: extractStyles.extract('css-loader!less-loader') }
```
This tells Webpack that, whenever it finds a `.less` file, it should use the LESS loader to produce CSS, and then feed that CSS into the `extractStyles` object which you've already configured to write a file on disk called `mystyles.css`. Finally, for this to actually work, you need to include `extractStyles` in the list of active plugins. Just add that object to the `plugins` array in your Webpack config, e.g.:
```javascript
plugins: [
extractStyles,
... leave any other plugins here ...
]
```
If you run `webpack` on the command line now, you should now find that it emits a new file at `dist/mystyles.css`. You can make browsers load this file simply by adding a regular `<link>` tag. For example, in `Views/Shared/_Layout.cshtml`, add:
```html
<link rel="stylesheet" href="~/dist/mystyles.css" asp-append-version="true" />
```
**Note:** This technique (writing the built `.css` file to disk) is ideal for production use. But note that, at development time, *it does not support Hot Module Replacement (HMR)*. You will need to reload the page each time you edit your `.less` file. This is a known limitation of `extract-text-webpack-plugin`. If you have constructive opinions on how this can be improved, see the [discussion here](https://github.com/webpack/extract-text-webpack-plugin/issues/30).
### Enabling webpack dev middleware
First install the `Microsoft.AspNetCore.SpaServices` NuGet package and the `aspnet-webpack` NPM package, then go to your `Startup.cs` file, and **before your call to `UseStaticFiles`**, add the following:
```csharp
if (env.IsDevelopment()) {
app.UseWebpackDevMiddleware();
}
// Your call to app.UseStaticFiles(); should be here
```
Also check your webpack configuration at `webpack.config.js`. Since `UseWebpackDevMiddleware` needs to know which incoming requests to intercept, make sure you've specified a `publicPath` value on your `output`, for example:
```javascript
module.exports = {
// ... rest of your webpack config is here ...
output: {
path: path.join(__dirname, 'wwwroot', 'dist'),
publicPath: '/dist/',
filename: '[name].js'
},
};
```
Now, assuming you're running in [development mode](https://docs.asp.net/en/latest/fundamentals/environments.html), any requests for files under `/dist` will be intercepted and served using Webpack dev middleware.
**This is for development time only, not for production use (hence the `env.IsDevelopment()` check in the code above).** While you could technically remove that check and serve your content in production through the webpack middleware, it's hard to think of a good reason for doing so. For best performance, it makes sense to prebuild your client-side resources so they can be served directly from disk with no build middleware. If you use the [aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/), you'll get a site that produces optimised static builds for production, while also supporting webpack dev middleware at development time.
## Webpack Hot Module Replacement
For an even more streamlined development experience, you can enhance webpack dev middleware by enabling Hot Module Replacement (HMR) support. This watches for any changes you make to source files on disk (e.g., `.ts`/`.html`/`.sass`/etc. files), and automatically rebuilds them and pushes the result into your browser window, without even needing to reload the page.
This is *not* the same as a simple live-reload mechanism. It does not reload the page; it replaces code or markup directly in place. This is better, because it does not interfere with any state your SPA might have in memory, or any debugging session you have in progress.
Typically, when you change a source file, the effects appear in your local browser window in under 2 seconds, even when your overall application is large. This is superbly productive, especially in multi-monitor setups. If you cause a build error (e.g., a syntax error), details of the error will appear in your browser window. When you fix it, your application will reappear, without having lost its in-memory state.
### Enabling Hot Module Replacement
First ensure you already have a working Webpack dev middleware setup. Then, install the `webpack-hot-middleware` NPM module:
```
npm install --save webpack-hot-middleware
```
At the top of your `Startup.cs` file, add the following namespace reference:
```csharp
using Microsoft.AspNetCore.SpaServices.Webpack;
```
Now amend your call to `UseWebpackDevMiddleware` as follows:
```csharp
app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions {
HotModuleReplacement = true
});
```
Also, to work around a temporary issue in `SpaServices`, you must ensure that your Webpack config includes a `plugins` array, even if it's empty. For example, in `webpack.config.js`:
```javascript
module.exports = {
// ... rest of your webpack config is here ...
plugins: [
// Put webpack plugins here if needed, or leave it as an empty array if not
]
};
```
Now when you load your application in a browser, you should see a message like the following in your browser console:
```
[HMR] connected
```
If you edit any of your source files that get built by webpack, the result will automatically be pushed into the browser. As for what the browser does with these updates - that's a matter of how you configure it - see below.
**Note for TypeScript + Visual Studio users**
If you want HMR to work correctly with TypeScript, and you use Visual Studio on Windows as an IDE (but not VS Code), then you will need to make a further configuration change. In your `.csproj` file, in one of the `<PropertyGroup>` elements, add this:
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
This is necessary because otherwise, Visual Studio will try to auto-compile TypeScript files as you save changes to them. That default auto-compilation behavior is unhelpful in projects where you have a proper build system (e.g., Webpack), because VS doesn't know about your build system and would emit `.js` files in the wrong locations, which would in turn cause problems with your real build or deployment mechanisms.
#### Enabling hot replacement for React components
Webpack has built-in support for updating React components in place. To enable this, amend your `UseWebpackDevMiddleware` call further as follows:
```csharp
app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions {
HotModuleReplacement = true,
ReactHotModuleReplacement = true
});
```
Also, install the NPM module `aspnet-webpack-react`, e.g.:
```
npm install --save aspnet-webpack-react
```
Now if you edit any React component (e.g., in `.jsx` or `.tsx` files), the updated component will be injected into the running application, and will even preserve its in-memory state.
**Note**: In your webpack config, be sure that your React components are loaded using `babel-loader` (and *not* just directly using `babel` or `ts-loader`), because `babel-loader` is where the HMR instrumentation is injected. For an example of HMR for React components built with TypeScript, see the [aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
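As a rough sketch (not taken from that generator), a loader rule along the following lines routes `.tsx` files through `ts-loader` and then `babel-loader`, so the HMR instrumentation gets applied; treat it as an assumption to adapt to your own build setup:

```javascript
// In webpack.config.js, in the module loaders array. Sketch only - assumes
// babel-loader and ts-loader are installed. Loader chains run right-to-left,
// so .tsx files are compiled by ts-loader first, then passed through
// babel-loader (where the React HMR instrumentation is added).
{ test: /\.tsx?$/, include: /ClientApp/, loader: 'babel-loader!ts-loader' }
```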
#### Enabling hot replacement for other module types
Webpack has built-in HMR support for various types of module, such as styles and React components as described above. But to support HMR for other code modules, you need to add a small block of code that calls `module.hot.accept` to receive the updated module and update the running application.
This is [documented in detail on the Webpack site](https://webpack.github.io/docs/hot-module-replacement.html). Or to get a working HMR-enabled ASP.NET Core site with Angular, React, React+Redux, or Knockout, you can use the [aspnetcore-spa generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
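As a minimal sketch of what such a block can look like, you can accept updates to a module and re-run your own bootstrapping logic when a new version arrives. Here, `./app` and its `render` function are hypothetical placeholders for your own application code:

```javascript
// In your client-side entry point (e.g., boot-client.ts). Sketch only -
// './app' and 'render' are hypothetical placeholders.
import { render } from './app';

render();

if (module.hot) {
    // When webpack pushes a new version of './app', re-require it and
    // re-render in place, instead of reloading the whole page.
    module.hot.accept('./app', () => {
        const nextRender = require('./app').render;
        nextRender();
    });
}
```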
#### Passing options to the Webpack Hot Middleware client
You can configure the [Webpack Hot Middleware client](https://github.com/glenjamin/webpack-hot-middleware#client)
by using the `HotModuleReplacementClientOptions` property on `WebpackDevMiddlewareOptions`:
```csharp
app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions {
HotModuleReplacement = true,
HotModuleReplacementClientOptions = new Dictionary<string, string> {
{ "reload", "true" },
},
});
```
For the list of available options, please see [Webpack Hot Middleware docs](https://github.com/glenjamin/webpack-hot-middleware#client).
**Note**: The `path` option cannot be overridden this way - it is controlled by the `HotModuleReplacementEndpoint` setting.
## Routing helper: MapSpaFallbackRoute
In most single-page applications, you'll want client-side routing as well as your server-side routing. Most of the time, the two routing systems work independently without interfering. However, there is one case where things get challenging: identifying 404s.
If a request arrives for `/some/page`, and it doesn't match any server-side route, it's likely that you want to return HTML that starts up your client-side application, which probably understands the route `/some/page`. But if a request arrives for `/images/user-512.png`, and it doesn't match any server-side route or static file, it's **not** likely that your client-side application would handle it - you probably want to return a 404.
To help distinguish between these cases, the `Microsoft.AspNetCore.SpaServices` NuGet package includes a routing helper, `MapSpaFallbackRoute`. For example, in your `Startup.cs` file's `Configure` method, you might add:
```csharp
app.UseStaticFiles();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "default",
template: "{controller=Home}/{action=Index}/{id?}");
routes.MapSpaFallbackRoute(
name: "spa-fallback",
defaults: new { controller = "Home", action = "Index" });
});
```
Since `UseStaticFiles` goes first, any requests that actually match physical files under `wwwroot` will be handled by serving that static file.
Since the default server-side MVC route goes next, any requests that match existing controller/action pairs will be handled by invoking that action.
Then, since `MapSpaFallbackRoute` is last, any other requests **that don't appear to be for static files** will be served by invoking the `Index` action on `HomeController`. This action's view should serve your client-side application code, allowing the client-side routing system to handle whatever URL has been requested.
Any requests that do appear to be for static files (i.e., those that end with filename extensions), will *not* be handled by `MapSpaFallbackRoute`, and so will end up as 404s.
This is not a perfect solution to the problem of identifying 404s, because for example `MapSpaFallbackRoute` will not match requests for `/users/albert.einstein`, because it appears to contain a filename extension (`.einstein`). If you need your SPA to handle routes like that, then don't use `MapSpaFallbackRoute` - just use a regular MVC catch-all route. But then beware that requests for unknown static files will result in your client-side app being rendered.
## Debugging your projects
How to attach and use a debugger depends on what code you want to debug. For details, see:
* [How to debug your C# code that runs on the server](#debugging-your-c-code-that-runs-on-the-server)
* How to debug your JavaScript/TypeScript code:
* ... [when it's running in a browser](#debugging-your-javascripttypescript-code-when-its-running-in-a-browser)
* ... [when it's running on the server](#debugging-your-javascripttypescript-code-when-it-runs-on-the-server) (i.e., via `asp-prerender` or NodeServices)
### Debugging your C# code that runs on the server
You can use any .NET debugger, for example Visual Studio's C# debugger or [Visual Studio Code's C# debugger](https://code.visualstudio.com/Docs/editor/debugging).
### Debugging your JavaScript/TypeScript code when it's running in a browser
**The absolute most reliable way of debugging your client-side code is to use your browser's built-in debugger.** This is much easier to get working than debugging via an IDE, and it offers much richer insight into what's going on than your IDE can (for example, you'll be able to inspect the DOM and capture performance profiles, as well as set breakpoints and step through code).
If you're unfamiliar with your browser's debugging tools, then take the time to get familiar with them. You will become more productive.
#### Using your browser's built-in debugging tools
##### Using Chrome's developer tools for debugging
In Chrome, with your application running in the browser, [open the developer tools](https://developer.chrome.com/devtools#access). You can now find your code:
* In the developer tools *Sources* tab, expand folders in the hierarchy pane on the left to find the file you want
* Or, press `ctrl`+`o` (on Windows) or `cmd`+`o` on Mac, then start to type the name of the file you want to open (e.g., `counter.component.ts`)
With source maps enabled (which is the case in the project templates in this repo), you'll be able to see your original TypeScript source code, set breakpoints on it, etc.
##### Using Internet Explorer/Edge's developer tools (F12) for debugging
In Internet Explorer or Edge, with your application running in the browser, open the F12 developer tools by pressing `F12`. You can now find your code:
* In the F12 tools *Debugger* tab, expand folders in the hierarchy pane on the left to find the file you want
* Or, press `ctrl`+`o`, then start to type the name of the file you want to open (e.g., `counter.component.ts`)
With source maps enabled (which is the case in the project templates in this repo), you'll be able to see your original TypeScript source code, set breakpoints on it, etc.
##### Using Firefox's developer tools for debugging
In Firefox, with your application running in the browser, open the developer tools by pressing `F12`. You can now find your code:
* In the developer tools *Debugger* tab, expand folders in the hierarchy pane titled *Sources* towards the bottom to find the file you want
* Or, press `ctrl`+`o` (on Windows) or `cmd`+`o` on Mac, then start to type the name of the file you want to open (e.g., `counter.component.ts`)
With source maps enabled (which is the case in the project templates in this repo), you'll be able to see your original TypeScript source code, set breakpoints on it, etc.
##### How browser-based debugging interacts with Hot Module Replacement (HMR)
If you're using HMR, then each time you modify a file, the Webpack dev middleware restarts your client-side application, adding a new version of each affected module, without reloading the page. This can be confusing during debugging, because any breakpoints set on the old version of the code will still be there, but they will no longer get hit, because the old version of the module is no longer in use.
You have two options to get breakpoints that will be hit as expected:
* **Reload the page** (e.g., by pressing `F5`). Then your existing breakpoints will be applied to the new version of the module. This is obviously the easiest solution.
* Or, if you don't want to reload the page, you can **set new breakpoints on the new version of the module**. To do this, look in your browser's debug tools' list of source files, and identify the newly-injected copy of the module you want to debug. It will typically have a suffix on its URL such as `?4a2c`, and may appear in a new top-level hierarchy entry called `webpack://`. Set a breakpoint in the newly-injected module, and it will be hit as expected as your application runs.
#### Using Visual Studio Code's "Debugger for Chrome" extension
If you're using Visual Studio Code and Chrome, you can set breakpoints directly on your TypeScript source code in the IDE. To do this:
1. Install VS Code's [*Debugger for Chrome* extension](https://marketplace.visualstudio.com/items?itemName=msjsdiag.debugger-for-chrome)
2. Ensure your application server has started and can be reached with a browser (for example, run `dotnet watch run`)
3. In VS Code, open its *Debug* view (on Windows/Linux, press `ctrl`+`shift`+`d`; on Mac, press `cmd`+`shift`+`d`).
4. Press the cog icon and when prompted to *Select environment*, choose `Chrome`. VS Code will create a `launch.json` file for you. This describes how the debugger and browser should be launched.
5. Edit your new `.vscode/launch.json` file to specify the correct `url` and `webRoot` for your application. If you're using the project templates in this repo, then the values you probably want are:
* For `url`, put `"http://localhost:5000"` (but of course, change this if you're using a different port)
* For `port`, put `5000` (or your custom port number if applicable)
* For `webRoot` in **both** configurations, put `"${workspaceRoot}/wwwroot"`
* This tells the debugger how URLs within your application correspond to files in your VS Code workspace. By default, ASP.NET Core projects treat `wwwroot` as the root directory for publicly-served files, so `http://localhost:5000/dist/myfile.js` corresponds to `<yourprojectroot>/wwwroot/dist/myfile.js`. VS Code doesn't know about `wwwroot` unless you tell it.
* **Important:** If your VS Code window's workspace root is not the same as your ASP.NET Core project root (for example, if VS Code is opened at a higher-level directory to show both your ASP.NET Core project plus other peer-level directories), then you will need to amend `webRoot` correspondingly (e.g., to `"${workspaceRoot}/SomeDir/MyAspNetProject/wwwroot"`).
6. Start the debugger:
* While still on the *Debug* view, from the dropdown near the top-left, choose "*Launch Chrome against localhost, with sourcemaps*".
* Press the *Play* icon. Your application will launch in Chrome.
* If it does nothing for a while, then eventually gives the error *Cannot connect to runtime process*, that's because you already have an instance of Chrome running. Close it first, then try again.
7. Finally, you can now set and hit breakpoints in your TypeScript code in VS Code.
For more information about VS Code's built-in debugging facilities, [see its documentation](https://code.visualstudio.com/Docs/editor/debugging).
Caveats:
* The debugging interface between VS Code and Chrome occasionally has issues. If you're unable to set or hit breakpoints, or if you try to set a breakpoint but it appears in the wrong place, you may need to stop and restart the debugger (and often, the whole Chrome process).
* If you're using Hot Module Replacement (HMR), then whenever you edit a file, the breakpoints in it will no longer hit. This is because HMR loads a new version of the module into the browser, so the old code no longer runs. To fix this, you must:
* Reload the page in Chrome (e.g., by pressing `F5`)
* **Then** (and only then), remove and re-add the breakpoint in VS Code. It will now be attached to the current version of your module. Alternatively, stop and restart debugging altogether.
* If you prefer, you can use "*Attach to Chrome, with sourcemaps*" instead of launching a new Chrome instance, but this is a bit trickier: you must first start Chrome using the command-line option `--remote-debugging-port=9222`, and you must ensure there are no other tabs opened (otherwise, it might try to connect to the wrong one).
#### Using Visual Studio's built-in debugger for Internet Explorer
If you're using Visual Studio on Windows, and are running your app in Internet Explorer 11 (not Edge!), then you can use VS's built-in debugger rather than Internet Explorer's F12 tools if you prefer. To do this:
1. In Internet Explorer, [enable script debugging](https://msdn.microsoft.com/en-us/library/ms241741\(v=vs.100\).aspx)
2. In Visual Studio, [set the default "*Browse with*" option](http://stackoverflow.com/a/31959053) to Internet Explorer
3. In Visual Studio, press F5 to launch your application with the debugger in Internet Explorer.
* When the page has loaded in the browser, you'll be able to set and hit breakpoints in your TypeScript source files in Visual Studio.
Caveats:
* If you're using Hot Module Replacement, you'll need to stop and restart the debugger any time you change a source file. VS's IE debugger does not recognise that source files might change while the debugging session is in progress.
* Realistically, you will not be as productive with this approach as with your browser's built-in debugging tools. The browser's tools are far more effective: they are always available (you don't have to launch your application in a special way), they handle HMR better, and they don't make your application slow to launch.
## Debugging your JavaScript/TypeScript code when it runs on the server
When you're using NodeServices or the server-side prerendering feature included in the project templates in this repo, your JavaScript/TypeScript code will execute on the server in a background instance of Node.js. You can enable debugging via [V8 Inspector Integration](https://nodejs.org/api/debugger.html#debugger_v8_inspector_integration_for_node_js) on that Node.js instance. Here's how to do it.
First, in your `Startup.cs` file, in the `ConfigureServices` method, add the following:
```csharp
services.AddNodeServices(options => {
options.LaunchWithDebugging = true;
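    // 9229 is Node's default inspector port; change this if that port is already in use on your machine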
options.DebuggingPort = 9229;
});
```
Now run your application from the command line (e.g., `dotnet run`), then in a browser visit one of your pages that causes server-side JS to execute.
In the console, you should see all the normal trace messages, and among them output like the following:
```
warn: Microsoft.AspNetCore.NodeServices[0]
Debugger listening on port 9229.
warn: Microsoft.AspNetCore.NodeServices[0]
Warning: This is an experimental feature and could change at any time.
warn: Microsoft.AspNetCore.NodeServices[0]
To start debugging, open the following URL in Chrome:
warn: Microsoft.AspNetCore.NodeServices[0]
chrome-devtools://devtools/bundled/inspector.html?experiments=true&v8only=true&ws=127.0.0.1:9229/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
```
As the instructions say, open that URL in Chrome. Alternatively, you can open the Dev Tools for your page (at http://localhost:5000), go to its `Sources` tab, and connect to the Node instance under `Threads` in the right-hand sidebar.
By expanding the `webpack://` entry in the sidebar, you'll be able to find your original source code (it's using source maps) and set breakpoints in it. When you re-run your app in another browser window, your breakpoints will be hit, and you can then debug the server-side execution just as you'd debug client-side execution. It looks like this:
![screenshot from 2017-03-25 13-33-26](https://cloud.githubusercontent.com/assets/1596280/24324604/ab888a7e-115f-11e7-89d1-1586acf5e35c.png)

View File

@@ -1,37 +0,0 @@
using System;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
namespace Microsoft.AspNetCore.SpaServices
{
internal class SpaRouteConstraint : IRouteConstraint
{
private readonly string _clientRouteTokenName;
public SpaRouteConstraint(string clientRouteTokenName)
{
if (string.IsNullOrEmpty(clientRouteTokenName))
{
throw new ArgumentException("Value cannot be null or empty", nameof(clientRouteTokenName));
}
_clientRouteTokenName = clientRouteTokenName;
}
public bool Match(
HttpContext httpContext,
IRouter route,
string routeKey,
RouteValueDictionary values,
RouteDirection routeDirection)
{
return !HasDotInLastSegment(values[_clientRouteTokenName] as string ?? string.Empty);
}
private bool HasDotInLastSegment(string uri)
{
var lastSegmentStartPos = uri.LastIndexOf('/');
return uri.IndexOf('.', lastSegmentStartPos + 1) >= 0;
}
}
}

View File

@@ -1,94 +0,0 @@
using System;
using System.Collections.Generic;
using Microsoft.AspNetCore.Routing;
using Microsoft.AspNetCore.SpaServices;
// Putting in this namespace so it's always available whenever MapRoute is
namespace Microsoft.AspNetCore.Builder
{
/// <summary>
/// Extension methods useful for configuring routing in a single-page application (SPA).
/// </summary>
public static class SpaRouteExtensions
{
private const string ClientRouteTokenName = "clientRoute";
/// <summary>
/// Configures a route that is automatically bypassed if the requested URL appears to be for a static file
/// (e.g., if it has a filename extension).
/// </summary>
/// <param name="routeBuilder">The <see cref="IRouteBuilder"/>.</param>
/// <param name="name">The route name.</param>
/// <param name="defaults">Default route parameters.</param>
/// <param name="constraints">Route constraints.</param>
/// <param name="dataTokens">Route data tokens.</param>
public static void MapSpaFallbackRoute(
this IRouteBuilder routeBuilder,
string name,
object defaults,
object constraints = null,
object dataTokens = null)
{
MapSpaFallbackRoute(
routeBuilder,
name,
/* templatePrefix */ null,
defaults,
constraints,
dataTokens);
}
/// <summary>
/// Configures a route that is automatically bypassed if the requested URL appears to be for a static file
/// (e.g., if it has a filename extension).
/// </summary>
/// <param name="routeBuilder">The <see cref="IRouteBuilder"/>.</param>
/// <param name="name">The route name.</param>
/// <param name="templatePrefix">The template prefix.</param>
/// <param name="defaults">Default route parameters.</param>
/// <param name="constraints">Route constraints.</param>
/// <param name="dataTokens">Route data tokens.</param>
public static void MapSpaFallbackRoute(
this IRouteBuilder routeBuilder,
string name,
string templatePrefix,
object defaults,
object constraints = null,
object dataTokens = null)
{
var template = CreateRouteTemplate(templatePrefix);
var constraintsDict = ObjectToDictionary(constraints);
constraintsDict.Add(ClientRouteTokenName, new SpaRouteConstraint(ClientRouteTokenName));
routeBuilder.MapRoute(name, template, defaults, constraintsDict, dataTokens);
}
private static string CreateRouteTemplate(string templatePrefix)
{
templatePrefix = templatePrefix ?? string.Empty;
if (templatePrefix.Contains("?"))
{
// TODO: Consider supporting this. The {*clientRoute} part should be added immediately before the '?'
throw new ArgumentException("SPA fallback route templates don't support querystrings");
}
if (templatePrefix.Contains("#"))
{
throw new ArgumentException(
"SPA fallback route templates should not include # characters. The hash part of a URI does not get sent to the server.");
}
if (templatePrefix != string.Empty && !templatePrefix.EndsWith("/"))
{
templatePrefix += "/";
}
return templatePrefix + $"{{*{ClientRouteTokenName}}}";
}
private static IDictionary<string, object> ObjectToDictionary(object value)
=> value as IDictionary<string, object> ?? new RouteValueDictionary(value);
}
}

View File

@@ -1,115 +0,0 @@
/// <reference path="../npm/aspnet-prerendering/src/PrerenderingInterfaces.d.ts" />
import * as url from 'url';
import * as path from 'path';
import * as fs from 'fs';
declare var __non_webpack_require__;
// Separate declaration and export just to add type checking on function signature
export const renderToString: RenderToStringFunc = renderToStringImpl;
// This function is invoked by .NET code (via NodeServices). Its job is to hand off execution to the application's
// prerendering boot function. It can operate in two modes:
// [1] Legacy mode
// This is for backward compatibility with projects created with templates older than the generator version 0.6.0.
// In this mode, we don't really do anything here - we just load the 'aspnet-prerendering' NPM module (which must
// exist in node_modules, and must be v1.x (not v2+)), and pass through all the parameters to it. Code in
// 'aspnet-prerendering' v1.x will locate the boot function and invoke it.
// The drawback to this mode is that, for it to work, you have to deploy node_modules to production.
// [2] Current mode
// This is for projects created with the Yeoman generator 0.6.0+ (or projects manually updated). In this mode,
// we don't invoke 'require' at runtime at all. All our dependencies are bundled into the NuGet package, so you
// don't have to deploy node_modules to production.
// To determine whether we're in mode [1] or [2], the code locates your prerendering boot function, and checks whether
// a certain flag is attached to the function instance.
function renderToStringImpl(callback: RenderToStringCallback, applicationBasePath: string, bootModule: BootModuleInfo, absoluteRequestUrl: string, requestPathAndQuery: string, customDataParameter: any, overrideTimeoutMilliseconds: number) {
try {
const forceLegacy = isLegacyAspNetPrerendering();
const renderToStringFunc = !forceLegacy && findRenderToStringFunc(applicationBasePath, bootModule);
const isNotLegacyMode = renderToStringFunc && renderToStringFunc['isServerRenderer'];
if (isNotLegacyMode) {
// Current (non-legacy) mode - we invoke the exported function directly (instead of going through aspnet-prerendering)
// It's type-safe to just apply the incoming args to this function, because we already type-checked that it's a RenderToStringFunc,
// just like renderToStringImpl itself is.
renderToStringFunc.apply(null, arguments);
} else {
// Legacy mode - just hand off execution to 'aspnet-prerendering' v1.x, which must exist in node_modules at runtime
const aspNetPrerenderingV1RenderToString = require('aspnet-prerendering').renderToString;
if (aspNetPrerenderingV1RenderToString) {
aspNetPrerenderingV1RenderToString(callback, applicationBasePath, bootModule, absoluteRequestUrl, requestPathAndQuery, customDataParameter, overrideTimeoutMilliseconds);
} else {
callback('If you use aspnet-prerendering >= 2.0.0, you must update your server-side boot module to call createServerRenderer. '
+ 'Either update your boot module code, or revert to aspnet-prerendering version 1.x');
}
}
} catch (ex) {
// Make sure loading errors are reported back to the .NET part of the app
callback(
'Prerendering failed because of error: '
+ ex.stack
+ '\nCurrent directory is: '
+ process.cwd()
);
}
};
function findBootModule(applicationBasePath: string, bootModule: BootModuleInfo): any {
const bootModuleNameFullPath = path.resolve(applicationBasePath, bootModule.moduleName);
if (bootModule.webpackConfig) {
// If you're using asp-prerender-webpack-config, you're definitely in legacy mode
return null;
} else {
return __non_webpack_require__(bootModuleNameFullPath);
}
}
function findRenderToStringFunc(applicationBasePath: string, bootModule: BootModuleInfo): RenderToStringFunc {
// First try to load the module
const foundBootModule = findBootModule(applicationBasePath, bootModule);
if (foundBootModule === null) {
return null; // Must be legacy mode
}
// Now try to pick out the function they want us to invoke
let renderToStringFunc: RenderToStringFunc;
if (bootModule.exportName) {
// Explicitly-named export
renderToStringFunc = foundBootModule[bootModule.exportName];
} else if (typeof foundBootModule !== 'function') {
// TypeScript-style default export
renderToStringFunc = foundBootModule.default;
} else {
// Native default export
renderToStringFunc = foundBootModule;
}
// Validate the result
if (typeof renderToStringFunc !== 'function') {
if (bootModule.exportName) {
throw new Error(`The module at ${ bootModule.moduleName } has no function export named ${ bootModule.exportName }.`);
} else {
throw new Error(`The module at ${ bootModule.moduleName } does not export a default function, and you have not specified which export to invoke.`);
}
}
return renderToStringFunc;
}
function isLegacyAspNetPrerendering() {
const version = getAspNetPrerenderingPackageVersion();
return version && /^1\./.test(version);
}
function getAspNetPrerenderingPackageVersion() {
try {
const packageEntryPoint = __non_webpack_require__.resolve('aspnet-prerendering');
const packageDir = path.dirname(packageEntryPoint);
const packageJsonPath = path.join(packageDir, 'package.json');
const packageJson = __non_webpack_require__(packageJsonPath);
return packageJson.version.toString();
} catch(ex) {
// Implies aspnet-prerendering isn't in node_modules at all (or node_modules itself doesn't exist,
// which will be the case in production based on latest templates).
return null;
}
}

View File

@@ -1,20 +0,0 @@
// Pass through the invocation to the 'aspnet-webpack' package, verifying that it can be loaded
export function createWebpackDevServer(callback) {
let aspNetWebpack;
try {
aspNetWebpack = require('aspnet-webpack');
} catch (ex) {
// Developers sometimes have trouble with badly-configured Node installations, where it's unable
// to find node_modules. Or they accidentally fail to deploy node_modules, or even to run 'npm install'.
// Make sure such errors are reported back to the .NET part of the app.
callback(
'Webpack dev middleware failed because of an error while loading \'aspnet-webpack\'. Error was: '
+ ex.stack
+ '\nCurrent directory is: '
+ process.cwd()
);
return;
}
return aspNetWebpack.createWebpackDevServer.apply(this, arguments);
}

View File

@@ -1,12 +0,0 @@
{
"compilerOptions": {
"target": "es3",
"module": "commonjs",
"moduleResolution": "node",
"types": ["node"],
"lib": ["es2015"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -1,123 +0,0 @@
using System;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
namespace Microsoft.AspNetCore.SpaServices.Webpack
{
/// <summary>
/// Based on https://github.com/aspnet/Proxy/blob/dev/src/Microsoft.AspNetCore.Proxy/ProxyMiddleware.cs
/// Differs in that, if the proxied request returns a 404, we pass through to the next middleware in the chain
/// This is useful for Webpack middleware, because it lets you fall back on prebuilt files on disk for
/// chunks not exposed by the current Webpack config (e.g., DLL/vendor chunks).
/// </summary>
internal class ConditionalProxyMiddleware
{
private const int DefaultHttpBufferSize = 4096;
private readonly HttpClient _httpClient;
private readonly RequestDelegate _next;
private readonly ConditionalProxyMiddlewareOptions _options;
private readonly string _pathPrefix;
private readonly bool _pathPrefixIsRoot;
public ConditionalProxyMiddleware(
RequestDelegate next,
string pathPrefix,
ConditionalProxyMiddlewareOptions options)
{
if (!pathPrefix.StartsWith("/"))
{
pathPrefix = "/" + pathPrefix;
}
_next = next;
_pathPrefix = pathPrefix;
_pathPrefixIsRoot = string.Equals(_pathPrefix, "/", StringComparison.Ordinal);
_options = options;
_httpClient = new HttpClient(new HttpClientHandler());
_httpClient.Timeout = _options.RequestTimeout;
}
public async Task Invoke(HttpContext context)
{
if (context.Request.Path.StartsWithSegments(_pathPrefix) || _pathPrefixIsRoot)
{
var didProxyRequest = await PerformProxyRequest(context);
if (didProxyRequest)
{
return;
}
}
// Not a request we can proxy
await _next.Invoke(context);
}
private async Task<bool> PerformProxyRequest(HttpContext context)
{
var requestMessage = new HttpRequestMessage();
// Copy the request headers
foreach (var header in context.Request.Headers)
{
if (!requestMessage.Headers.TryAddWithoutValidation(header.Key, header.Value.ToArray()))
{
requestMessage.Content?.Headers.TryAddWithoutValidation(header.Key, header.Value.ToArray());
}
}
requestMessage.Headers.Host = _options.Host + ":" + _options.Port;
var uriString =
$"{_options.Scheme}://{_options.Host}:{_options.Port}{context.Request.Path}{context.Request.QueryString}";
requestMessage.RequestUri = new Uri(uriString);
requestMessage.Method = new HttpMethod(context.Request.Method);
using (
var responseMessage = await _httpClient.SendAsync(
requestMessage,
HttpCompletionOption.ResponseHeadersRead,
context.RequestAborted))
{
if (responseMessage.StatusCode == HttpStatusCode.NotFound)
{
// Let some other middleware handle this
return false;
}
// We can handle this
context.Response.StatusCode = (int) responseMessage.StatusCode;
foreach (var header in responseMessage.Headers)
{
context.Response.Headers[header.Key] = header.Value.ToArray();
}
foreach (var header in responseMessage.Content.Headers)
{
context.Response.Headers[header.Key] = header.Value.ToArray();
}
// SendAsync removes chunking from the response. This removes the header so it doesn't expect a chunked response.
context.Response.Headers.Remove("transfer-encoding");
using (var responseStream = await responseMessage.Content.ReadAsStreamAsync())
{
try
{
await responseStream.CopyToAsync(context.Response.Body, DefaultHttpBufferSize, context.RequestAborted);
}
catch (OperationCanceledException)
{
// The CopyToAsync task will be canceled if the client disconnects (e.g., user
// closes or refreshes the browser tab). Don't treat this as an error.
}
}
return true;
}
}
}
}

View File

@@ -1,20 +0,0 @@
using System;
namespace Microsoft.AspNetCore.SpaServices.Webpack
{
internal class ConditionalProxyMiddlewareOptions
{
public ConditionalProxyMiddlewareOptions(string scheme, string host, string port, TimeSpan requestTimeout)
{
Scheme = scheme;
Host = host;
Port = port;
RequestTimeout = requestTimeout;
}
public string Scheme { get; }
public string Host { get; }
public string Port { get; }
public TimeSpan RequestTimeout { get; }
}
}

View File

@@ -1,148 +0,0 @@
using System;
using System.IO;
using System.Threading;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.SpaServices.Webpack;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.Builder
{
/// <summary>
/// Extension methods that can be used to enable Webpack dev middleware support.
/// </summary>
public static class WebpackDevMiddleware
{
private const string DefaultConfigFile = "webpack.config.js";
private static readonly JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
// Note that the aspnet-webpack JS code specifically expects options to be serialized with
// PascalCase property names, so it's important to be explicit about this contract resolver
ContractResolver = new DefaultContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
/// <summary>
/// Enables Webpack dev middleware support. This hosts an instance of the Webpack compiler in memory
/// in your application so that you can always serve up-to-date Webpack-built resources without having
/// to run the compiler manually. Since the Webpack compiler instance is retained in memory, incremental
/// compilation is vastly faster than re-running the compiler from scratch.
///
/// Incoming requests that match Webpack-built files will be handled by returning the Webpack compiler
/// output directly, regardless of files on disk. If compilation is in progress when the request arrives,
/// the response will pause until updated compiler output is ready.
/// </summary>
/// <param name="appBuilder">The <see cref="IApplicationBuilder"/>.</param>
/// <param name="options">Options for configuring the Webpack compiler instance.</param>
public static void UseWebpackDevMiddleware(
this IApplicationBuilder appBuilder,
WebpackDevMiddlewareOptions options = null)
{
// Prepare options
if (options == null)
{
options = new WebpackDevMiddlewareOptions();
}
// Validate options
if (options.ReactHotModuleReplacement && !options.HotModuleReplacement)
{
throw new ArgumentException(
"To enable ReactHotModuleReplacement, you must also enable HotModuleReplacement.");
}
// Unlike other consumers of NodeServices, WebpackDevMiddleware doesn't share Node instances, nor does it
// use your DI configuration. It's important for WebpackDevMiddleware to have its own private Node instance
// because it must *not* restart when files change (if it did, you'd lose all the benefits of Webpack
// middleware). And since this is a dev-time-only feature, it doesn't matter if the default transport isn't
// as fast as some theoretical future alternative.
var nodeServicesOptions = new NodeServicesOptions(appBuilder.ApplicationServices);
nodeServicesOptions.WatchFileExtensions = new string[] { }; // Don't watch anything
if (!string.IsNullOrEmpty(options.ProjectPath))
{
nodeServicesOptions.ProjectPath = options.ProjectPath;
}
if (options.EnvironmentVariables != null)
{
foreach (var kvp in options.EnvironmentVariables)
{
nodeServicesOptions.EnvironmentVariables[kvp.Key] = kvp.Value;
}
}
var nodeServices = NodeServicesFactory.CreateNodeServices(nodeServicesOptions);
// Get a filename matching the middleware Node script
var script = EmbeddedResourceReader.Read(typeof(WebpackDevMiddleware),
"/Content/Node/webpack-dev-middleware.js");
var nodeScript = new StringAsTempFile(script, nodeServicesOptions.ApplicationStoppingToken); // Will be cleaned up on process exit
// Ideally, this would be relative to the application's PathBase (so it could work in virtual directories)
// but it's not clear that such information exists during application startup, as opposed to within the context
// of a request.
var hmrEndpoint = !string.IsNullOrEmpty(options.HotModuleReplacementEndpoint)
? options.HotModuleReplacementEndpoint
: "/__webpack_hmr"; // Matches webpack's built-in default
// Tell Node to start the server hosting webpack-dev-middleware
var devServerOptions = new
{
webpackConfigPath = Path.Combine(nodeServicesOptions.ProjectPath, options.ConfigFile ?? DefaultConfigFile),
suppliedOptions = options,
understandsMultiplePublicPaths = true,
hotModuleReplacementEndpointUrl = hmrEndpoint
};
var devServerInfo =
nodeServices.InvokeExportAsync<WebpackDevServerInfo>(nodeScript.FileName, "createWebpackDevServer",
JsonConvert.SerializeObject(devServerOptions, jsonSerializerSettings)).Result;
// If we're talking to an older version of aspnet-webpack, it will return only a single PublicPath,
// not an array of PublicPaths. Handle that scenario.
if (devServerInfo.PublicPaths == null)
{
devServerInfo.PublicPaths = new[] { devServerInfo.PublicPath };
}
// Proxy the corresponding requests through ASP.NET and into the Node listener
// Anything under /<publicpath> (e.g., /dist) is proxied as a normal HTTP request with a typical timeout (100s is the default from HttpClient),
// plus /__webpack_hmr is proxied with infinite timeout, because it's an EventSource (long-lived request).
foreach (var publicPath in devServerInfo.PublicPaths)
{
appBuilder.UseProxyToLocalWebpackDevMiddleware(publicPath + hmrEndpoint, devServerInfo.Port, Timeout.InfiniteTimeSpan);
appBuilder.UseProxyToLocalWebpackDevMiddleware(publicPath, devServerInfo.Port, TimeSpan.FromSeconds(100));
}
}
private static void UseProxyToLocalWebpackDevMiddleware(this IApplicationBuilder appBuilder, string publicPath, int proxyToPort, TimeSpan requestTimeout)
{
// Note that this is hardcoded to make requests to "localhost" regardless of the hostname of the
// server as far as the client is concerned. This is because ConditionalProxyMiddlewareOptions is
// the one making the internal HTTP requests, and it's going to be to some port on this machine
// because aspnet-webpack hosts the dev server there. We can't use the hostname that the client
// sees, because that could be anything (e.g., some upstream load balancer) and we might not be
// able to make outbound requests to it from here.
// Also note that the webpack HMR service always uses HTTP, even if your app server uses HTTPS,
// because the HMR service has no need for HTTPS (the client doesn't see it directly - all traffic
// to it is proxied), and the HMR service couldn't use HTTPS anyway (in general it wouldn't have
// the necessary certificate).
var proxyOptions = new ConditionalProxyMiddlewareOptions(
"http", "localhost", proxyToPort.ToString(), requestTimeout);
appBuilder.UseMiddleware<ConditionalProxyMiddleware>(publicPath, proxyOptions);
}
#pragma warning disable CS0649
class WebpackDevServerInfo
{
public int Port { get; set; }
public string[] PublicPaths { get; set; }
// For back-compatibility with older versions of aspnet-webpack, in the case where your webpack
// configuration contains exactly one config entry. This will be removed soon.
public string PublicPath { get; set; }
}
}
#pragma warning restore CS0649
}

View File

@@ -1,54 +0,0 @@
using System.Collections.Generic;
namespace Microsoft.AspNetCore.SpaServices.Webpack
{
/// <summary>
/// Options for configuring a Webpack dev middleware compiler.
/// </summary>
public class WebpackDevMiddlewareOptions
{
/// <summary>
/// If true, hot module replacement (HMR) will be enabled. This automatically updates Webpack-built
/// resources (such as JavaScript, CSS, or images) in your web browser whenever source files are changed.
/// </summary>
public bool HotModuleReplacement { get; set; }
/// <summary>
/// If set, overrides the URL that Webpack's client-side code will connect to when listening for updates.
/// This must be a root-relative URL similar to "/__webpack_hmr" (which is the default endpoint).
/// </summary>
public string HotModuleReplacementEndpoint { get; set; }
/// <summary>
/// Overrides the internal port number that client-side HMR code will connect to.
/// </summary>
public int HotModuleReplacementServerPort { get; set; }
/// <summary>
/// If true, enables React-specific extensions to Webpack's hot module replacement (HMR) feature.
/// This enables React components to be updated without losing their in-memory state.
/// </summary>
public bool ReactHotModuleReplacement { get; set; }
/// <summary>
/// Specifies additional options to be passed to the Webpack Hot Middleware client, if used.
/// </summary>
public IDictionary<string, string> HotModuleReplacementClientOptions { get; set; }
/// <summary>
/// Specifies the Webpack configuration file to be used. If not set, defaults to 'webpack.config.js'.
/// </summary>
public string ConfigFile { get; set; }
/// <summary>
/// The root path of your project. Webpack runs in this context.
/// </summary>
public string ProjectPath { get; set; }
/// <summary>
/// Specifies additional environment variables to be passed to the Node instance hosting
/// the webpack compiler.
/// </summary>
public IDictionary<string, string> EnvironmentVariables { get; set; }
}
}

View File

@@ -1,5 +0,0 @@
/node_modules/
**/*.js
**/*.d.ts
**/*.metadata.json
/compiled

View File

@@ -1,3 +0,0 @@
!/*.js
!/*.d.ts
/compiled

View File

@@ -1,12 +0,0 @@
Copyright (c) .NET Foundation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
these files except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

View File

@@ -1,33 +0,0 @@
{
"name": "aspnet-angular",
"version": "0.1.1",
"description": "Helpers for using Angular in ASP.NET Core projects",
"main": "index.js",
"scripts": {
"prepublish": "rimraf *.d.ts && ngc && echo 'Finished building NPM package \"aspnet-angular\"'",
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/aspnet/JavaScriptServices.git"
},
"author": "Microsoft",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/aspnet/JavaScriptServices/issues"
},
"devDependencies": {
"@angular/common": "^4.3.2",
"@angular/compiler": "^4.3.2",
"@angular/compiler-cli": "^4.3.2",
"@angular/core": "^4.3.2",
"@angular/http": "^4.3.2",
"@angular/platform-browser": "^4.3.2",
"rimraf": "^2.6.1",
"rxjs": "^5.4.2",
"zone.js": "^0.8.16"
},
"peerDependencies": {
"@angular/core": "^4.2.5 || ^5.0.0-beta"
}
}

View File

@@ -1,94 +0,0 @@
import { Provider, NgModule, Inject } from '@angular/core';
import { Headers, Http, ResponseOptions, RequestOptionsArgs, Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/map';
const globalSerializedStateKey = 'HTTP_STATE_TRANSFER';
const backingStoreDIToken = 'HTTP_STATE_BACKING_STORE';
export interface CacheOptions {
permanent: boolean;
}
export interface CachedHttpResponse {
headers: { [name: string]: any } | null;
status: number;
statusText: string | null;
text: string;
url: string;
}
export type BackingStore = { [key: string]: CachedHttpResponse };
export class HttpWithStateTransfer {
private backingStore: BackingStore;
private http: Http;
constructor(@Inject(Http) http: Http, @Inject(backingStoreDIToken) backingStore: BackingStore) {
this.http = http;
this.backingStore = backingStore;
}
public stateForTransfer(): any {
return { [globalSerializedStateKey]: this.backingStore };
}
public get(url: string, options?: CacheOptions, requestOptions?: RequestOptionsArgs): Observable<Response> {
return this.getCachedResponse(/* cacheKey */ url, () => this.http.get(url, requestOptions), options);
}
private getCachedResponse(cacheKey: string, provider: () => Observable<Response>, options?: CacheOptions): Observable<Response> {
// By default, the cache is only used for the *first* client-side read. So, we're only performing
// a one-time transfer of server-side response to the client. If you want to keep and reuse cached
// responses continually during server-side and client-side execution, set 'permanent' to 'true'.
const isClient = typeof window !== 'undefined';
const isPermanent = options && options.permanent;
const allowReadFromCache = isClient || isPermanent;
if (allowReadFromCache && this.backingStore.hasOwnProperty(cacheKey)) {
const cachedValue = this.backingStore[cacheKey];
if (!isPermanent) {
delete this.backingStore[cacheKey];
}
return Observable.of(new Response(new ResponseOptions({
body: cachedValue.text,
headers: new Headers(cachedValue.headers),
status: cachedValue.status,
url: cachedValue.url
})));
}
return provider()
.map(response => {
const allowWriteToCache = !isClient || isPermanent;
if (allowWriteToCache) {
this.backingStore[cacheKey] = {
headers: response.headers ? response.headers.toJSON() : null,
status: response.status,
statusText: response.statusText,
text: response.text(),
url: response.url
};
}
return response;
});
}
}
export function defaultBackingStoreFactory() {
const transferredData = typeof window !== 'undefined' ? (window as any)[globalSerializedStateKey] : null;
return transferredData || {};
}
@NgModule({
providers: [
// The backing store is a separate DI service so you could override exactly how it gets
// transferred from server to client
{ provide: backingStoreDIToken, useFactory: defaultBackingStoreFactory },
{ provide: HttpWithStateTransfer, useClass: HttpWithStateTransfer },
]
})
export class HttpWithStateTransferModule {
}

View File

@@ -1 +0,0 @@
export * from './HttpWithStateTransfer';

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"experimentalDecorators": true,
"moduleResolution": "node",
"module": "commonjs",
"target": "es5",
"declaration": true,
"outDir": ".",
"lib": ["es2015", "dom"]
},
"files": [
"src/index.ts"
],
"exclude": [
"node_modules"
],
"angularCompilerOptions": {
"genDir": "compiled"
}
}

View File

@@ -1,8 +0,0 @@
/typings/
/node_modules/
/**/*.js
/**/.d.ts
!/src/**/*.d.ts
yarn.lock

View File

@@ -1,4 +0,0 @@
!/*.js
!/*.d.ts
/typings/
yarn.lock

View File

@@ -1,12 +0,0 @@
Copyright (c) .NET Foundation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
these files except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

View File

@@ -1,6 +0,0 @@
# Not for general use
This NPM package is an internal implementation detail of the `Microsoft.AspNetCore.SpaServices` NuGet package.
You should not use this package directly in your own applications, because it is not supported, and there are no
guarantees about how its APIs will change in the future.

View File

@@ -1,27 +0,0 @@
{
"name": "aspnet-prerendering",
"version": "3.0.1",
"description": "Helpers for server-side rendering of JavaScript applications in ASP.NET Core projects. Works in conjunction with the Microsoft.AspNetCore.SpaServices NuGet package.",
"main": "index.js",
"scripts": {
"prepublish": "rimraf *.d.ts && tsc && echo 'Finished building NPM package \"aspnet-prerendering\"'",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Microsoft",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/aspnet/JavaScriptServices/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/aspnet/JavaScriptServices.git"
},
"dependencies": {
"domain-task": "^3.0.0"
},
"devDependencies": {
"@types/node": "^6.0.42",
"rimraf": "^2.5.4",
"typescript": "^2.2.1"
}
}

View File

@@ -1,112 +0,0 @@
import * as url from 'url';
import * as path from 'path';
import * as domain from 'domain';
import { run as domainTaskRun, baseUrl as domainTaskBaseUrl } from 'domain-task/main';
import { BootFunc, BootFuncParams, BootModuleInfo, RenderToStringCallback, RenderToStringFunc } from './PrerenderingInterfaces';
const defaultTimeoutMilliseconds = 30 * 1000;
export function createServerRenderer(bootFunc: BootFunc): RenderToStringFunc {
const resultFunc = (callback: RenderToStringCallback, applicationBasePath: string, bootModule: BootModuleInfo, absoluteRequestUrl: string, requestPathAndQuery: string, customDataParameter: any, overrideTimeoutMilliseconds: number, requestPathBase: string) => {
// Prepare a promise that will represent the completion of all domain tasks in this execution context.
// The boot code will wait for this before performing its final render.
let domainTaskCompletionPromiseResolve;
const domainTaskCompletionPromise = new Promise((resolve, reject) => {
domainTaskCompletionPromiseResolve = resolve;
});
const parsedAbsoluteRequestUrl = url.parse(absoluteRequestUrl);
const params: BootFuncParams = {
// It's helpful for boot funcs to receive the query as a key-value object, so parse it here
// e.g., react-redux-router requires location.query to be a key-value object for consistency with client-side behaviour
location: url.parse(requestPathAndQuery, /* parseQueryString */ true),
origin: parsedAbsoluteRequestUrl.protocol + '//' + parsedAbsoluteRequestUrl.host,
url: requestPathAndQuery,
baseUrl: (requestPathBase || '') + '/',
absoluteUrl: absoluteRequestUrl,
domainTasks: domainTaskCompletionPromise,
data: customDataParameter
};
const absoluteBaseUrl = params.origin + params.baseUrl; // Should be same value as page's <base href>
// Open a new domain that can track all the async tasks involved in the app's execution
domainTaskRun(/* code to run */ () => {
// Workaround for Node bug where native Promise continuations lose their domain context
// (https://github.com/nodejs/node-v0.x-archive/issues/8648)
// The domain.active property is set by the domain-context module
bindPromiseContinuationsToDomain(domainTaskCompletionPromise, domain['active']);
// Make the base URL available to the 'domain-tasks/fetch' helper within this execution context
domainTaskBaseUrl(absoluteBaseUrl);
// Begin rendering, and apply a timeout
const bootFuncPromise = bootFunc(params);
if (!bootFuncPromise || typeof bootFuncPromise.then !== 'function') {
callback(`Prerendering failed because the boot function in ${bootModule.moduleName} did not return a promise.`, null);
return;
}
const timeoutMilliseconds = overrideTimeoutMilliseconds || defaultTimeoutMilliseconds; // e.g., pass -1 to override as 'never time out'
const bootFuncPromiseWithTimeout = timeoutMilliseconds > 0
? wrapWithTimeout(bootFuncPromise, timeoutMilliseconds,
`Prerendering timed out after ${timeoutMilliseconds}ms because the boot function in '${bootModule.moduleName}' `
+ 'returned a promise that did not resolve or reject. Make sure that your boot function always resolves or '
+ 'rejects its promise. You can change the timeout value using the \'asp-prerender-timeout\' tag helper.')
: bootFuncPromise;
// Actually perform the rendering
bootFuncPromiseWithTimeout.then(successResult => {
callback(null, successResult);
}, error => {
callback(error, null);
});
}, /* completion callback */ errorOrNothing => {
if (errorOrNothing) {
callback(errorOrNothing, null);
} else {
// There are no more ongoing domain tasks (typically data access operations), so we can resolve
// the domain tasks promise which notifies the boot code that it can do its final render.
domainTaskCompletionPromiseResolve();
}
});
};
// Indicate to the prerendering code bundled into Microsoft.AspNetCore.SpaServices that this is a serverside rendering
// function, so it can be invoked directly. This flag exists only so that, in its absence, we can run some different
// backward-compatibility logic.
resultFunc['isServerRenderer'] = true;
return resultFunc;
}
function wrapWithTimeout<T>(promise: Promise<T>, timeoutMilliseconds: number, timeoutRejectionValue: any): Promise<T> {
return new Promise<T>((resolve, reject) => {
const timeoutTimer = setTimeout(() => {
reject(timeoutRejectionValue);
}, timeoutMilliseconds);
promise.then(
resolvedValue => {
clearTimeout(timeoutTimer);
resolve(resolvedValue);
},
rejectedValue => {
clearTimeout(timeoutTimer);
reject(rejectedValue);
}
)
});
}
function bindPromiseContinuationsToDomain(promise: Promise<any>, domainInstance: domain.Domain) {
const originalThen = promise.then;
promise.then = (function then(resolve, reject) {
if (typeof resolve === 'function') {
resolve = domainInstance.bind(resolve);
}
if (typeof reject === 'function') {
reject = domainInstance.bind(reject);
}
return originalThen.call(this, resolve, reject);
}) as any;
}

View File

@@ -1,39 +0,0 @@
export interface RenderToStringFunc {
(callback: RenderToStringCallback, applicationBasePath: string, bootModule: BootModuleInfo, absoluteRequestUrl: string, requestPathAndQuery: string, customDataParameter: any, overrideTimeoutMilliseconds: number, requestPathBase: string): void;
}
export interface RenderToStringCallback {
(error: any, result?: RenderResult): void;
}
export interface RenderToStringResult {
html: string;
statusCode?: number;
globals?: { [key: string]: any };
}
export interface RedirectResult {
redirectUrl: string;
}
export type RenderResult = RenderToStringResult | RedirectResult;
export interface BootFunc {
(params: BootFuncParams): Promise<RenderResult>;
}
export interface BootFuncParams {
location: any; // e.g., Location object containing information '/some/path'
origin: string; // e.g., 'https://example.com:1234'
url: string; // e.g., '/some/path'
baseUrl: string; // e.g., '' or '/myVirtualDir'
absoluteUrl: string; // e.g., 'https://example.com:1234/some/path'
domainTasks: Promise<any>;
data: any; // any custom object passed through from .NET
}
export interface BootModuleInfo {
moduleName: string;
exportName?: string;
webpackConfig?: string;
}

View File

@@ -1,2 +0,0 @@
export * from './Prerendering';
export * from './PrerenderingInterfaces';

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"moduleResolution": "node",
"module": "commonjs",
"target": "es5",
"declaration": true,
"outDir": ".",
"lib": ["es2015", "dom"]
},
"files": [
"src/index.ts"
],
"exclude": [
"node_modules"
]
}

View File

@@ -1,3 +0,0 @@
/node_modules/
/*.js
/*.d.ts

View File

@@ -1,3 +0,0 @@
!/*.js
!/*.d.ts
/typings/

View File

@@ -1,12 +0,0 @@
Copyright (c) .NET Foundation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
these files except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

View File

@@ -1,11 +0,0 @@
# Not for general use
This NPM package is an internal implementation detail of the `Microsoft.AspNetCore.SpaServices` NuGet package.
You should not use this package directly in your own applications, because it is not supported, and there are no
guarantees about how its APIs will change in the future.
## History
* Version 1.x amends the Webpack config to insert `react-transform` and `react-transform-hmr` entries on `babel-loader`.
* Version 2.x drops support for the Babel plugin, and instead amends the Webpack config to insert `react-hot-loader/webpack` and `react-hot-loader/patch` entries. This means it works with React Hot Loader v3.

View File

@@ -1,28 +0,0 @@
{
"name": "aspnet-webpack-react",
"version": "3.0.0",
"description": "Helpers for using Webpack with React in ASP.NET Core projects. Works in conjunction with the Microsoft.AspNetCore.SpaServices NuGet package.",
"main": "index.js",
"scripts": {
"prepublish": "rimraf *.d.ts && tsc && echo 'Finished building NPM package \"aspnet-webpack-react\"'",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Microsoft",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/aspnet/JavaScriptServices/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/aspnet/JavaScriptServices.git"
},
"devDependencies": {
"@types/webpack": "^2.2.0",
"rimraf": "^2.5.4",
"typescript": "^2.0.0",
"webpack": "^2.2.0"
},
"peerDependencies": {
"webpack": "^2.2.0"
}
}

View File

@@ -1,66 +0,0 @@
import * as webpack from 'webpack';
const reactHotLoaderWebpackLoader = 'react-hot-loader/webpack';
const reactHotLoaderPatch = 'react-hot-loader/patch';
const supportedTypeScriptLoaders = ['ts-loader', 'awesome-typescript-loader'];
export function addReactHotModuleReplacementConfig(webpackConfig: webpack.Configuration) {
const moduleConfig = webpackConfig.module as webpack.NewModule;
const moduleRules = moduleConfig.rules;
if (!moduleRules) {
return; // Unknown rules list format. Might be Webpack 1.x, which is not supported.
}
// Find the rule that loads TypeScript files, and prepend 'react-hot-loader/webpack'
// to its array of loaders
for (let ruleIndex = 0; ruleIndex < moduleRules.length; ruleIndex++) {
// We only support NewUseRule (i.e., { use: ... }) because OldUseRule doesn't accept array values
const rule = moduleRules[ruleIndex] as webpack.NewUseRule;
if (!rule.use) {
continue;
}
// We're looking for the first 'use' value that's a TypeScript loader
const loadersArray = rule.use instanceof Array ? rule.use : [rule.use];
const isTypescriptLoader = supportedTypeScriptLoaders.some(typeScriptLoaderName => containsLoader(loadersArray, typeScriptLoaderName));
if (!isTypescriptLoader) {
continue;
}
// This is the one - prefix it with the react-hot-loader loader
// (unless it's already in there somewhere)
if (!containsLoader(loadersArray, reactHotLoaderWebpackLoader)) {
loadersArray.unshift(reactHotLoaderWebpackLoader);
rule.use = loadersArray; // In case we normalised it to an array
}
break;
}
// Ensure the entrypoint is prefixed with 'react-hot-loader/patch' (unless it's already in there).
// We only support entrypoints of the form { name: value } (not just 'name' or ['name'])
// because that gives us a place to prepend the new value
if (!webpackConfig.entry || typeof webpackConfig.entry === 'string' || webpackConfig.entry instanceof Array) {
throw new Error('Cannot enable React HMR because \'entry\' in Webpack config is not of the form { name: value }');
}
const entryConfig = webpackConfig.entry as webpack.Entry;
Object.getOwnPropertyNames(entryConfig).forEach(entrypointName => {
if (typeof(entryConfig[entrypointName]) === 'string') {
// Normalise to array
entryConfig[entrypointName] = [entryConfig[entrypointName] as string];
}
let entryValueArray = entryConfig[entrypointName] as string[];
if (entryValueArray.indexOf(reactHotLoaderPatch) < 0) {
entryValueArray.unshift(reactHotLoaderPatch);
}
});
}
function containsLoader(loadersArray: webpack.Loader[], loaderName: string) {
return loadersArray.some(loader => {
// Allow 'use' values to be either { loader: 'name' } or 'name'
// No need to support legacy webpack.OldLoader
const actualLoaderName = (loader as webpack.NewLoader).loader || (loader as string);
return actualLoaderName && new RegExp(`\\b${ loaderName }\\b`).test(actualLoaderName);
});
}

View File

@@ -1,6 +0,0 @@
export { addReactHotModuleReplacementConfig } from './HotModuleReplacement';
// Temporarily alias addReactHotModuleReplacementConfig as addReactHotModuleReplacementBabelTransform for backward
// compatibility with aspnet-webpack 1.x. In aspnet-webpack 2.0, we can drop the old name (and also deprecate
// some other no-longer-supported functionality, such as LoadViaWebpack).
export { addReactHotModuleReplacementConfig as addReactHotModuleReplacementBabelTransform } from './HotModuleReplacement';

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"moduleResolution": "node",
"module": "commonjs",
"target": "es5",
"declaration": true,
"outDir": ".",
"lib": ["es2015"]
},
"files": [
"src/index.ts"
],
"exclude": [
"node_modules"
]
}

View File

@@ -1,3 +0,0 @@
/node_modules/
/*.js
/*.d.ts

View File

@@ -1,3 +0,0 @@
!/*.js
!/*.d.ts
/typings/

View File

@@ -1,12 +0,0 @@
Copyright (c) .NET Foundation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
these files except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

View File

@@ -1,6 +0,0 @@
# Not for general use
This NPM package is an internal implementation detail of the `Microsoft.AspNetCore.SpaServices` NuGet package.
You should not use this package directly in your own applications, because it is not supported, and there are no
guarantees about how its APIs will change in the future.

File diff suppressed because it is too large

View File

@@ -1,38 +0,0 @@
{
"name": "aspnet-webpack",
"version": "2.0.1",
"description": "Helpers for using Webpack in ASP.NET Core projects. Works in conjunction with the Microsoft.AspNetCore.SpaServices NuGet package.",
"main": "index.js",
"scripts": {
"prepublish": "rimraf *.d.ts && tsc && echo 'Finished building NPM package \"aspnet-webpack\"'",
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "Microsoft",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/aspnet/JavaScriptServices/issues"
},
"repository": {
"type": "git",
"url": "https://github.com/aspnet/JavaScriptServices.git"
},
"dependencies": {
"connect": "^3.4.1",
"es6-promise": "^3.1.2",
"memory-fs": "^0.3.0",
"require-from-string": "^1.1.0",
"webpack-dev-middleware": "^1.8.4",
"webpack-node-externals": "^1.4.3"
},
"devDependencies": {
"@types/connect": "^3.4.30",
"@types/node": "^6.0.42",
"@types/webpack": "^2.2.0",
"rimraf": "^2.5.4",
"typescript": "^2.0.0",
"webpack": "^1.13.2"
},
"peerDependencies": {
"webpack": "^1.13.2 || ^2.1.0-beta"
}
}

View File

@@ -1,143 +0,0 @@
// When you're using Webpack, it's often convenient to be able to require modules from regular JavaScript
// and have them transformed by Webpack. This is especially useful when doing ASP.NET server-side prerendering,
// because it means your boot module can use whatever source language you like (e.g., TypeScript), and means
// that your loader plugins (e.g., require('./mystyles.less')) work in exactly the same way on the server as
// on the client.
import 'es6-promise';
import * as path from 'path';
import * as webpack from 'webpack';
import { requireNewCopy } from './RequireNewCopy';
// Strange import syntax to work around https://github.com/Microsoft/TypeScript/issues/2719
import { requirefromstring } from './typings/require-from-string';
import { memoryfs } from './typings/memory-fs';
const nodeExternals = require('webpack-node-externals');
const requireFromString = require('require-from-string') as typeof requirefromstring.requireFromString;
const MemoryFS = require('memory-fs') as typeof memoryfs.MemoryFS;
// Ensure we only go through the compile process once per [config, module] pair
const loadViaWebpackPromisesCache: { [key: string]: any } = {};
export interface LoadViaWebpackCallback<T> {
(error: any, result: T): void;
}
export function loadViaWebpack<T>(webpackConfigPath: string, modulePath: string, callback: LoadViaWebpackCallback<T>) {
const cacheKey = JSON.stringify(webpackConfigPath) + JSON.stringify(modulePath);
if (!(cacheKey in loadViaWebpackPromisesCache)) {
loadViaWebpackPromisesCache[cacheKey] = loadViaWebpackNoCache(webpackConfigPath, modulePath);
}
loadViaWebpackPromisesCache[cacheKey].then(result => {
callback(null, result);
}, error => {
callback(error, null);
})
}
function setExtension(filePath: string, newExtension: string) {
const oldExtensionIfAny = path.extname(filePath);
const basenameWithoutExtension = path.basename(filePath, oldExtensionIfAny);
return path.join(path.dirname(filePath), basenameWithoutExtension) + newExtension;
}
function loadViaWebpackNoCache<T>(webpackConfigPath: string, modulePath: string) {
return new Promise<T>((resolve, reject) => {
// Load the Webpack config and make alterations needed for loading the output into Node
const webpackConfig: webpack.Configuration = requireNewCopy(webpackConfigPath);
webpackConfig.entry = modulePath;
webpackConfig.target = 'node';
// Make sure we preserve the 'path' and 'publicPath' config values if specified, as these
// can affect the build output (e.g., when using 'file' loader, the publicPath value gets
// set as a prefix on output paths).
webpackConfig.output = webpackConfig.output || {};
webpackConfig.output.path = webpackConfig.output.path || '/';
webpackConfig.output.filename = 'webpack-output.js';
webpackConfig.output.libraryTarget = 'commonjs';
const outputVirtualPath = path.join(webpackConfig.output.path, webpackConfig.output.filename);
// In Node, we want any JavaScript modules under /node_modules/ to be loaded natively and not bundled into the
// output (partly because it's faster, but also because otherwise there'd be different instances of modules
// depending on how they were loaded, which could lead to errors).
// ---
// NOTE: We have to use webpack-node-externals rather than webpack-externals-plugin because
// webpack-externals-plugin doesn't correctly resolve relative paths, which means you can't
// use css-loader, since it tries to require('./../../node_modules/css-loader/lib/css-base.js') (see #132)
// ---
// So, ensure that webpackConfig.externals is an array, and push WebpackNodeExternals into it:
let externalsArray: any[] = (webpackConfig.externals as any[]) || [];
if (!(externalsArray instanceof Array)) {
externalsArray = [externalsArray];
}
webpackConfig.externals = externalsArray;
externalsArray.push(nodeExternals({
// However, we do *not* want to treat non-JS files under /node_modules/ as externals (i.e., things
// that should be loaded via regular CommonJS 'require' statements). For example, if you reference
// a .css file inside an NPM module (e.g., require('somepackage/somefile.css')), then we do need to
// load that via Webpack rather than as a regular CommonJS module.
//
// So, configure webpack-node-externals to 'whitelist' (i.e., not treat as external) any file
// that has an extension other than .js. Also, since some libraries such as font-awesome refer to
// their own files with cache-busting querystrings (e.g., (url('./something.css?v=4.1.2'))), we
// need to treat '?' as an alternative 'end of filename' marker.
//
// The complex, awkward regex can be eliminated once webpack-node-externals merges
// https://github.com/liady/webpack-node-externals/pull/12
//
// This regex looks for at least one dot character that is *not* followed by "js<end-or-questionmark>", but
// is followed by some series of non-dot characters followed by <end-or-questionmark>:
whitelist: [/\.(?!js(\?|$))([^.]+(\?|$))/]
}));
// The CommonsChunkPlugin is not compatible with a CommonJS environment like Node, nor is it needed in that case
webpackConfig.plugins = webpackConfig.plugins.filter(plugin => {
return !(plugin instanceof webpack.optimize.CommonsChunkPlugin);
});
// The typical use case for DllReferencePlugin is for referencing vendor modules. In a Node
// environment, it doesn't make sense to load them from a DLL bundle, nor would that even
// work, because then you'd get different module instances depending on whether a module
// was referenced via a normal CommonJS 'require' or via Webpack. So just remove any
// DllReferencePlugin from the config.
// If someone wanted to load their own DLL modules (not an NPM module) via DllReferencePlugin,
// that scenario is not supported today. We would have to add some extra option to the
// asp-prerender tag helper to let you specify a list of DLL bundles that should be evaluated
// in this context. But even then you'd need special DLL builds for the Node environment so that
// external dependencies were fetched via CommonJS requires, so it's unclear how that could work.
// The ultimate escape hatch here is just prebuilding your code as part of the application build
// and *not* using asp-prerender-webpack-config at all, then you can do anything you want.
webpackConfig.plugins = webpackConfig.plugins.filter(plugin => {
// DllReferencePlugin is missing from webpack.d.ts for some reason, hence referencing it
// as a key-value object property
return !(plugin instanceof webpack['DllReferencePlugin']);
});
// Create a compiler instance that stores its output in memory, then load its output
const compiler = webpack(webpackConfig);
compiler.outputFileSystem = new MemoryFS();
compiler.run((err, stats) => {
if (err) {
reject(err);
} else {
// We're in a callback, so need an explicit try/catch to propagate any errors up the promise chain
try {
if (stats.hasErrors()) {
throw new Error('Webpack compilation reported errors. Compiler output follows: '
+ stats.toString({ chunks: false }));
}
// The dynamically-built module will only appear in node-inspector if it has some nonempty
// file path. The following value is arbitrary (since there's no real compiled file on disk)
// but is sufficient to enable debugging.
const fakeModulePath = setExtension(modulePath, '.js');
const fileContent = compiler.outputFileSystem.readFileSync(outputVirtualPath, 'utf8');
const moduleInstance = requireFromString<T>(fileContent, fakeModulePath);
resolve(moduleInstance);
} catch(ex) {
reject(ex);
}
}
});
});
}

View File

@@ -1,22 +0,0 @@
export function requireNewCopy(moduleNameOrPath: string): any {
// Store a reference to whatever's in the 'require' cache,
// so we don't permanently destroy it, and then ensure there's
// no cache entry for this module
const resolvedModule = require.resolve(moduleNameOrPath);
const wasCached = resolvedModule in require.cache;
let cachedInstance;
if (wasCached) {
cachedInstance = require.cache[resolvedModule];
delete require.cache[resolvedModule];
}
try {
// Return a new copy
return require(resolvedModule);
} finally {
// Restore the cached entry, if any
if (wasCached) {
require.cache[resolvedModule] = cachedInstance;
}
}
}

View File

@@ -1,363 +0,0 @@
import * as connect from 'connect';
import * as webpack from 'webpack';
import * as url from 'url';
import * as fs from 'fs';
import * as path from 'path';
import * as querystring from 'querystring';
import { requireNewCopy } from './RequireNewCopy';
import { hasSufficientPermissions } from './WebpackTestPermissions';
export type CreateDevServerResult = {
Port: number,
PublicPaths: string[]
};
export interface CreateDevServerCallback {
(error: any, result: CreateDevServerResult): void;
}
// These are the options passed by WebpackDevMiddleware.cs
interface CreateDevServerOptions {
webpackConfigPath: string;
suppliedOptions: DevServerOptions;
hotModuleReplacementEndpointUrl: string;
}
type EsModuleExports<T> = { __esModule: true, default: T };
type StringMap<T> = { [key: string]: T };
// These are the options configured in C# and then JSON-serialized, hence the C#-style naming
interface DevServerOptions {
HotModuleReplacement: boolean;
HotModuleReplacementServerPort: number;
HotModuleReplacementClientOptions: StringMap<string>;
ReactHotModuleReplacement: boolean;
}
// We support these three kinds of webpack.config.js export. We don't currently support exported promises
// (though we might be able to add that in the future, if there's a need).
type WebpackConfigOrArray = webpack.Configuration | webpack.Configuration[];
interface WebpackConfigFunc {
(env?: any): WebpackConfigOrArray;
}
type WebpackConfigExport = WebpackConfigOrArray | WebpackConfigFunc;
type WebpackConfigModuleExports = WebpackConfigExport | EsModuleExports<WebpackConfigExport>;
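// For illustration, all of the following webpack.config.js export shapes are accepted:
//   module.exports = { ... };                // a single Configuration
//   module.exports = [{ ... }, { ... }];     // an array of Configurations
//   module.exports = (env) => ({ ... });     // a function returning either of the above
//   export default { ... };                  // an ES module default export of any of the above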
function attachWebpackDevMiddleware(app: any, webpackConfig: webpack.Configuration, enableHotModuleReplacement: boolean, enableReactHotModuleReplacement: boolean, hmrClientOptions: StringMap<string>, hmrServerEndpoint: string) {
// Build the final Webpack config based on supplied options
if (enableHotModuleReplacement) {
// For this, we only support the key/value config format, not string or string[], since
// those ones don't clearly indicate what the resulting bundle name will be
const entryPoints = webpackConfig.entry;
const isObjectStyleConfig = entryPoints
&& typeof entryPoints === 'object'
&& !(entryPoints instanceof Array);
if (!isObjectStyleConfig) {
throw new Error('To use HotModuleReplacement, your webpack config must specify an \'entry\' value as a key-value object (e.g., "entry: { main: \'ClientApp/boot-client.ts\' }")');
}
// Augment all entry points so they support HMR (unless they already do)
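// For illustration, an entry like { main: './ClientApp/boot-client.ts' } becomes
//   { main: ['webpack-hot-middleware/client?<options>', './ClientApp/boot-client.ts'] }
// (with 'event-source-polyfill' inserted just before the HMR client entry when it's available).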
Object.getOwnPropertyNames(entryPoints).forEach(entryPointName => {
const webpackHotMiddlewareEntryPoint = 'webpack-hot-middleware/client';
const webpackHotMiddlewareOptions = '?' + querystring.stringify(hmrClientOptions);
if (typeof entryPoints[entryPointName] === 'string') {
entryPoints[entryPointName] = [webpackHotMiddlewareEntryPoint + webpackHotMiddlewareOptions, entryPoints[entryPointName]];
} else if (firstIndexOfStringStartingWith(entryPoints[entryPointName], webpackHotMiddlewareEntryPoint) < 0) {
entryPoints[entryPointName].unshift(webpackHotMiddlewareEntryPoint + webpackHotMiddlewareOptions);
}
// Now also inject eventsource polyfill so this can work on IE/Edge (unless it's already there)
// To avoid this being a breaking change for everyone who uses aspnet-webpack, we only do this if you've
// referenced event-source-polyfill in your package.json. Note that having event-source-polyfill available
// on the server in node_modules doesn't imply that you've also included it in your client-side bundle,
// but the converse is true (if it's not in node_modules, then you obviously aren't trying to use it at
// all, so it would definitely not work to take a dependency on it).
const eventSourcePolyfillEntryPoint = 'event-source-polyfill';
if (npmModuleIsPresent(eventSourcePolyfillEntryPoint)) {
const entryPointsArray: string[] = entryPoints[entryPointName]; // We know by now that it's an array, because if it wasn't, we already wrapped it in one
if (entryPointsArray.indexOf(eventSourcePolyfillEntryPoint) < 0) {
const webpackHmrIndex = firstIndexOfStringStartingWith(entryPointsArray, webpackHotMiddlewareEntryPoint);
if (webpackHmrIndex < 0) {
// This should not be possible, since we just added it if it was missing
throw new Error('Cannot find ' + webpackHotMiddlewareEntryPoint + ' in entry points array: ' + entryPointsArray);
}
// Insert the polyfill just before the HMR entrypoint
entryPointsArray.splice(webpackHmrIndex, 0, eventSourcePolyfillEntryPoint);
}
}
});
webpackConfig.plugins = [].concat(webpackConfig.plugins || []); // Be sure not to mutate the original array, as it might be shared
webpackConfig.plugins.push(
new webpack.HotModuleReplacementPlugin()
);
// Set up React HMR support if requested. This requires the 'aspnet-webpack-react' package.
if (enableReactHotModuleReplacement) {
let aspNetWebpackReactModule: any;
try {
aspNetWebpackReactModule = require('aspnet-webpack-react');
} catch(ex) {
throw new Error('ReactHotModuleReplacement failed because of an error while loading \'aspnet-webpack-react\'. Error was: ' + ex.stack);
}
aspNetWebpackReactModule.addReactHotModuleReplacementBabelTransform(webpackConfig);
}
}
// Attach Webpack dev middleware and optional 'hot' middleware
const compiler = webpack(webpackConfig);
app.use(require('webpack-dev-middleware')(compiler, {
noInfo: true,
publicPath: ensureLeadingSlash(webpackConfig.output.publicPath),
watchOptions: webpackConfig.watchOptions
}));
// After each compilation completes, copy the in-memory filesystem to disk.
// This is needed because the debuggers in both VS and VS Code assume that they'll be able to find
// the compiled files on the local disk (though it would be better if they got the source file from
// the browser they are debugging, which would be more correct and make this workaround unnecessary).
// Without this, Webpack plugins like HMR that dynamically modify the compiled output in the dev
// middleware's in-memory filesystem only (and not on disk) would confuse the debugger, because the
// file on disk wouldn't match the file served to the browser, and the source map line numbers wouldn't
// match up. Breakpoints would either not be hit, or would hit the wrong lines.
(compiler as any).plugin('done', stats => {
copyRecursiveToRealFsSync(compiler.outputFileSystem, '/', [/\.hot-update\.(js|json|js\.map)$/]);
});
if (enableHotModuleReplacement) {
let webpackHotMiddlewareModule;
try {
webpackHotMiddlewareModule = require('webpack-hot-middleware');
} catch (ex) {
throw new Error('HotModuleReplacement failed because of an error while loading \'webpack-hot-middleware\'. Error was: ' + ex.stack);
}
app.use(workaroundIISExpressEventStreamFlushingIssue(hmrServerEndpoint));
app.use(webpackHotMiddlewareModule(compiler, {
path: hmrServerEndpoint
}));
}
}
function workaroundIISExpressEventStreamFlushingIssue(path: string): connect.NextHandleFunction {
// IIS Express makes HMR seem very slow, because when it's reverse-proxying an EventStream response
// from Kestrel, it doesn't pass through the lines to the browser immediately, even if you're calling
// response.Flush (or equivalent) in your ASP.NET Core code. For some reason, it waits until the following
// line is sent. By default, that wouldn't be until the next HMR heartbeat, which can be up to 5 seconds later.
// In effect, it looks as if your code is taking 5 seconds longer to compile than it really does.
//
// As a workaround, this connect middleware intercepts requests to the HMR endpoint, and modifies the response
// stream so that all EventStream 'data' lines are immediately followed by a further blank line. This is
// harmless in non-IIS-Express cases, because it's OK to have extra blank lines in an EventStream response.
// The implementation is simplistic - rather than using a true stream reader, we just patch the 'write'
// method. This relies on webpack's HMR code always writing complete EventStream messages with a single
// 'write' call. That works fine today, but if webpack's HMR code was changed, this workaround might have
// to be updated.
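// In other words, a chunk written as "data: {...}\n" is immediately followed by an extra "\n\n",
// which is harmless to EventStream consumers but prompts IIS Express to flush the proxied response.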
const eventStreamLineStart = /^data\:/;
return (req, res, next) => {
// We only want to interfere with requests to the HMR endpoint, so check this request matches
const urlMatchesPath = (req.url === path) || (req.url.split('?', 1)[0] === path);
if (urlMatchesPath) {
const origWrite = res.write;
res.write = function (chunk) {
const result = origWrite.apply(this, arguments);
// We only want to interfere with actual EventStream data lines, so check it is one
if (typeof (chunk) === 'string') {
if (eventStreamLineStart.test(chunk) && chunk.charAt(chunk.length - 1) === '\n') {
origWrite.call(this, '\n\n');
}
}
return result;
}
}
return next();
};
}
function copyRecursiveToRealFsSync(from: typeof fs, rootDir: string, exclude: RegExp[]) {
from.readdirSync(rootDir).forEach(filename => {
const fullPath = pathJoinSafe(rootDir, filename);
const shouldExclude = exclude.filter(re => re.test(fullPath)).length > 0;
if (!shouldExclude) {
const fileStat = from.statSync(fullPath);
if (fileStat.isFile()) {
const fileBuf = from.readFileSync(fullPath);
fs.writeFileSync(fullPath, fileBuf);
} else if (fileStat.isDirectory()) {
if (!fs.existsSync(fullPath)) {
fs.mkdirSync(fullPath);
}
copyRecursiveToRealFsSync(from, fullPath, exclude);
}
}
});
}
function ensureLeadingSlash(value: string) {
if (value !== null && value.substring(0, 1) !== '/') {
value = '/' + value;
}
return value;
}
function pathJoinSafe(rootPath: string, filePath: string) {
// On Windows, MemoryFileSystem's readdirSync output produces directory entries like 'C:'
// which then trigger errors if you call statSync for them. Avoid this by detecting drive
// names at the root, and adding a backslash (so 'C:' becomes 'C:\', which works).
if (rootPath === '/' && path.sep === '\\' && filePath.match(/^[a-z0-9]+\:$/i)) {
return filePath + '\\';
} else {
return path.join(rootPath, filePath);
}
}
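// e.g., with rootPath '/' and filePath 'C:', this returns 'C:\' so that the subsequent statSync
// call succeeds, instead of joining them into a path that would fail.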
function beginWebpackWatcher(webpackConfig: webpack.Configuration) {
const compiler = webpack(webpackConfig);
compiler.watch(webpackConfig.watchOptions || {}, (err, stats) => {
// The default error reporter is fine for now, but could be customized here in the future if desired
});
}
export function createWebpackDevServer(callback: CreateDevServerCallback, optionsJson: string) {
const options: CreateDevServerOptions = JSON.parse(optionsJson);
// See the large comment in WebpackTestPermissions.ts for details about this
if (!hasSufficientPermissions()) {
console.log('WARNING: Webpack dev middleware is not enabled because the server process does not have sufficient permissions. You should either remove the UseWebpackDevMiddleware call from your code or, to make it work, grant the account running your server process permission to write to your application directory and to read all ancestor-level directories.');
callback(null, {
Port: 0,
PublicPaths: []
});
return;
}
// Read the webpack config's export, and normalize it into the more general 'array of configs' format
const webpackConfigModuleExports: WebpackConfigModuleExports = requireNewCopy(options.webpackConfigPath);
let webpackConfigExport = (webpackConfigModuleExports as EsModuleExports<{}>).__esModule === true
? (webpackConfigModuleExports as EsModuleExports<WebpackConfigExport>).default
: (webpackConfigModuleExports as WebpackConfigExport);
if (webpackConfigExport instanceof Function) {
// If you export a function, we'll call it with an undefined 'env' arg, since we have nothing else
// to pass. This is the same as what the webpack CLI tool does if you specify no '--env.x' values.
// In the future, we could add support for configuring the 'env' param in Startup.cs. But right
// now, it's not clear that people will want to do that (and they can always make up their own
// default env values in their webpack.config.js).
webpackConfigExport = webpackConfigExport();
}
const webpackConfigArray = webpackConfigExport instanceof Array ? webpackConfigExport : [webpackConfigExport];
const enableHotModuleReplacement = options.suppliedOptions.HotModuleReplacement;
const enableReactHotModuleReplacement = options.suppliedOptions.ReactHotModuleReplacement;
if (enableReactHotModuleReplacement && !enableHotModuleReplacement) {
callback('To use ReactHotModuleReplacement, you must also enable the HotModuleReplacement option.', null);
return;
}
// The default value, 0, means 'choose randomly'
const suggestedHMRPortOrZero = options.suppliedOptions.HotModuleReplacementServerPort || 0;
const app = connect();
const listener = app.listen(suggestedHMRPortOrZero, () => {
try {
// For each webpack config that specifies a public path, add webpack dev middleware for it
const normalizedPublicPaths: string[] = [];
webpackConfigArray.forEach(webpackConfig => {
if (webpackConfig.target === 'node') {
// For configs that target Node, it's meaningless to set up an HTTP listener, since
// Node isn't going to load those modules over HTTP anyway. It just loads them directly
// from disk. So the most relevant thing we can do with such configs is just write
// updated builds to disk, just like "webpack --watch".
beginWebpackWatcher(webpackConfig);
} else {
// For configs that target browsers, we can set up an HTTP listener, and dynamically
// modify the config to enable HMR etc. This just requires that we have a publicPath.
const publicPath = (webpackConfig.output.publicPath || '').trim();
if (!publicPath) {
throw new Error('To use the Webpack dev server, you must specify a value for \'publicPath\' on the \'output\' section of your webpack config (for any configuration that targets browsers)');
}
const publicPathNoTrailingSlash = removeTrailingSlash(publicPath);
normalizedPublicPaths.push(publicPathNoTrailingSlash);
// This is the URL the client will connect to, except that since it's a relative URL
// (no leading slash), Webpack will resolve it against the runtime <base href> URL
// plus it also adds the publicPath
const hmrClientEndpoint = removeLeadingSlash(options.hotModuleReplacementEndpointUrl);
// This is the URL inside the Webpack middleware Node server that we'll proxy to.
// We have to prefix with the public path because Webpack will add the publicPath
// when it resolves hmrClientEndpoint as a relative URL.
const hmrServerEndpoint = ensureLeadingSlash(publicPathNoTrailingSlash + options.hotModuleReplacementEndpointUrl);
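// For example (illustrative), with a publicPath of '/dist' and an endpoint URL of '/__webpack_hmr',
// the client connects to '__webpack_hmr' (relative, so the browser's <base href> and the publicPath
// are applied), while this Node server listens at '/dist/__webpack_hmr'.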
// We always overwrite the 'path' option as it needs to match what the .NET side is expecting
const hmrClientOptions = options.suppliedOptions.HotModuleReplacementClientOptions || <StringMap<string>>{};
hmrClientOptions['path'] = hmrClientEndpoint;
const dynamicPublicPathKey = 'dynamicPublicPath';
if (!(dynamicPublicPathKey in hmrClientOptions)) {
// dynamicPublicPath defaults to true, so we can work with nonempty path bases (virtual directories)
hmrClientOptions[dynamicPublicPathKey] = true;
} else {
// ... but you can set it to any other value explicitly if you want (e.g., false)
hmrClientOptions[dynamicPublicPathKey] = JSON.parse(hmrClientOptions[dynamicPublicPathKey]);
}
attachWebpackDevMiddleware(app, webpackConfig, enableHotModuleReplacement, enableReactHotModuleReplacement, hmrClientOptions, hmrServerEndpoint);
}
});
// Tell the ASP.NET app what addresses we're listening on, so that it can proxy requests here
callback(null, {
Port: listener.address().port,
PublicPaths: normalizedPublicPaths
});
} catch (ex) {
callback(ex.stack, null);
}
});
}
function removeLeadingSlash(str: string) {
if (str.indexOf('/') === 0) {
str = str.substring(1);
}
return str;
}
function removeTrailingSlash(str: string) {
if (str.lastIndexOf('/') === str.length - 1) {
str = str.substring(0, str.length - 1);
}
return str;
}
function getPath(publicPath: string) {
return url.parse(publicPath).path;
}
function firstIndexOfStringStartingWith(array: string[], prefixToFind: string) {
for (let index = 0; index < array.length; index++) {
const candidate = array[index];
if ((typeof candidate === 'string') && (candidate.substring(0, prefixToFind.length) === prefixToFind)) {
return index;
}
}
return -1; // Not found
}
function npmModuleIsPresent(moduleName: string) {
try {
require.resolve(moduleName);
return true;
} catch (ex) {
return false;
}
}

View File

@@ -1,58 +0,0 @@
import * as fs from 'fs';
import * as path from 'path';
const isWindows = /^win/.test(process.platform);
// On Windows, Node (still as of v8.1.3) has an issue whereby, when locating JavaScript modules
// on disk, it walks up the directory hierarchy to the disk root, testing whether each directory
// is a symlink or not. This fails with an exception if the process doesn't have permission to
// read those directories. This is a problem when hosting in full IIS, because in typical cases
// the process does not have read permission for higher-level directories.
//
// NodeServices itself works around this by injecting a patched version of Node's 'lstat' API that
// suppresses these irrelevant errors during module loads. This covers most scenarios, but isn't
// enough to make Webpack dev middleware work, because typical Webpack configs use loaders such as
// 'awesome-typescript-loader', which works by forking a child process to do some of its work. The
// child process does not get the patched 'lstat', and hence fails. It's an especially bad failure,
// because the Webpack compiler doesn't even surface the exception - it just never completes the
// compilation process, causing the application to hang indefinitely.
//
// Additionally, Webpack dev middleware will want to write its output to disk, which is also going
// to fail in a typical IIS process, because you won't have 'write' permission to the app dir by
// default. We have to actually write the build output to disk (and not purely keep it in the in-
// memory file system) because the server-side prerendering Node instance is a separate process
// that only knows about code changes when it sees the compiled files on disk change.
//
// In the future, we'll hopefully get Node to fix its underlying issue, and figure out whether VS
// could give 'write' access to the app dir when launching sites in IIS. But until then, disable
// Webpack dev middleware if we detect the server process doesn't have the necessary permissions.
export function hasSufficientPermissions() {
if (isWindows) {
return canReadDirectoryAndAllAncestors(process.cwd());
} else {
return true;
}
}
function canReadDirectoryAndAllAncestors(dir: string): boolean {
if (!canReadDirectory(dir)) {
return false;
}
const parentDir = path.resolve(dir, '..');
if (parentDir === dir) {
// There are no more parent directories - we've reached the disk root
return true;
} else {
return canReadDirectoryAndAllAncestors(parentDir);
}
}
function canReadDirectory(dir: string): boolean {
try {
fs.statSync(dir);
return true;
} catch(ex) {
return false;
}
}

View File

@@ -1,2 +0,0 @@
export { createWebpackDevServer } from './WebpackDevMiddleware';
export { loadViaWebpack } from './LoadViaWebpack';

Some files were not shown because too many files have changed in this diff.