Compare commits

..

66 Commits

Author SHA1 Message Date
Nate McMaster (automated)
f22297a4db Update dependencies.props
[auto-updated: dependencies]
2018-03-28 10:51:38 -07:00
Nate McMaster
c42db123bd Add NuGetPackageVerifier 2018-03-21 10:35:29 -07:00
Pranav K
8583f205f9 Update KoreBuild channel 2018-03-16 12:30:44 -07:00
Pranav K
ae4c4d6e8f Branching for 2.1.0-preview2 2018-03-16 11:15:16 -07:00
Ryan Brandenburg
8553647ce8 Set 2.0 baselines 2018-03-16 10:50:18 -07:00
ASP.NET CI
67560266ab Update dependencies.props
[auto-updated: dependencies]
2018-03-08 13:04:43 -08:00
Pranav K
ce49379afb Prepend FeatureBranchVersionPrefix if FeatureBranchVersionSuffix is specified 2018-03-06 10:04:26 -08:00
Pranav K
4dc05d96ac Use dotnet-core feed in repos 2018-03-06 10:04:26 -08:00
Nate McMaster
53742ad649 Merge branch 'release/2.1' into dev 2018-03-02 14:26:50 -08:00
ASP.NET CI
91a1d83acd Update dependencies.props
[auto-updated: dependencies]
2018-02-26 11:06:22 -08:00
Pranav K
1d504e4565 Use FeatureBranchVersionSuffix when generating VersionSuffix 2018-02-21 18:27:00 -08:00
Steve Sanderson
873cfa9adf In SpaProxy, don't fail if there are non-forwardable headers. Fixes #1543. 2018-02-21 14:03:30 +00:00
ASP.NET CI
41b8642c2d Update dependencies.props
[auto-updated: dependencies]
2018-02-18 12:22:24 -08:00
ASP.NET CI
d6b67ca75d Update dependencies.props
[auto-updated: dependencies]
2018-02-11 12:29:16 -08:00
ASP.NET CI
4af70a0907 Update dependencies.props
[auto-updated: dependencies]
2018-02-09 11:47:59 -08:00
Steve Sanderson
bf5f40b1ed In Websocket proxy, don't forward User-Agent. Fixes #1469. 2018-02-08 11:40:14 +00:00
ASP.NET CI
a515f6bb0a Update dependencies.props
[auto-updated: dependencies]
2018-02-03 02:51:14 +00:00
ASP.NET CI
12a2314a5e Update dependencies.props
[auto-updated: dependencies]
2018-02-01 03:41:40 +00:00
Nate McMaster
f35c814fc7 Update dependencies.props to 2.1.0-preview-28193, build tools to 2.1.0-preview1-1010 [ci skip]
Scripted changes:
- updated travis and appveyor.yml files to only build dev, ci, and release branches
- updated dependencies.props
- updated korebuild-lock.txt
- updated korebuild.json to release/2.1 channel
2018-01-31 15:01:11 -08:00
Steve Sanderson
dbaa453d18 Bump aspnet-webpack version to 2.0.3 for release 2018-01-25 12:58:55 -08:00
Steve Sanderson
b2373e157e Support Webpack configs authored in TypeScript. Covers #1301 2018-01-25 12:14:43 -08:00
Steve Sanderson
08c2f231ea Bump aspnet-webpack version to 2.0.2. Also, further minor tweak to TypeScript annotations. 2018-01-24 17:49:51 -08:00
Steve Sanderson
6274733565 TypeScript annotation fixes 2018-01-24 17:44:47 -08:00
waterfoul
5f6f288056 Added support for Thenables 2018-01-24 17:26:38 -08:00
Steve Sanderson
78e583d0fb Comment and XML doc tweaks 2018-01-24 17:00:59 -08:00
Jordan McDonald
7c07beb494 adding support to pass Env param to webpack 2018-01-24 17:00:59 -08:00
Sławomir Rosiek
e7ffb8bb71 Returning provided promise in addTask 2018-01-25 00:42:58 +00:00
Steve Sanderson
3e6f7f3e45 Loosen aspnet-webpack peerDependency requirement back to cover what it allowed before (so it's not a breaking change) 2018-01-24 16:39:20 -08:00
Keven van Zuijlen
0d83504863 Bump webpack peerDependency version so NPM doesn't give a warning 2018-01-24 16:38:08 -08:00
Pranav K
370b5f7341 Updating version to preview2 2018-01-24 15:00:28 -08:00
Pranav K
0b53b92bc6 Merge branch 'release/2.1' into dev 2018-01-23 15:49:47 -08:00
Pranav K
116f33c66c Branching for 2.1.0-preview1 2018-01-23 15:31:36 -08:00
Nate McMaster
c3964a0437 Merge branch 'release/2.0.0' into dev 2018-01-09 13:59:19 -08:00
ASP.NET CI
f291f87dfc Update dependencies.props
[auto-updated: dependencies]
2018-01-06 14:57:16 -08:00
ASP.NET CI
a1c2c18326 Update dependencies.props
[auto-updated: dependencies]
2018-01-04 01:24:07 +00:00
Steve Sanderson
d6588c31bf In Angular CLI middleware, remove additional level of timeouts since it's now covered upstream. Part of #1447 2018-01-03 11:47:17 +00:00
Steve Sanderson
15d2f5a898 Allow explicit configuration of StaticFileOptions in new SPA APIs. Fixes #1424. 2018-01-02 15:44:59 +00:00
Steve Sanderson
814441c933 Allow configuration of SPA startup timeout. Part of #1447 2018-01-02 14:15:33 +00:00
Steve Sanderson
a98c1459b5 When a SPA dev server (or prerendering build) takes too long to start up, only fail current request, not future requests. Fixes #1447 2018-01-02 14:15:20 +00:00
ASP.NET CI
975d537a0a Update dependencies.props
[auto-updated: dependencies]
2017-12-31 21:17:41 +00:00
ASP.NET CI
4af2e8670e Update dependencies.props
[auto-updated: dependencies]
2017-12-18 17:15:05 -08:00
ASP.NET CI
160c91f1b9 Update dependencies.props
[auto-updated: dependencies]
2017-12-13 21:00:41 +00:00
Steve Sanderson
8ded472fe9 Add status code support to SpaPrerenderingExtensions 2017-12-12 12:28:31 +00:00
ASP.NET CI
f9c62ebb5a Update dependencies.props
[auto-updated: dependencies]
2017-12-10 13:01:04 -08:00
Nate McMaster
d5a664e481 Bump version to 2.0.2 2017-12-06 16:20:14 -08:00
John Goldsmith
74512dc3b2 Change HMR install to devDependencies
Changing hot module replacement install from dependencies to devDependencies.  Raised in this Issue: https://github.com/aspnet/JavaScriptServices/issues/1409
2017-12-05 10:42:15 +00:00
Ryan Brandenburg
e6285f30ae Update bootstrappers 2017-12-01 12:29:52 -08:00
Pranav K
bd5793e284 Specify runtime versions to install 2017-11-29 14:09:27 -08:00
Meir017
02bbcb68f1 fixed docs on SocketNodeInstance
namespace was incorrect
2017-11-24 09:53:35 +00:00
Steve Sanderson
18140929e7 Make AngularCliBuilder provide better information about timeouts 2017-11-22 15:14:58 +00:00
Pranav K
50ba6114ee Replace aspnetcore-ci-dev feed with aspnetcore-dev 2017-11-21 15:48:06 -08:00
Nate McMaster
cd3e3c667c Use MSBuild to set NuGet feeds instead of NuGet.config 2017-11-20 12:43:30 -08:00
Pranav K
0de9f0e3ce Use MicrosoftNETCoreApp21PackageVersion to determine the runtime framework in netcoreapp2.1 2017-11-17 13:00:25 -08:00
Steve Sanderson
9b1509a52b Handle @angular/cli not accept requests immediately on startup 2017-11-17 14:56:47 +00:00
Pranav K
a8809f9a96 Update samples and tests to target netcoreapp2.1 2017-11-16 16:59:15 -08:00
Nate McMaster
64389a9bbe Update build tools to 2.0.2-rc1-15526 and dependencies to 2.0.1-rtm-105 2017-10-13 13:11:55 -07:00
Nate McMaster
86e94d7812 Update how PackageReference versions are set
Changes:
 - Remove floating versions
 - Disable myget feeds during a Universe build
 - Use package-specific MSBuild variables. Pattern = `packageId.Pascalize() + "PackageVersion"`, with a few exceptions.
2017-10-09 11:10:58 -07:00
Nate McMaster
9f05a3d34b Use MSBuild to set NuGet feeds instead of NuGet.config 2017-10-02 14:12:55 -07:00
Nate McMaster
63e0af2ee8 Import dependencies.props last to ensure TargetFramework is set first 2017-09-29 17:02:10 -07:00
Nate McMaster
dc5e980efa Update build scripts, tools, and dependencies for 2.0.x 2017-09-20 17:23:18 -07:00
Nate McMaster
e0ab3ddcca Update the list of packages patching in 2.0.x 2017-09-20 13:40:18 -07:00
Ryan Brandenburg
0c058894c2 Update bootstrappers 2017-09-19 14:44:54 -07:00
Ryan Brandenburg
98385cbcb0 Update bootstrappers 2017-09-19 14:44:54 -07:00
Nate McMaster
77cac3b6be Update package feeds and dependencies for 2.0.1 (#1284) 2017-09-18 12:42:26 -07:00
Nate McMaster
051150475f Bump version to 2.0.1 2017-09-15 18:00:22 -07:00
Pranav K
128683be0e Pinning versions for 2.0.0 2017-08-17 15:00:10 -07:00
185 changed files with 12219 additions and 85 deletions

View File

@@ -4,11 +4,9 @@ install:
- ps: Install-Product node 6.9.2 x64
branches:
only:
- master
- release
- dev
- /^release\/.*$/
- /^(.*\/)?ci-.*$/
- /^rel\/.*/
build_script:
- ps: .\run.ps1 default-build
clone_depth: 1
@@ -16,6 +14,6 @@ environment:
global:
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true
DOTNET_CLI_TELEMETRY_OPTOUT: 1
test: off
test: 'off'
deploy: off
deploy: 'off'
os: Visual Studio 2017

View File

@@ -17,3 +17,8 @@ os:
osx_image: xcode7.1
script:
- ./build.sh
branches:
only:
- dev
- /^release\/.*$/
- /^(.*\/)?ci-.*$/

View File

@@ -1,6 +1,7 @@
<Project>
<Import Project="version.props" />
<Import Project="build\dependencies.props" />
<Import Project="build\sources.props" />
<PropertyGroup>
<Product>Microsoft ASP.NET Core</Product>

View File

@@ -1,5 +1,6 @@
<Project>
<PropertyGroup>
<RuntimeFrameworkVersion Condition=" '$(TargetFramework)' == 'netcoreapp2.0' ">$(MicrosoftNETCoreApp20PackageVersion)</RuntimeFrameworkVersion>
<RuntimeFrameworkVersion Condition=" '$(TargetFramework)' == 'netcoreapp2.1' ">$(MicrosoftNETCoreApp21PackageVersion)</RuntimeFrameworkVersion>
</PropertyGroup>
</Project>

View File

@@ -1,13 +1,19 @@
 
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.27019.1
VisualStudioVersion = 15.0.26730.16
MinimumVisualStudioVersion = 15.0.26730.03
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{27304DDE-AFB2-4F8B-B765-E3E2F11E886C}"
ProjectSection(SolutionItems) = preProject
src\Directory.Build.props = src\Directory.Build.props
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices", "src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj", "{66B77203-1469-41DF-92F2-2BE6900BD36F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices.Sockets", "src\Microsoft.AspNetCore.NodeServices.Sockets\Microsoft.AspNetCore.NodeServices.Sockets.csproj", "{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.SpaServices", "src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj", "{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-22C8-4526-BE78-74B24125D37F}" Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-22C8-4526-BE78-74B24125D37F}"
ProjectSection(SolutionItems) = preProject ProjectSection(SolutionItems) = preProject
.gitignore = .gitignore .gitignore = .gitignore
@@ -15,6 +21,16 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-2
README.md = README.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{23836492-E7F4-4376-85BF-A635C304AC46}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{E6A161EA-646C-4033-9090-95BE809AB8D9}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LatencyTest", "samples\misc\LatencyTest\LatencyTest.csproj", "{1931B19A-EC42-4D56-B2D0-FB06D17244DA}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Webpack", "samples\misc\Webpack\Webpack.csproj", "{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NodeServicesExamples", "samples\misc\NodeServicesExamples\NodeServicesExamples.csproj", "{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{645F7363-1240-4FB6-9422-B32A327C979F}" Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{645F7363-1240-4FB6-9422-B32A327C979F}"
ProjectSection(SolutionItems) = preProject ProjectSection(SolutionItems) = preProject
Directory.Build.props = Directory.Build.props Directory.Build.props = Directory.Build.props
@@ -29,6 +45,30 @@ Global
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.Build.0 = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.Build.0 = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.Build.0 = Release|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Release|Any CPU.Build.0 = Release|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Release|Any CPU.Build.0 = Release|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Release|Any CPU.Build.0 = Release|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Release|Any CPU.ActiveCfg = Release|Any CPU
@@ -38,6 +78,13 @@ Global
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{E6A161EA-646C-4033-9090-95BE809AB8D9} = {23836492-E7F4-4376-85BF-A635C304AC46}
{1931B19A-EC42-4D56-B2D0-FB06D17244DA} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{D40BD1C4-6A6F-4213-8535-1057F3EB3400} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution

View File

@@ -2,8 +2,6 @@
<configuration>
<packageSources>
<clear />
<add key="AspNetCore" value="https://dotnet.myget.org/F/aspnetcore-ci-dev/api/v3/index.json" />
<add key="AspNetCoreTools" value="https://dotnet.myget.org/F/aspnetcore-tools/api/v3/index.json" />
<add key="NuGet" value="https://api.nuget.org/v3/index.json" />
<!-- Restore sources should be defined in build/sources.props. -->
</packageSources>
</configuration>

View File

@@ -0,0 +1,7 @@
{
"Default": {
"rules": [
"DefaultCompositeRule"
]
}
}

View File

@@ -1 +1,96 @@
This branch exists only to build `Microsoft.AspNetCore.SpaServices.Extensions` for an OOB release. It does not contain sources for any other packages or samples.
# JavaScriptServices
AppVeyor: [![AppVeyor](https://ci.appveyor.com/api/projects/status/gprilrckx116vc9m/branch/dev?svg=true)](https://ci.appveyor.com/project/aspnetci/javascriptservices/branch/dev)
This project is part of ASP.NET Core. You can find samples, documentation and getting started instructions for ASP.NET Core at the [Home](https://github.com/aspnet/home) repo.
## What is this?
`JavaScriptServices` is a set of client-side technologies for ASP.NET Core. It provides infrastructure that you'll find useful if you:
- Use Angular / React / Vue / Aurelia / Knockout / etc.
- Build your client-side resources using Webpack.
- Execute JavaScript on the server at runtime.
Read [Building Single Page Applications on ASP.NET Core with JavaScriptServices](https://blogs.msdn.microsoft.com/webdev/2017/02/14/building-single-page-applications-on-asp-net-core-with-javascriptservices/) for more details.
This repo contains:
* A set of NuGet/NPM packages that implement functionality for:
* Invoking arbitrary NPM packages at runtime from .NET code ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.NodeServices#simple-usage-example))
* Server-side prerendering of SPA components ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#server-side-prerendering))
* Webpack dev middleware ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#webpack-dev-middleware))
* Hot module replacement (HMR) ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#webpack-hot-module-replacement))
* Server-side and client-side routing integration ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#routing-helper-mapspafallbackroute))
* Server-side and client-side validation integration
* "Lazy loading" for Knockout apps
* Samples and docs
It's cross-platform (Windows, Linux, or macOS) and works with .NET Core 2.0 or later.
## Creating new applications
Prerequisites:
* [.NET Core 2.0](https://www.microsoft.com/net/core) (or later) SDK
* [Node.js](https://nodejs.org/) version 6 (or later)
With these prerequisites, you can immediately create new ASP.NET Core applications that use Angular, React, or React+Redux without having to install anything extra.
### Option 1: Creating Angular/React/Redux applications from the command line (cross-platform)
In an empty directory, run (for example) `dotnet new angular`. Other supported SPA frameworks include React and React+Redux. You can see the list of available SPA templates by running `dotnet new spa`.
Once the generator has run and restored all the dependencies, you can start up your new ASP.NET Core SPA:
npm install
dotnet run
### Option 2: Creating Angular/React/Redux applications using Visual Studio 2017 Update 3 or later (Windows only)
Using the `File`->`New Project` dialog, select *ASP.NET Core Web Application*. You will then be offered the option to create an application with Angular, React, or React+Redux. When the application is created, you can build and run it in the normal way.
### More info and other SPA frameworks
For a more detailed (albeit somewhat outdated) walkthrough, see [getting started with the `aspnetcore-spa` generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
If you want to build an ASP.NET Core application with Aurelia, Knockout, or Vue, you can use the `Microsoft.AspNetCore.SpaTemplates` package. On the command line, run `dotnet new --install Microsoft.AspNetCore.SpaTemplates`. Then you will be able to run `dotnet new aurelia` (or `dotnet new vue`, etc.) to create your new application.
## Adding to existing applications
If you have an existing ASP.NET Core application, or if you just want to use the underlying JavaScriptServices packages directly, you can install these packages using NuGet and NPM:
* `Microsoft.AspNetCore.NodeServices`
* This provides a fast and robust way for .NET code to run JavaScript on the server inside a Node.js environment. You can use this to consume arbitrary functionality from NPM packages at runtime in your ASP.NET Core app.
* Most applications developers don't need to use this directly, but you can do so if you want to implement your own functionality that involves calling Node.js code from .NET at runtime.
* Find [documentation and usage examples here](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.NodeServices#microsoftaspnetcorenodeservices).
* `Microsoft.AspNetCore.SpaServices`
* This provides infrastructure that's generally useful when building Single Page Applications (SPAs) with technologies such as Angular or React (for example, server-side prerendering and webpack middleware). Internally, it uses the `NodeServices` package to implement its features.
* Find [documentation and usage examples here](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#microsoftaspnetcorespaservices)
There were previously other packages called `Microsoft.AspNetCore.AngularServices` and `Microsoft.AspNetCore.ReactServices` but these are not currently needed - all applicable functionality is in `Microsoft.AspNetCore.SpaServices`, because it's sufficiently general.
If you want to build a helper library for some other SPA framework, you can do so by taking a dependency on `Microsoft.AspNetCore.SpaServices` and wrapping its functionality in whatever way is most useful for your SPA framework.
## Samples
The [`samples` directory](https://github.com/aspnet/JavaScriptServices/tree/dev/samples) contains examples of:
- Using the JavaScript services family of packages with Angular and React.
- A standalone `NodeServices` usage for runtime code transpilation and image processing.
**To run the samples:**
* Clone this repo
* At the repo's root directory (the one containing `src`, `samples`, etc.), run `dotnet restore`
* Change directory to the sample you want to run (for example, `cd samples/angular/MusicStore`)
* Restore Node dependencies by running `npm install`
* If you're trying to run the Angular "Music Store" sample, then also run `gulp` (which you need to have installed globally). None of the other samples require this.
* Run the application (`dotnet run`)
* Browse to [http://localhost:5000](http://localhost:5000)
## Contributing
If you're interested in contributing to the various packages, samples, and project templates in this repo, that's great!
Before working on a pull request, especially if it's more than a trivial fix (for example, for a typo), it's usually a good idea first to file an issue describing what you're proposing to do and how it will work. Then you can find out if it's likely that such a pull request will be accepted, and how it fits into wider ongoing plans.
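
For orientation, here is a minimal, hypothetical sketch of the NodeServices usage the README above describes (the full samples appear further down in this diff). It assumes an ASP.NET Core app that calls `services.AddNodeServices()` in `Startup` and a `./Node/addNumbers.js` module exporting a callback-style function; neither is part of this changeset.

```csharp
// Sketch only. Assumes Startup calls services.AddNodeServices() and that
// Node/addNumbers.js exists with: module.exports = (cb, a, b) => cb(null, a + b);
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.NodeServices;

public class AddController : Controller
{
    [HttpGet("/add")]
    public async Task<IActionResult> Add([FromServices] INodeServices nodeServices, int a, int b)
    {
        // InvokeAsync loads the Node module, invokes its exported function with
        // the supplied arguments, and marshals the callback result back to .NET.
        var sum = await nodeServices.InvokeAsync<int>("./Node/addNumbers", a, b);
        return Ok(sum);
    }
}
```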

View File

@@ -1,15 +0,0 @@
<Project>
<Target Name="Push" DependsOnTargets="_PushNuGet" />
<ItemGroup>
<PackagesToPush Include="$(BuildDir)*.nupkg" />
</ItemGroup>
<Target Name="_PushNuGet" Condition="@(PackagesToPush->Count()) != 0">
<Error Text="Missing required property: NuGetPublishFeed" Condition=" '$(NuGetPublishFeed)' == '' "/>
<PushNuGetPackages
Packages="@(PackagesToPush)"
Feed="$(NuGetPublishFeed)"
ApiKey="$(APIKey)" />
</Target>
</Project>

View File

@@ -1,27 +1,27 @@
<Project>
<PropertyGroup>
<MSBuildAllProjects>$(MSBuildAllProjects);$(MSBuildThisFileFullPath)</MSBuildAllProjects>
</PropertyGroup>
<PropertyGroup Label="Package Versions">
<InternalAspNetCoreSdkPackageVersion>2.1.0-preview1-15549</InternalAspNetCoreSdkPackageVersion>
<InternalAspNetCoreSdkPackageVersion>2.1.0-preview2-15749</InternalAspNetCoreSdkPackageVersion>
<MicrosoftAspNetCoreDiagnosticsPackageVersion>2.0.1</MicrosoftAspNetCoreDiagnosticsPackageVersion>
<MicrosoftAspNetCoreDiagnosticsPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreDiagnosticsPackageVersion>
<MicrosoftAspNetCoreHostingAbstractionsPackageVersion>2.0.1</MicrosoftAspNetCoreHostingAbstractionsPackageVersion>
<MicrosoftAspNetCoreHostingAbstractionsPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreHostingAbstractionsPackageVersion>
<MicrosoftAspNetCoreHostingPackageVersion>2.0.1</MicrosoftAspNetCoreHostingPackageVersion>
<MicrosoftAspNetCoreHostingPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreHostingPackageVersion>
<MicrosoftAspNetCoreMvcPackageVersion>2.0.1</MicrosoftAspNetCoreMvcPackageVersion>
<MicrosoftAspNetCoreMvcPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreMvcPackageVersion>
<MicrosoftAspNetCoreMvcTagHelpersPackageVersion>2.0.1</MicrosoftAspNetCoreMvcTagHelpersPackageVersion>
<MicrosoftAspNetCoreMvcTagHelpersPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreMvcTagHelpersPackageVersion>
<MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>2.0.1</MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>
<MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>
<MicrosoftAspNetCoreServerIISIntegrationPackageVersion>2.0.1</MicrosoftAspNetCoreServerIISIntegrationPackageVersion>
<MicrosoftAspNetCoreServerIISIntegrationPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreServerIISIntegrationPackageVersion>
<MicrosoftAspNetCoreServerKestrelPackageVersion>2.0.1</MicrosoftAspNetCoreServerKestrelPackageVersion>
<MicrosoftAspNetCoreServerKestrelPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreServerKestrelPackageVersion>
<MicrosoftAspNetCoreSpaServicesPackageVersion>2.0.1</MicrosoftAspNetCoreSpaServicesPackageVersion>
<MicrosoftAspNetCoreStaticFilesPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreStaticFilesPackageVersion>
<MicrosoftAspNetCoreStaticFilesPackageVersion>2.0.1</MicrosoftAspNetCoreStaticFilesPackageVersion>
<MicrosoftAspNetCoreWebSocketsPackageVersion>2.1.0-preview2-30478</MicrosoftAspNetCoreWebSocketsPackageVersion>
<MicrosoftAspNetCoreWebSocketsPackageVersion>2.0.1</MicrosoftAspNetCoreWebSocketsPackageVersion>
<MicrosoftExtensionsDependencyInjectionPackageVersion>2.1.0-preview2-30478</MicrosoftExtensionsDependencyInjectionPackageVersion>
<MicrosoftExtensionsDependencyInjectionPackageVersion>2.0.0</MicrosoftExtensionsDependencyInjectionPackageVersion>
<MicrosoftExtensionsFileProvidersPhysicalPackageVersion>2.1.0-preview2-30478</MicrosoftExtensionsFileProvidersPhysicalPackageVersion>
<MicrosoftExtensionsFileProvidersPhysicalPackageVersion>2.0.0</MicrosoftExtensionsFileProvidersPhysicalPackageVersion>
<MicrosoftExtensionsLoggingConsolePackageVersion>2.1.0-preview2-30478</MicrosoftExtensionsLoggingConsolePackageVersion>
<MicrosoftExtensionsLoggingConsolePackageVersion>2.0.0</MicrosoftExtensionsLoggingConsolePackageVersion>
<MicrosoftExtensionsLoggingDebugPackageVersion>2.1.0-preview2-30478</MicrosoftExtensionsLoggingDebugPackageVersion>
<MicrosoftExtensionsLoggingDebugPackageVersion>2.0.0</MicrosoftExtensionsLoggingDebugPackageVersion>
<MicrosoftNETCoreApp20PackageVersion>2.0.0</MicrosoftNETCoreApp20PackageVersion>
<NewtonsoftJsonPackageVersion>10.0.1</NewtonsoftJsonPackageVersion>
<MicrosoftNETCoreApp21PackageVersion>2.1.0-preview2-26326-03</MicrosoftNETCoreApp21PackageVersion>
<SystemThreadingTasksDataflowPackageVersion>4.8.0</SystemThreadingTasksDataflowPackageVersion>
<NewtonsoftJsonPackageVersion>11.0.2</NewtonsoftJsonPackageVersion>
<SystemThreadingTasksDataflowPackageVersion>4.9.0-preview2-26326-04</SystemThreadingTasksDataflowPackageVersion>
</PropertyGroup>
<Import Project="$(DotNetPackageVersionPropsPath)" Condition=" '$(DotNetPackageVersionPropsPath)' != '' " />
</Project>
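
The property names in this file follow the convention noted in commit 86e94d7812 in the list above (`packageId.Pascalize() + "PackageVersion"`). As a rough, hypothetical illustration of that mapping (the real automation lives in the aspnet build tooling, not in this repo, and the "few exceptions" the commit message mentions are not handled here):

```csharp
// Hypothetical helper: shows how a NuGet package id maps to the MSBuild
// property name used in dependencies.props. Not part of this repository.
using System;
using System.Linq;

static class PackageVersionProperty
{
    public static string For(string packageId) =>
        string.Concat(packageId
            .Split('.', '-')
            .Where(part => part.Length > 0)
            .Select(part => char.ToUpperInvariant(part[0]) + part.Substring(1)))
        + "PackageVersion";

    static void Main()
    {
        Console.WriteLine(For("Microsoft.AspNetCore.Mvc")); // MicrosoftAspNetCoreMvcPackageVersion
        Console.WriteLine(For("Newtonsoft.Json"));          // NewtonsoftJsonPackageVersion
    }
}
```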

View File

@@ -1,7 +1,14 @@
<Project>
<Import Project="dependencies.props" />
<PropertyGroup>
<!-- These properties are use by the automation that updates dependencies.props -->
<LineupPackageId>Internal.AspNetCore.Universe.Lineup</LineupPackageId>
<LineupPackageRestoreSource>https://dotnet.myget.org/F/aspnetcore-ci-dev/api/v3/index.json</LineupPackageRestoreSource>
<LineupPackageRestoreSource>https://dotnet.myget.org/F/aspnetcore-release/api/v3/index.json</LineupPackageRestoreSource>
</PropertyGroup>
<ItemGroup>
<DotNetCoreRuntime Include="$(MicrosoftNETCoreApp20PackageVersion)" />
<DotNetCoreRuntime Include="$(MicrosoftNETCoreApp21PackageVersion)" />
</ItemGroup>
</Project>

View File

@@ -1,3 +0,0 @@
<Project>
<Import Project="Push.targets" />
</Project>

17
build/sources.props Normal file
View File

@@ -0,0 +1,17 @@
<Project>
<Import Project="$(DotNetRestoreSourcePropsPath)" Condition="'$(DotNetRestoreSourcePropsPath)' != ''"/>
<PropertyGroup Label="RestoreSources">
<RestoreSources>$(DotNetRestoreSources)</RestoreSources>
<RestoreSources Condition="'$(DotNetBuildOffline)' != 'true' AND '$(AspNetUniverseBuildOffline)' != 'true' ">
$(RestoreSources);
https://dotnet.myget.org/F/dotnet-core/api/v3/index.json;
https://dotnet.myget.org/F/aspnetcore-release/api/v3/index.json;
https://dotnet.myget.org/F/aspnetcore-tools/api/v3/index.json;
</RestoreSources>
<RestoreSources Condition="'$(DotNetBuildOffline)' != 'true'">
$(RestoreSources);
https://api.nuget.org/v3/index.json;
</RestoreSources>
</PropertyGroup>
</Project>

View File

@@ -1,2 +1,2 @@
version:2.1.0-preview1-15549
version:2.1.0-preview2-15749
commithash:f570e08585fec510dd60cd4bfe8795388b757a95
commithash:5544c9ab20fa5e24b9e155d8958a3c3b6f5f9df9

View File

@@ -1,6 +1,6 @@
{
"$schema": "https://raw.githubusercontent.com/aspnet/BuildTools/dev/tools/korebuild.schema.json",
"$schema": "https://raw.githubusercontent.com/aspnet/BuildTools/release/2.1/tools/korebuild.schema.json",
"channel": "dev",
"channel": "release/2.1",
"toolsets": {
"nodejs": {
"required": true,

15
run.ps1
View File

@@ -29,6 +29,9 @@ Updates KoreBuild to the latest version even if a lock file is present.
.PARAMETER ConfigFile
The path to the configuration file that stores values. Defaults to korebuild.json.
.PARAMETER ToolsSourceSuffix
The Suffix to append to the end of the ToolsSource. Useful for query strings in blob stores.
.PARAMETER Arguments
Arguments to be passed to the command
@@ -63,6 +66,7 @@ param(
[Alias('u')]
[switch]$Update,
[string]$ConfigFile,
[string]$ToolsSourceSuffix,
[Parameter(ValueFromRemainingArguments = $true)]
[string[]]$Arguments
)
@@ -79,7 +83,7 @@ function Get-KoreBuild {
$lockFile = Join-Path $Path 'korebuild-lock.txt'
if (!(Test-Path $lockFile) -or $Update) {
Get-RemoteFile "$ToolsSource/korebuild/channels/$Channel/latest.txt" $lockFile
Get-RemoteFile "$ToolsSource/korebuild/channels/$Channel/latest.txt" $lockFile $ToolsSourceSuffix
}
$version = Get-Content $lockFile | Where-Object { $_ -like 'version:*' } | Select-Object -first 1
@@ -96,7 +100,7 @@ function Get-KoreBuild {
try {
$tmpfile = Join-Path ([IO.Path]::GetTempPath()) "KoreBuild-$([guid]::NewGuid()).zip"
Get-RemoteFile $remotePath $tmpfile
Get-RemoteFile $remotePath $tmpfile $ToolsSourceSuffix
if (Get-Command -Name 'Expand-Archive' -ErrorAction Ignore) {
# Use built-in commands where possible as they are cross-plat compatible
Expand-Archive -Path $tmpfile -DestinationPath $korebuildPath
@@ -124,7 +128,7 @@ function Join-Paths([string]$path, [string[]]$childPaths) {
return $path
}
function Get-RemoteFile([string]$RemotePath, [string]$LocalPath) {
function Get-RemoteFile([string]$RemotePath, [string]$LocalPath, [string]$RemoteSuffix) {
if ($RemotePath -notlike 'http*') {
Copy-Item $RemotePath $LocalPath
return
@@ -134,7 +138,7 @@ function Get-RemoteFile([string]$RemotePath, [string]$LocalPath) {
while ($retries -gt 0) {
$retries -= 1
try {
Invoke-WebRequest -UseBasicParsing -Uri $RemotePath -OutFile $LocalPath
Invoke-WebRequest -UseBasicParsing -Uri $($RemotePath + $RemoteSuffix) -OutFile $LocalPath
return
}
catch {
@@ -161,7 +165,8 @@ if (Test-Path $ConfigFile) {
if (!($Channel) -and (Get-Member -Name 'channel' -InputObject $config)) { [string] $Channel = $config.channel }
if (!($ToolsSource) -and (Get-Member -Name 'toolsSource' -InputObject $config)) { [string] $ToolsSource = $config.toolsSource}
}
} catch {
}
catch {
Write-Warning "$ConfigFile could not be read. Its settings will be ignored."
Write-Warning $Error[0]
}

16
run.sh
View File

@@ -17,6 +17,7 @@ update=false
repo_path="$DIR" repo_path="$DIR"
channel='' channel=''
tools_source='' tools_source=''
tools_source_suffix=''
# #
# Functions # Functions
@@ -35,6 +36,7 @@ __usage() {
echo " -d|--dotnet-home <DIR> The directory where .NET Core tools will be stored. Defaults to '\$DOTNET_HOME' or '\$HOME/.dotnet." echo " -d|--dotnet-home <DIR> The directory where .NET Core tools will be stored. Defaults to '\$DOTNET_HOME' or '\$HOME/.dotnet."
echo " --path <PATH> The directory to build. Defaults to the directory containing the script." echo " --path <PATH> The directory to build. Defaults to the directory containing the script."
echo " -s|--tools-source|-ToolsSource <URL> The base url where build tools can be downloaded. Overrides the value from the config file." echo " -s|--tools-source|-ToolsSource <URL> The base url where build tools can be downloaded. Overrides the value from the config file."
echo " --tools-source-suffix|-ToolsSourceSuffix <SUFFIX> The suffix to append to tools-source. Useful for query strings."
echo " -u|--update Update to the latest KoreBuild even if the lock file is present." echo " -u|--update Update to the latest KoreBuild even if the lock file is present."
echo "" echo ""
echo "Description:" echo "Description:"
@@ -50,7 +52,7 @@ get_korebuild() {
local version local version
local lock_file="$repo_path/korebuild-lock.txt" local lock_file="$repo_path/korebuild-lock.txt"
if [ ! -f "$lock_file" ] || [ "$update" = true ]; then if [ ! -f "$lock_file" ] || [ "$update" = true ]; then
__get_remote_file "$tools_source/korebuild/channels/$channel/latest.txt" "$lock_file" __get_remote_file "$tools_source/korebuild/channels/$channel/latest.txt" "$lock_file" "$tools_source_suffix"
fi fi
version="$(grep 'version:*' -m 1 "$lock_file")" version="$(grep 'version:*' -m 1 "$lock_file")"
if [[ "$version" == '' ]]; then if [[ "$version" == '' ]]; then
@@ -66,7 +68,7 @@ get_korebuild() {
local remote_path="$tools_source/korebuild/artifacts/$version/korebuild.$version.zip" local remote_path="$tools_source/korebuild/artifacts/$version/korebuild.$version.zip"
tmpfile="$(mktemp)" tmpfile="$(mktemp)"
echo -e "${MAGENTA}Downloading KoreBuild ${version}${RESET}" echo -e "${MAGENTA}Downloading KoreBuild ${version}${RESET}"
if __get_remote_file "$remote_path" "$tmpfile"; then if __get_remote_file "$remote_path" "$tmpfile" "$tools_source_suffix"; then
unzip -q -d "$korebuild_path" "$tmpfile" unzip -q -d "$korebuild_path" "$tmpfile"
fi fi
rm "$tmpfile" || true rm "$tmpfile" || true
@@ -98,6 +100,7 @@ __machine_has() {
__get_remote_file() { __get_remote_file() {
local remote_path=$1 local remote_path=$1
local local_path=$2 local local_path=$2
local remote_path_suffix=$3
if [[ "$remote_path" != 'http'* ]]; then if [[ "$remote_path" != 'http'* ]]; then
cp "$remote_path" "$local_path" cp "$remote_path" "$local_path"
@@ -106,14 +109,14 @@ __get_remote_file() {
local failed=false local failed=false
if __machine_has wget; then if __machine_has wget; then
wget --tries 10 --quiet -O "$local_path" "$remote_path" || failed=true wget --tries 10 --quiet -O "$local_path" "${remote_path}${remote_path_suffix}" || failed=true
else else
failed=true failed=true
fi fi
if [ "$failed" = true ] && __machine_has curl; then if [ "$failed" = true ] && __machine_has curl; then
failed=false failed=false
curl --retry 10 -sSL -f --create-dirs -o "$local_path" "$remote_path" || failed=true curl --retry 10 -sSL -f --create-dirs -o "$local_path" "${remote_path}${remote_path_suffix}" || failed=true
fi fi
if [ "$failed" = true ]; then if [ "$failed" = true ]; then
@@ -164,6 +167,11 @@ while [[ $# -gt 0 ]]; do
tools_source="${1:-}" tools_source="${1:-}"
[ -z "$tools_source" ] && __usage [ -z "$tools_source" ] && __usage
;; ;;
--tools-source-suffix|-ToolsSourceSuffix)
shift
tools_source_suffix="${1:-}"
[ -z "$tools_source_suffix" ] && __usage
;;
-u|--update|-Update) -u|--update|-Update)
update=true update=true
;; ;;

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFrameworks>netcoreapp2.1;net461</TargetFrameworks>
<IsPackable>false</IsPackable>
<OutputType>exe</OutputType>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.NodeServices.Sockets\Microsoft.AspNetCore.NodeServices.Sockets.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="$(MicrosoftExtensionsDependencyInjectionPackageVersion)" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,52 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.NodeServices.Sockets;
using Microsoft.Extensions.DependencyInjection;
namespace ConsoleApplication
{
// This project is a micro-benchmark for .NET->Node RPC via NodeServices. It doesn't reflect
// real-world usage patterns (you're not likely to make hundreds of sequential calls like this),
// but is a starting point for comparing the overhead of different hosting models and transports.
public class Program
{
public static void Main(string[] args) {
// Set up the DI system
var services = new ServiceCollection();
services.AddNodeServices(options => {
// To compare with Socket hosting, uncomment the following line
// Since .NET Core 1.1, the HTTP hosting model has become basically as fast as the Socket hosting model
//options.UseSocketHosting();
options.WatchFileExtensions = new string[] {}; // Don't watch anything
});
var serviceProvider = services.BuildServiceProvider();
// Now instantiate an INodeServices and use it
using (var nodeServices = serviceProvider.GetRequiredService<INodeServices>()) {
MeasureLatency(nodeServices).Wait();
}
}
private static async Task MeasureLatency(INodeServices nodeServices) {
// Ensure the connection is open, so we can measure per-request timings below
var response = await nodeServices.InvokeAsync<string>("latencyTest", "C#");
Console.WriteLine(response);
// Now perform a series of requests, capturing the time taken
const int requestCount = 100;
var watch = Stopwatch.StartNew();
for (var i = 0; i < requestCount; i++) {
await nodeServices.InvokeAsync<string>("latencyTest", "C#");
}
// Display results
var elapsedSeconds = (float)watch.ElapsedTicks / Stopwatch.Frequency;
Console.WriteLine("\nTotal time: {0:F2} milliseconds", 1000 * elapsedSeconds);
Console.WriteLine("\nTime per invocation: {0:F2} milliseconds", 1000 * elapsedSeconds / requestCount);
}
}
}

View File

@@ -0,0 +1,4 @@
module.exports = function(callback, incomingParam1) {
var result = 'Hello, ' + incomingParam1 + '!';
callback(/* error */ null, result);
}

View File

@@ -0,0 +1,2 @@
/node_modules/
/Properties/launchSettings.json

View File

@@ -0,0 +1,57 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.SpaServices.Prerendering;
namespace NodeServicesExamples.Controllers
{
public class HomeController : Controller
{
public IActionResult Index(int pageIndex)
{
return View();
}
public IActionResult ES2015Transpilation()
{
return View();
}
public async Task<IActionResult> Chart([FromServices] INodeServices nodeServices)
{
var options = new { width = 400, height = 200, showArea = true, showPoint = true, fullWidth = true };
var data = new
{
labels = new[] { "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" },
series = new[] {
new[] { 1, 5, 2, 5, 4, 3 },
new[] { 2, 3, 4, 8, 1, 2 },
new[] { 5, 4, 3, 2, 1, 0 }
}
};
ViewData["ChartMarkup"] = await nodeServices.InvokeAsync<string>("./Node/renderChart", "line", options, data);
return View();
}
public async Task<IActionResult> Prerendering([FromServices] ISpaPrerenderer prerenderer)
{
var result = await prerenderer.RenderToString("./Node/prerenderPage");
if (!string.IsNullOrEmpty(result.RedirectUrl))
{
return Redirect(result.RedirectUrl);
}
ViewData["PrerenderedHtml"] = result.Html;
ViewData["PrerenderedGlobals"] = result.CreateGlobalsAssignmentScript();
return View();
}
public IActionResult Error()
{
return View("~/Views/Shared/Error.cshtml");
}
}
}

View File

@@ -0,0 +1,14 @@
var createServerRenderer = require('aspnet-prerendering').createServerRenderer;
module.exports = createServerRenderer(function(params) {
return new Promise(function (resolve, reject) {
var message = 'The HTML was returned by the prerendering boot function. '
+ 'The boot function received the following params:'
+ '<pre>' + JSON.stringify(params, null, 4) + '</pre>';
resolve({
html: '<h3>Hello, world!</h3>' + message,
globals: { sampleData: { nodeVersion: process.version } }
});
});
});

View File

@@ -0,0 +1,8 @@
var generate = require('node-chartist');
module.exports = function (callback, type, options, data) {
generate(type, options, data).then(
result => callback(null, result), // Success case
error => callback(error) // Error case
);
};

View File

@@ -0,0 +1,12 @@
var fs = require('fs');
var babelCore = require('babel-core');
module.exports = function(cb, physicalPath, requestPath) {
var originalContents = fs.readFileSync(physicalPath);
var result = babelCore.transform(originalContents, {
presets: ['es2015'],
sourceMaps: 'inline',
sourceFileName: '/sourcemapped' + requestPath
});
cb(null, result.code);
}

View File

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFrameworks>netcoreapp2.1;net461</TargetFrameworks>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Diagnostics" Version="$(MicrosoftAspNetCoreDiagnosticsPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Hosting" Version="$(MicrosoftAspNetCoreHostingPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="$(MicrosoftAspNetCoreServerIISIntegrationPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Mvc" Version="$(MicrosoftAspNetCoreMvcPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="$(MicrosoftAspNetCoreServerKestrelPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.StaticFiles" Version="$(MicrosoftAspNetCoreStaticFilesPackageVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="$(MicrosoftExtensionsLoggingDebugPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish">
<Exec Command="npm install" />
</Target>
</Project>

View File

@@ -0,0 +1,70 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System.IO;
namespace NodeServicesExamples
{
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
// Enable Node Services
services.AddNodeServices();
services.AddSpaPrerenderer();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, ILoggerFactory loggerFactory, IHostingEnvironment env, INodeServices nodeServices)
{
app.UseDeveloperExceptionPage();
// Dynamically transpile any .js files under the '/js/' directory
app.Use(next => async context => {
var requestPath = context.Request.Path.Value;
if (requestPath.StartsWith("/js/") && requestPath.EndsWith(".js")) {
var fileInfo = env.WebRootFileProvider.GetFileInfo(requestPath);
if (fileInfo.Exists) {
var transpiled = await nodeServices.InvokeAsync<string>("./Node/transpilation.js", fileInfo.PhysicalPath, requestPath);
await context.Response.WriteAsync(transpiled);
return;
}
}
// Not a JS file, or doesn't exist - let some other middleware handle it
await next.Invoke(context);
});
app.UseStaticFiles();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "default",
template: "{controller=Home}/{action=Index}/{id?}");
});
}
public static void Main(string[] args)
{
var host = new WebHostBuilder()
.ConfigureLogging(factory =>
{
factory.AddConsole();
factory.AddDebug();
})
.UseContentRoot(Directory.GetCurrentDirectory())
.UseIISIntegration()
.UseKestrel()
.UseStartup<Startup>()
.Build();
host.Run();
}
}
}

View File

@@ -0,0 +1,12 @@
<h1>Server-rendered chart</h1>
<p>
This sample demonstrates how arbitrary NPM modules can be invoked from .NET code.
</p>
<p>
In this case, we use <code>node-chartist</code> to render the following chart on the server. The output is
identical to what you'd get if you used <a href='https://gionkunz.github.io/chartist-js/'>chartist.js</a>
on the client, except that in this example, we're not executing any client-side code at all.
</p>
@Html.Raw(ViewData["ChartMarkup"])

View File

@@ -0,0 +1,16 @@
<h1>ES2015 Transpilation</h1>
<p>
This sample demonstrates a way of intercepting requests for .js files and dynamically transpiling them
from ES2015 code to browser-compatible ES5 code using the Babel library.
</p>
<p>
To see that it's working, open your browser's 'Debug' console and look for the log message. This is
produced by the file <a href='/js/main.js'>/js/main.js</a>, which is transpiled from ES2015 dynamically
when requested.
</p>
@section scripts {
<script src='/js/main.js'></script>
}

View File

@@ -0,0 +1,13 @@
<h1>NodeServices examples</h1>
<p>
These examples demonstrate the direct use of the NodeServices package, independently of the usual SPA scenarios.
In general, NodeServices offers an efficient way to use Node-provided functionality (e.g., NPM modules) from inside
a .NET application.
</p>
<ul>
<li><a asp-action="ES2015Transpilation">ES2015 transpilation</a></li>
<li><a asp-action="Chart">Server-side chart rendering</a></li>
<li><a asp-action="Prerendering">Server-side SPA prerendering</a></li>
</ul>

View File

@@ -0,0 +1,21 @@
<h1>Server-side prerendering</h1>
<p>
This sample demonstrates how you can invoke a JavaScript module that contains
prerendering logic for a Single-Page Application framework.
</p>
<p>
Your prerendering boot function will receive parameters that describe the page
being rendered and any data supplied by the .NET code. The return value should be
a promise that resolves with data to be injected into the page, such as the
rendered HTML and any global data that should be made available to client-side code.
</p>
@Html.Raw(ViewData["PrerenderedHtml"])
<script>@Html.Raw(ViewData["PrerenderedGlobals"])</script>
<script>
// Demonstrates how client-side code can receive data from the prerendering process
console.log('Received Node version from prerendering logic: ' + sampleData.nodeVersion);
</script>

View File

@@ -0,0 +1,6 @@
@{
ViewData["Title"] = "Error";
}
<h1 class="text-danger">Error.</h1>
<h2 class="text-danger">An error occurred while processing your request.</h2>

View File

@@ -0,0 +1,12 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<title>NodeServices Examples</title>
<link rel="stylesheet" href="~/css/chartist.min.css" />
</head>
<body>
@RenderBody()
@RenderSection("scripts", required: false)
</body>
</html>

View File

@@ -0,0 +1,2 @@
@using NodeServicesExamples
@addTagHelper "*, Microsoft.AspNetCore.Mvc.TagHelpers"

View File

@@ -0,0 +1,3 @@
@{
Layout = "_Layout";
}

View File

@@ -0,0 +1 @@
{}

View File

@@ -0,0 +1,6 @@
{
"compilerOptions": {
"target": "ES6",
"module": "commonjs"
}
}

View File

@@ -0,0 +1,10 @@
{
"name": "nodeservicesexamples",
"version": "0.0.0",
"dependencies": {
"aspnet-prerendering": "^2.0.6",
"babel-core": "^6.7.4",
"babel-preset-es2015": "^6.6.0",
"node-chartist": "^1.0.2"
}
}

File diff suppressed because one or more lines are too long

Binary file not shown (image, 31 KiB).

View File

@@ -0,0 +1,7 @@
class Greeting {
getMessage() {
return 'Hello from the ES2015 class';
}
}
console.log(new Greeting().getMessage());

View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<system.webServer>
<handlers>
<add name="httpPlatformHandler" path="*" verb="*" modules="httpPlatformHandler" resourceType="Unspecified" />
</handlers>
<httpPlatform processPath="%DNX_PATH%" arguments="%DNX_ARGS%" stdoutLogEnabled="false" forwardWindowsAuthToken="false" startupTimeLimit="3600" />
</system.webServer>
</configuration>

233
samples/misc/Webpack/.gitignore vendored Normal file
View File

@@ -0,0 +1,233 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
yarn.lock
wwwroot/dist
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Microsoft Azure ApplicationInsights config file
ApplicationInsights.config
# Windows Store app package directory
AppPackages/
BundleArtifacts/
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
orleans.codegen.cs
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
# FAKE - F# Make
.fake/

View File

@@ -0,0 +1,51 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.SpaServices.Prerendering;
using Microsoft.Extensions.DependencyInjection;
namespace Webpack.ActionResults
{
// This is an example of how you could invoke the prerendering API from an ActionResult, so as to
// prerender a SPA component as the entire response page (instead of injecting the SPA component
// into a Razor view's output)
public class PrerenderResult : ActionResult
{
private JavaScriptModuleExport _moduleExport;
private object _dataToSupply;
public PrerenderResult(JavaScriptModuleExport moduleExport, object dataToSupply = null)
{
_moduleExport = moduleExport;
_dataToSupply = dataToSupply;
}
public override async Task ExecuteResultAsync(ActionContext context)
{
var nodeServices = context.HttpContext.RequestServices.GetRequiredService<INodeServices>();
var hostEnv = context.HttpContext.RequestServices.GetRequiredService<IHostingEnvironment>();
var applicationLifetime = context.HttpContext.RequestServices.GetRequiredService<IApplicationLifetime>();
var applicationBasePath = hostEnv.ContentRootPath;
var request = context.HttpContext.Request;
var response = context.HttpContext.Response;
var prerenderedHtml = await Prerenderer.RenderToString(
applicationBasePath,
nodeServices,
applicationLifetime.ApplicationStopping,
_moduleExport,
request.GetEncodedUrl(),
request.Path + request.QueryString.Value,
_dataToSupply,
/* timeoutMilliseconds */ 30000,
/* requestPathBase */ "/"
);
response.ContentType = "text/html";
await response.WriteAsync(prerenderedHtml.Html);
}
}
}

View File

@@ -0,0 +1,13 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SpaServices.Prerendering;
namespace Webpack.ActionResults
{
public static class PrerenderResultExtensions
{
public static PrerenderResult Prerender(this ControllerBase controller, JavaScriptModuleExport exportToPrerender, object dataToSupply = null)
{
return new PrerenderResult(exportToPrerender, dataToSupply);
}
}
}
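For illustration only, not part of this diff: a controller action using the extension above could look like the following sketch. The module path and export name are hypothetical placeholders, and the ExportName property is assumed from this repo's prerendering API.
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SpaServices.Prerendering;
using Webpack.ActionResults;
namespace Webpack.Controllers
{
    public class SpaController : Controller
    {
        public IActionResult Spa()
        {
            // Prerender the SPA component as the entire response body,
            // optionally passing some initial data through to the JavaScript module
            var export = new JavaScriptModuleExport("ClientSide/prerender-page")
            {
                ExportName = "renderPage"
            };
            return this.Prerender(export, new { initialMessage = "Hello" });
        }
    }
}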

View File

@@ -0,0 +1,4 @@
import { HelloWorld } from './HelloWorld';
import './styles/main.less';
new HelloWorld().doIt();

View File

@@ -0,0 +1,5 @@
export class HelloWorld {
public doIt() {
console.log('Hello from MyApp');
}
}

View File

@@ -0,0 +1,5 @@
@headerColor: red;
h1 {
color: @headerColor;
}

View File

@@ -0,0 +1,16 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
namespace Webpack.Controllers
{
public class HomeController : Controller
{
public IActionResult Index()
{
return View();
}
}
}

View File

@@ -0,0 +1,25 @@
{
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:51463/",
"sslPort": 0
}
},
"profiles": {
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"dotnet cli": {
"commandName": "Project",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}

samples/misc/Webpack/Startup.cs Executable file
View File

@@ -0,0 +1,60 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.SpaServices.Webpack;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System.IO;
using Microsoft.AspNetCore.NodeServices;
namespace Webpack
{
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
services.AddNodeServices();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, ILoggerFactory loggerFactory, IHostingEnvironment env)
{
app.UseDeveloperExceptionPage();
// For real apps, you should only use Webpack Dev Middleware at development time. For production,
// you'll get better performance and reliability if you precompile the webpack output and simply
// serve the resulting static files. For examples of setting up this automatic switch between
// development-style and production-style webpack usage, see the 'templates' dir in this repo.
app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions {
HotModuleReplacement = true
});
app.UseStaticFiles();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "default",
template: "{controller=Home}/{action=Index}/{id?}");
});
}
public static void Main(string[] args)
{
var host = new WebHostBuilder()
.ConfigureLogging(factory =>
{
factory.AddConsole();
factory.AddDebug();
})
.UseContentRoot(Directory.GetCurrentDirectory())
.UseIISIntegration()
.UseKestrel()
.UseStartup<Startup>()
.Build();
host.Run();
}
}
}
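As a side note, not part of this diff: the comment above about switching between development-style and production-style webpack usage typically amounts to guarding the dev middleware with an environment check. A minimal sketch, assuming the standard IHostingEnvironment API and reusing the usings from the Startup.cs above:
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
        // Build webpack output in memory and enable HMR only during development
        app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions
        {
            HotModuleReplacement = true
        });
    }
    // In production, serve the precompiled webpack output in wwwroot/dist as static files
    app.UseStaticFiles();
    app.UseMvcWithDefaultRoute();
}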

View File

@@ -0,0 +1,10 @@
@{
ViewData["Title"] = "Home Page";
}
<h1>Hello</h1>
Hi there. Enter some text: <input />
@section scripts {
<script src="dist/main.js"></script>
}

View File

@@ -0,0 +1,6 @@
@{
ViewData["Title"] = "Error";
}
<h1 class="text-danger">Error.</h1>
<h2 class="text-danger">An error occurred while processing your request.</h2>

View File

@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>@ViewData["Title"]</title>
<environment names="Production">
<link rel="stylesheet" href="dist/my-styles.css" />
</environment>
</head>
<body>
@RenderBody()
@RenderSection("scripts", required: false)
</body>
</html>

View File

@@ -0,0 +1,2 @@
@using Webpack
@addTagHelper "*, Microsoft.AspNetCore.Mvc.TagHelpers"

View File

@@ -0,0 +1,3 @@
@{
Layout = "_Layout";
}

View File

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFrameworks>netcoreapp2.1;net461</TargetFrameworks>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Diagnostics" Version="$(MicrosoftAspNetCoreDiagnosticsPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Hosting" Version="$(MicrosoftAspNetCoreHostingPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="$(MicrosoftAspNetCoreServerIISIntegrationPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Mvc" Version="$(MicrosoftAspNetCoreMvcPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="$(MicrosoftAspNetCoreServerKestrelPackageVersion)" />
<PackageReference Include="Microsoft.AspNetCore.StaticFiles" Version="$(MicrosoftAspNetCoreStaticFilesPackageVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="$(MicrosoftExtensionsLoggingDebugPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish">
<Exec Command="npm install" />
</Target>
</Project>

View File

@@ -0,0 +1,10 @@
{
"Logging": {
"IncludeScopes": false,
"LogLevel": {
"Default": "Verbose",
"System": "Information",
"Microsoft": "Information"
}
}
}

View File

@@ -0,0 +1,20 @@
{
"name": "Webpack",
"version": "0.0.0",
"devDependencies": {
"css-loader": "^0.23.1",
"extendify": "^1.0.0",
"extract-text-webpack-plugin": "^1.0.1",
"less": "^2.6.0",
"less-loader": "^2.2.2",
"style-loader": "^0.13.0",
"webpack-hot-middleware": "^2.7.1"
},
"dependencies": {
"aspnet-prerendering": "^1.0.4",
"aspnet-webpack": "^1.0.3",
"ts-loader": "^0.8.1",
"typescript": "^2.0.0",
"webpack": "^1.13.3"
}
}

View File

@@ -0,0 +1,13 @@
{
"compilerOptions": {
"moduleResolution": "node",
"module": "commonjs",
"target": "es5",
"jsx": "preserve",
"sourceMap": true,
"lib": ["es6", "dom"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -0,0 +1,8 @@
module.exports = {
devtool: 'inline-source-map',
module: {
loaders: [
{ test: /\.less$/, loader: 'style-loader!css-loader!less-loader' }
]
}
};

View File

@@ -0,0 +1,25 @@
var path = require('path');
var merge = require('extendify')({ isDeep: true, arrays: 'concat' });
var devConfig = require('./webpack.config.dev');
var prodConfig = require('./webpack.config.prod');
var isDevelopment = process.env.ASPNETCORE_ENVIRONMENT === 'Development';
module.exports = merge({
resolve: {
extensions: [ '', '.js', '.jsx', '.ts', '.tsx' ]
},
module: {
loaders: [
{ test: /\.ts(x?)$/, exclude: /node_modules/, loader: 'ts-loader?silent' }
],
},
entry: {
main: ['./Clientside/App.ts']
},
output: {
path: path.join(__dirname, 'wwwroot', 'dist'),
filename: '[name].js',
publicPath: '/dist/'
},
plugins: []
}, isDevelopment ? devConfig : prodConfig);

View File

@@ -0,0 +1,15 @@
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var extractLESS = new ExtractTextPlugin('my-styles.css');
module.exports = {
module: {
loaders: [
{ test: /\.less$/, loader: extractLESS.extract(['css-loader', 'less-loader']) },
]
},
plugins: [
extractLESS,
new webpack.optimize.UglifyJsPlugin({ minimize: true, compressor: { warnings: false } })
]
};

Binary file not shown (new image, 31 KiB).

View File

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<system.webServer>
<handlers>
<add name="httpPlatformHandler" path="*" verb="*" modules="httpPlatformHandler" resourceType="Unspecified"/>
</handlers>
<httpPlatform processPath="%DNX_PATH%" arguments="%DNX_ARGS%" stdoutLogEnabled="false"/>
</system.webServer>
</configuration>

View File

@@ -0,0 +1,3 @@
/bin/
/node_modules/
yarn.lock

View File

@@ -0,0 +1,524 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
var net = __webpack_require__(3);
var path = __webpack_require__(4);
var readline = __webpack_require__(5);
var ArgsUtil_1 = __webpack_require__(6);
var ExitWhenParentExits_1 = __webpack_require__(7);
var virtualConnectionServer = __webpack_require__(8);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
// Signal to the .NET side when we're ready to accept invocations
var server = net.createServer().on('listening', function () {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', function (connection) {
readline.createInterface(connection, null).on('line', function (line) {
try {
// Get a reference to the function to invoke
var invocation = JSON.parse(line);
var invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
var invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
var hasInvokedCallback_1 = false;
var invocationCallback = function (errorValue, successValue) {
if (hasInvokedCallback_1) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback_1 = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: function () {
hasInvokedCallback_1 = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
}
catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
var useWindowsNamedPipes = /^win/.test(process.platform);
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid));
/***/ },
/* 2 */
/***/ function(module, exports) {
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("net");
/***/ },
/* 4 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("readline");
/***/ },
/* 6 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 7 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ },
/* 8 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var events_1 = __webpack_require__(9);
var VirtualConnection_1 = __webpack_require__(10);
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
var MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
function createInterface(server) {
var emitter = new events_1.EventEmitter();
server.on('connection', function (socket) {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
var childSockets = new VirtualConnectionsCollection(socket, function (virtualConnection) {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
exports.createInterface = createInterface;
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
var VirtualConnectionsCollection = (function () {
function VirtualConnectionsCollection(_socket, _onVirtualConnectionCallback) {
var _this = this;
this._socket = _socket;
this._onVirtualConnectionCallback = _onVirtualConnectionCallback;
this._currentFrameHeader = null;
this._virtualConnections = {};
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', function () {
Object.getOwnPropertyNames(_this._virtualConnections).forEach(function (id) {
// A 'null' frame signals that the connection was closed remotely
_this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, open virtual connections as needed, and dispatch data to them.
*/
VirtualConnectionsCollection.prototype._onIncomingDataAvailable = function () {
var exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
else {
var frameBodyLength = this._currentFrameHeader.bodyLength;
var frameBodyOrNull = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
var headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
}
else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
};
VirtualConnectionsCollection.prototype._onReceivedCompleteFrame = function (header, bodyIfNotEmpty) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
var virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
};
VirtualConnectionsCollection.prototype._getOrOpenVirtualConnection = function (header) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
}
else {
// It's a new one
return this._openVirtualConnection(header);
}
};
VirtualConnectionsCollection.prototype._openVirtualConnection = function (header) {
var _this = this;
var beginWriteCallback = function (data, writeCompletedCallback) {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
_this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
var newVirtualConnection = new VirtualConnection_1.VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', function () {
// The virtual connection was closed remotely. Clean up locally.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', function () {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
_this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
};
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
VirtualConnectionsCollection.prototype._readNextFrameHeader = function () {
var headerBuf = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
var connectionIdBinary = headerBuf.slice(0, 8);
var connectionIdString = connectionIdBinary.toString('hex');
var bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary: connectionIdBinary, connectionIdString: connectionIdString, bodyLength: bodyLength };
}
else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
};
VirtualConnectionsCollection.prototype._sendFrame = function (connectionIdBinary, data, callback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
var hasInvokedCallback = false;
var finalCallback = callback && (function (error) {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
var notFinalCallback = callback && (function (error) {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
var bytesSent = 0;
do {
var nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
var isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
};
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
VirtualConnectionsCollection.prototype._sendInt32LE = function (value, callback) {
var buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
};
VirtualConnectionsCollection.prototype._onVirtualConnectionWasClosed = function (id) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
};
return VirtualConnectionsCollection;
}());
/***/ },
/* 9 */
/***/ function(module, exports) {
module.exports = require("events");
/***/ },
/* 10 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var stream_1 = __webpack_require__(11);
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
var VirtualConnection = (function (_super) {
__extends(VirtualConnection, _super);
function VirtualConnection(_beginWriteCallback) {
var _this = _super.call(this) || this;
_this._beginWriteCallback = _beginWriteCallback;
_this._flowing = false;
_this._receivedDataQueue = [];
return _this;
}
VirtualConnection.prototype._read = function () {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
var nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
};
VirtualConnection.prototype._write = function (chunk, encodingIfString, callback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk, encodingIfString);
}
this._beginWriteCallback(chunk, callback);
};
VirtualConnection.prototype.onReceivedData = function (dataOrNullToSignalEOF) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
}
else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
};
return VirtualConnection;
}(stream_1.Duplex));
exports.VirtualConnection = VirtualConnection;
/***/ },
/* 11 */
/***/ function(module, exports) {
module.exports = require("stream");
/***/ }
/******/ ])));

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>Socket-based RPC for Microsoft.AspNetCore.NodeServices.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Remove="node_modules\**\*" />
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.Threading.Tasks.Dataflow" Version="$(SystemThreadingTasksDataflowPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>

View File

@@ -0,0 +1,40 @@
using System.IO;
using System.IO.Pipes;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class NamedPipeConnection : StreamConnection
{
private bool _disposedValue = false;
private NamedPipeClientStream _namedPipeClientStream;
#pragma warning disable 1998 // Because in the NET451 code path, there's nothing to await
public override async Task<Stream> Open(string address)
{
_namedPipeClientStream = new NamedPipeClientStream(
".",
address,
PipeDirection.InOut,
PipeOptions.Asynchronous);
await _namedPipeClientStream.ConnectAsync().ConfigureAwait(false);
return _namedPipeClientStream;
}
#pragma warning restore 1998
public override void Dispose()
{
if (!_disposedValue)
{
if (_namedPipeClientStream != null)
{
_namedPipeClientStream.Dispose();
}
_disposedValue = true;
}
}
}
}

View File

@@ -0,0 +1,26 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal abstract class StreamConnection : IDisposable
{
public abstract Task<Stream> Open(string address);
public abstract void Dispose();
public static StreamConnection Create()
{
var useNamedPipes = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows);
if (useNamedPipes)
{
return new NamedPipeConnection();
}
else
{
return new UnixDomainSocketConnection();
}
}
}
}

View File

@@ -0,0 +1,40 @@
using System.IO;
using System.Net.Sockets;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class UnixDomainSocketConnection : StreamConnection
{
private bool _disposedValue = false;
private NetworkStream _networkStream;
private Socket _socket;
public override async Task<Stream> Open(string address)
{
var endPoint = new UnixDomainSocketEndPoint("/tmp/" + address);
_socket = new Socket(endPoint.AddressFamily, SocketType.Stream, ProtocolType.Unspecified);
await _socket.ConnectAsync(endPoint).ConfigureAwait(false);
_networkStream = new NetworkStream(_socket);
return _networkStream;
}
public override void Dispose()
{
if (!_disposedValue)
{
if (_networkStream != null)
{
_networkStream.Dispose();
}
if (_socket != null)
{
_socket.Dispose();
}
_disposedValue = true;
}
}
}
}

View File

@@ -0,0 +1,86 @@
using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
// From System.IO.Pipes/src/System/Net/Sockets/UnixDomainSocketEndPoint.cs (an internal class in System.IO.Pipes)
internal sealed class UnixDomainSocketEndPoint : EndPoint
{
private const AddressFamily EndPointAddressFamily = AddressFamily.Unix;
private static readonly Encoding s_pathEncoding = Encoding.UTF8;
private static readonly int s_nativePathOffset = 2; // = offsetof(struct sockaddr_un, sun_path). It's the same on Linux and OSX
private static readonly int s_nativePathLength = 91; // sockaddr_un.sun_path at http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_un.h.html, -1 for terminator
private static readonly int s_nativeAddressSize = s_nativePathOffset + s_nativePathLength;
private readonly string _path;
private readonly byte[] _encodedPath;
public UnixDomainSocketEndPoint(string path)
{
if (path == null)
{
throw new ArgumentNullException(nameof(path));
}
_path = path;
_encodedPath = s_pathEncoding.GetBytes(_path);
if (path.Length == 0 || _encodedPath.Length > s_nativePathLength)
{
throw new ArgumentOutOfRangeException(nameof(path));
}
}
internal UnixDomainSocketEndPoint(SocketAddress socketAddress)
{
if (socketAddress == null)
{
throw new ArgumentNullException(nameof(socketAddress));
}
if (socketAddress.Family != EndPointAddressFamily ||
socketAddress.Size > s_nativeAddressSize)
{
throw new ArgumentOutOfRangeException(nameof(socketAddress));
}
if (socketAddress.Size > s_nativePathOffset)
{
_encodedPath = new byte[socketAddress.Size - s_nativePathOffset];
for (int i = 0; i < _encodedPath.Length; i++)
{
_encodedPath[i] = socketAddress[s_nativePathOffset + i];
}
_path = s_pathEncoding.GetString(_encodedPath, 0, _encodedPath.Length);
}
else
{
_encodedPath = Array.Empty<byte>();
_path = string.Empty;
}
}
public override SocketAddress Serialize()
{
var result = new SocketAddress(AddressFamily.Unix, s_nativeAddressSize);
for (int index = 0; index < _encodedPath.Length; index++)
{
result[s_nativePathOffset + index] = _encodedPath[index];
}
result[s_nativePathOffset + _encodedPath.Length] = 0; // path must be null-terminated
return result;
}
public override EndPoint Create(SocketAddress socketAddress) => new UnixDomainSocketEndPoint(socketAddress);
public override AddressFamily AddressFamily => EndPointAddressFamily;
public override string ToString() => _path;
}
}

View File

@@ -0,0 +1,240 @@
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections;
using Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses a lightweight binary streaming protocol
/// to perform RPC invocations. The physical transport is Named Pipes on Windows, or Domain Sockets on Linux/Mac.
/// For details on the binary streaming protocol, see <see cref="Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections.VirtualConnectionClient" />
/// The advantage over using HTTP for RPC is that this is faster (not surprisingly - there's much less overhead
/// because we don't need most of the functionality of HTTP).
///
/// The address of the pipe/socket is selected randomly here on the .NET side and sent to the child process as a
/// command-line argument (the address space is wide enough that there's no real risk of a clash, unlike when
/// selecting TCP port numbers).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class SocketNodeInstance : OutOfProcessNodeInstance
{
private readonly static JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly static int streamBufferSize = 16 * 1024;
private readonly static UTF8Encoding utf8EncodingWithoutBom = new UTF8Encoding(false);
private readonly SemaphoreSlim _connectionCreationSemaphore = new SemaphoreSlim(1);
private bool _connectionHasFailed;
private StreamConnection _physicalConnection;
private string _socketAddress;
private VirtualConnectionClient _virtualConnectionClient;
public SocketNodeInstance(NodeServicesOptions options, string socketAddress)
: base(
EmbeddedResourceReader.Read(
typeof(SocketNodeInstance),
"/Content/Node/entrypoint-socket.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeNewCommandLineOptions(socketAddress),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_socketAddress = socketAddress;
}
protected override async Task<T> InvokeExportAsync<T>(NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
if (_connectionHasFailed)
{
// _connectionHasFailed implies a protocol-level error. The old instance is no longer of any use.
var allowConnectionDraining = false;
// This special exception type forces NodeServicesImpl to restart the Node instance
throw new NodeInvocationException(
"The SocketNodeInstance socket connection failed. See logs to identify the reason.",
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: allowConnectionDraining);
}
if (_virtualConnectionClient == null)
{
// Although we could pass the cancellationToken into EnsureVirtualConnectionClientCreated and
// have it signal cancellations upstream, that would be a bad thing to do, because all callers
// wait for the same connection task. There's no reason why the first caller should have the
// special ability to cancel the connection process in a way that would affect subsequent
// callers. So, each caller just independently stops awaiting connection if that call is cancelled.
await ThrowOnCancellation(EnsureVirtualConnectionClientCreated(), cancellationToken);
}
// For each invocation, we open a new virtual connection. This gives an API equivalent to opening a new
// physical connection to the child process, but without the overhead of doing so, because it's really
// just multiplexed into the existing physical connection stream.
bool shouldDisposeVirtualConnection = true;
Stream virtualConnection = null;
try
{
virtualConnection = _virtualConnectionClient.OpenVirtualConnection();
// Send request
WriteJsonLine(virtualConnection, invocationInfo);
// Determine what kind of response format is expected
if (typeof(T) == typeof(Stream))
{
// Pass through streamed binary response
// It is up to the consumer to dispose this stream, so don't do so here
shouldDisposeVirtualConnection = false;
return (T)(object)virtualConnection;
}
else
{
// Parse and return non-streamed JSON response
var response = await ReadJsonAsync<RpcJsonResponse<T>>(virtualConnection, cancellationToken);
if (response.ErrorMessage != null)
{
throw new NodeInvocationException(response.ErrorMessage, response.ErrorDetails);
}
return response.Result;
}
}
finally
{
if (shouldDisposeVirtualConnection)
{
virtualConnection.Dispose();
}
}
}
private async Task EnsureVirtualConnectionClientCreated()
{
// Asynchronous equivalent to a 'lock(...) { ... }'
await _connectionCreationSemaphore.WaitAsync();
try
{
if (_virtualConnectionClient == null)
{
_physicalConnection = StreamConnection.Create();
var connection = await _physicalConnection.Open(_socketAddress);
_virtualConnectionClient = new VirtualConnectionClient(connection);
_virtualConnectionClient.OnError += (ex) =>
{
// This callback is fired only if there's a protocol-level failure (e.g., child process disconnected
// unexpectedly). It does *not* fire when RPC calls return errors. Since there's been a protocol-level
// failure, this Node instance is no longer usable and should be discarded.
_connectionHasFailed = true;
OutputLogger.LogError(0, ex, ex.Message);
};
}
}
finally
{
_connectionCreationSemaphore.Release();
}
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (_virtualConnectionClient != null)
{
_virtualConnectionClient.Dispose();
_virtualConnectionClient = null;
}
if (_physicalConnection != null)
{
_physicalConnection.Dispose();
_physicalConnection = null;
}
}
base.Dispose(disposing);
}
private static void WriteJsonLine(Stream stream, object serializableObject)
{
using (var streamWriter = new StreamWriter(stream, utf8EncodingWithoutBom, streamBufferSize, true))
using (var jsonWriter = new JsonTextWriter(streamWriter))
{
jsonWriter.CloseOutput = false;
jsonWriter.AutoCompleteOnClose = false;
var serializer = JsonSerializer.Create(jsonSerializerSettings);
serializer.Serialize(jsonWriter, serializableObject);
jsonWriter.Flush();
streamWriter.WriteLine();
streamWriter.Flush();
}
}
private static async Task<T> ReadJsonAsync<T>(Stream stream, CancellationToken cancellationToken)
{
var json = Encoding.UTF8.GetString(await ReadAllBytesAsync(stream, cancellationToken));
return JsonConvert.DeserializeObject<T>(json, jsonSerializerSettings);
}
private static async Task<byte[]> ReadAllBytesAsync(Stream input, CancellationToken cancellationToken)
{
byte[] buffer = new byte[streamBufferSize];
using (var ms = new MemoryStream())
{
int read;
while ((read = await input.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
private static string MakeNewCommandLineOptions(string listenAddress)
{
return $"--listenAddress {listenAddress}";
}
private static Task ThrowOnCancellation(Task task, CancellationToken cancellationToken)
{
return task.IsCompleted
? task // If the task is already completed, no need to wrap it in a further layer of task
: task.ContinueWith(
_ => {}, // If the task completes, allow execution to continue
cancellationToken,
TaskContinuationOptions.ExecuteSynchronously,
TaskScheduler.Default);
}
#pragma warning disable 649 // These properties are populated via JSON deserialization
private class RpcJsonResponse<TResult>
{
public TResult Result { get; set; }
public string ErrorMessage { get; set; }
public string ErrorDetails { get; set; }
}
#pragma warning restore 649
}
}

View File

@@ -0,0 +1,21 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over binary sockets (on Windows, this is
/// implemented as named pipes; on other platforms it uses domain sockets).
/// </summary>
public static void UseSocketHosting(this NodeServicesOptions options)
{
var pipeName = "pni-" + Guid.NewGuid().ToString("D"); // Arbitrary non-clashing string
options.NodeInstanceFactory = () => new SocketNodeInstance(options, pipeName);
}
}
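For illustration only, not part of this diff: opting into this hosting model from application startup could look like the sketch below, assuming the AddNodeServices overload that accepts a setup delegate.
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.NodeServices.Sockets;
using Microsoft.Extensions.DependencyInjection;
public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        services.AddNodeServices(options =>
        {
            // Replace the default hosting model with the socket-based RPC transport
            options.UseSocketHosting();
        });
    }
}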
}

View File

@@ -0,0 +1,79 @@
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
import '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/OverrideStdOutputs';
import * as net from 'net';
import * as path from 'path';
import * as readline from 'readline';
import { Duplex } from 'stream';
import { parseArgs } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ArgsUtil';
import { exitWhenParentExits } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ExitWhenParentExits';
import * as virtualConnectionServer from './VirtualConnections/VirtualConnectionServer';
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
const dynamicRequire: (name: string) => any = eval('require');
// Signal to the .NET side when we're ready to accept invocations
const server = net.createServer().on('listening', () => {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', (connection: Duplex) => {
readline.createInterface(connection, null).on('line', line => {
try {
// Get a reference to the function to invoke
const invocation = JSON.parse(line) as RpcInvocation;
const invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
const invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
let hasInvokedCallback = false;
const invocationCallback = (errorValue, successValue) => {
if (hasInvokedCallback) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: (): Duplex => {
hasInvokedCallback = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
} catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
const useWindowsNamedPipes = /^win/.test(process.platform);
const parsedArgs = parseArgs(process.argv);
const listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
interface RpcInvocation {
moduleName: string;
exportedFunctionName: string;
args: any[];
}

View File

@@ -0,0 +1,43 @@
import { Duplex } from 'stream';
export type EndWriteCallback = (error?: any) => void;
export type BeginWriteCallback = (data: Buffer, callback: EndWriteCallback) => void;
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
export class VirtualConnection extends Duplex {
private _flowing = false;
private _receivedDataQueue: Buffer[] = [];
constructor(private _beginWriteCallback: BeginWriteCallback) {
super();
}
public _read() {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
const nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
}
public _write(chunk: Buffer | string, encodingIfString: string, callback: EndWriteCallback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk as string, encodingIfString);
}
this._beginWriteCallback(chunk as Buffer, callback);
}
public onReceivedData(dataOrNullToSignalEOF: Buffer) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
} else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
}
}

View File

@@ -0,0 +1,199 @@
import { Server, Socket } from 'net';
import { EventEmitter } from 'events';
import { Duplex } from 'stream';
import { VirtualConnection, EndWriteCallback } from './VirtualConnection';
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
const MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
export function createInterface(server: Server): EventEmitter {
const emitter = new EventEmitter();
server.on('connection', (socket: Socket) => {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
const childSockets = new VirtualConnectionsCollection(socket, virtualConnection => {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
class VirtualConnectionsCollection {
private _currentFrameHeader: FrameHeader = null;
private _virtualConnections: { [id: string]: VirtualConnection } = {};
constructor(private _socket: Socket, private _onVirtualConnectionCallback: (virtualConnection: Duplex) => void) {
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', () => {
Object.getOwnPropertyNames(this._virtualConnections).forEach(id => {
// A 'null' frame signals that the connection was closed remotely
this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, open virtual connections as needed, and dispatch data to them.
*/
private _onIncomingDataAvailable() {
let exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
} else {
const frameBodyLength = this._currentFrameHeader.bodyLength;
const frameBodyOrNull: Buffer = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
const headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
} else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
}
private _onReceivedCompleteFrame(header: FrameHeader, bodyIfNotEmpty: Buffer) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
const virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
}
private _getOrOpenVirtualConnection(header: FrameHeader) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
} else {
// It's a new one
return this._openVirtualConnection(header);
}
}
private _openVirtualConnection(header: FrameHeader) {
const beginWriteCallback = (data, writeCompletedCallback) => {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
const newVirtualConnection = new VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', () => {
// The virtual connection was closed remotely. Clean up locally.
this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', () => {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
this._onVirtualConnectionWasClosed(header.connectionIdString);
this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
}
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
private _readNextFrameHeader(): FrameHeader {
const headerBuf: Buffer = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
const connectionIdBinary = headerBuf.slice(0, 8);
const connectionIdString = connectionIdBinary.toString('hex');
const bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary, connectionIdString, bodyLength };
} else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
}
private _sendFrame(connectionIdBinary: Buffer, data: Buffer, callback?: EndWriteCallback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
let hasInvokedCallback = false;
const finalCallback: EndWriteCallback = callback && (error => {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
const notFinalCallback: EndWriteCallback = callback && (error => {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
let bytesSent = 0;
do {
const nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
const isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
}
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
private _sendInt32LE(value: number, callback?: EndWriteCallback) {
const buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
}
private _onVirtualConnectionWasClosed(id: string) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
}
}
interface FrameHeader {
connectionIdBinary: Buffer;
connectionIdString: string;
bodyLength: number;
}
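To make the wire format concrete, here is an illustrative sketch that is not part of this diff: each frame consists of an 8-byte connection id, a 4-byte little-endian body length, and then up to MaxFrameBodyLength bytes of body; this is what the .NET client writes and what _readNextFrameHeader above parses. The FrameFormat class and BuildFrame helper below are hypothetical.
using System;
internal static class FrameFormat
{
    // Hypothetical helper showing the frame layout parsed by _readNextFrameHeader above:
    // [ 8-byte connection id ][ Int32 little-endian body length ][ body: 0..MaxFrameBodyLength bytes ]
    public static byte[] BuildFrame(byte[] connectionId, byte[] body)
    {
        const int MaxFrameBodyLength = 16 * 1024; // keep in sync with both sides of the protocol
        if (connectionId.Length != 8)
        {
            throw new ArgumentException("Connection ids are always 8 bytes", nameof(connectionId));
        }
        if (body.Length > MaxFrameBodyLength)
        {
            throw new ArgumentException("Body too long; split it into multiple frames", nameof(body));
        }
        var frame = new byte[8 + 4 + body.Length];
        Buffer.BlockCopy(connectionId, 0, frame, 0, 8);
        Buffer.BlockCopy(BitConverter.GetBytes(body.Length), 0, frame, 8, 4); // little-endian on supported platforms
        Buffer.BlockCopy(body, 0, frame, 12, body.Length);
        return frame;
    }
}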

View File

@@ -0,0 +1,11 @@
{
"compilerOptions": {
"target": "es3",
"module": "commonjs",
"moduleResolution": "node",
"types": ["node"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -0,0 +1,150 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A virtual read/write connection, typically to a remote process. Multiple virtual connections can be
/// multiplexed over a single physical connection (e.g., a named pipe, domain socket, or TCP socket).
/// </summary>
internal class VirtualConnection : Stream
{
private readonly static Task CompletedTask = Task.CompletedTask;
private VirtualConnectionClient _host;
private readonly BufferBlock<byte[]> _receivedDataQueue = new BufferBlock<byte[]>();
private ArraySegment<byte> _receivedDataNotYetUsed;
private bool _wasClosedByRemote;
private bool _isDisposed;
public VirtualConnection(long id, VirtualConnectionClient host)
{
Id = id;
_host = host;
}
public long Id { get; }
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return true; } }
public override long Length
{
get { throw new NotImplementedException(); }
}
public override long Position
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public override void Flush()
{
// We're auto-flushing, so this is a no-op.
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
return 0;
}
var bytesRead = 0;
while (true)
{
// Pull as many applicable bytes as we can out of receivedDataNotYetUsed, then update its offset/length
int bytesToExtract = Math.Min(count - bytesRead, _receivedDataNotYetUsed.Count);
if (bytesToExtract > 0)
{
Buffer.BlockCopy(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset, buffer, bytesRead, bytesToExtract);
_receivedDataNotYetUsed = new ArraySegment<byte>(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset + bytesToExtract, _receivedDataNotYetUsed.Count - bytesToExtract);
bytesRead += bytesToExtract;
}
// If we've completely filled the output buffer, we're done
if (bytesRead == count)
{
return bytesRead;
}
// We haven't yet filled the output buffer, so we must have exhausted receivedDataNotYetUsed instead.
// We want to get the next block of data from the underlying queue.
byte[] nextReceivedBlock;
if (bytesRead > 0)
{
if (!_receivedDataQueue.TryReceive(null, out nextReceivedBlock))
{
// No more data is available synchronously, and we already have some data, so we can stop now
return bytesRead;
}
}
else
{
// Since we don't yet have anything, wait for the underlying source
nextReceivedBlock = await _receivedDataQueue.ReceiveAsync(cancellationToken);
}
if (nextReceivedBlock.Length == 0)
{
// A zero-length block signals that the remote regards this virtual connection as closed
_wasClosedByRemote = true;
return bytesRead;
}
else
{
// We got some more data, so can continue trying to fill the output buffer
_receivedDataNotYetUsed = new ArraySegment<byte>(nextReceivedBlock, 0, nextReceivedBlock.Length);
}
}
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
throw new InvalidOperationException("The connection was already closed by the remote party");
}
return count > 0 ? _host.WriteAsync(Id, buffer, offset, count, cancellationToken) : CompletedTask;
}
public override int Read(byte[] buffer, int offset, int count)
{
return ReadAsync(buffer, offset, count, CancellationToken.None).Result;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
WriteAsync(buffer, offset, count, CancellationToken.None).Wait();
}
protected override void Dispose(bool disposing)
{
if (disposing && !_isDisposed)
{
_isDisposed = true;
_host.CloseInnerStream(Id, _wasClosedByRemote);
}
}
public async Task AddDataToQueue(byte[] data)
{
await _receivedDataQueue.SendAsync(data);
}
}
}

View File

@@ -0,0 +1,238 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A callback that will be invoked if the <see cref="VirtualConnectionClient"/> encounters a read error.
/// </summary>
/// <param name="ex"></param>
public delegate void VirtualConnectionReadErrorHandler(Exception ex);
/// <summary>
/// Wraps an underlying physical read/write stream (e.g., named pipes, domain sockets, or TCP sockets) and
/// exposes an API for making 'virtual connections', which act as independent read/write streams.
/// Traffic over these virtual connections is multiplexed over the underlying physical stream. This is useful
/// for fast stream-based inter-process communication because it avoids the overhead of opening a new physical
/// connection each time a new communication channel is needed.
/// </summary>
internal class VirtualConnectionClient : IDisposable
{
internal const int MaxFrameBodyLength = 16 * 1024;
public event VirtualConnectionReadErrorHandler OnError;
private Stream _underlyingTransport;
private Dictionary<long, VirtualConnection> _activeInnerStreams;
private long _nextInnerStreamId;
private readonly SemaphoreSlim _streamWriterSemaphore = new SemaphoreSlim(1);
private readonly object _readControlLock = new object();
private Exception _readLoopExitedWithException;
private readonly CancellationTokenSource _disposalCancellatonToken = new CancellationTokenSource();
private bool _disposedValue = false;
public VirtualConnectionClient(Stream underlyingTransport)
{
_underlyingTransport = underlyingTransport;
_activeInnerStreams = new Dictionary<long, VirtualConnection>();
RunReadLoop();
}
public Stream OpenVirtualConnection()
{
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
var id = Interlocked.Increment(ref _nextInnerStreamId);
var newInnerStream = new VirtualConnection(id, this);
lock (_activeInnerStreams)
{
_activeInnerStreams.Add(id, newInnerStream);
}
return newInnerStream;
}
// It's async void because nothing waits for it to finish (it continues indefinitely). It signals any errors via
// a separate channel.
private async void RunReadLoop()
{
try
{
while (!_disposalCancellatonToken.IsCancellationRequested)
{
var remoteIsStillConnected = await ProcessNextFrameAsync();
if (!remoteIsStillConnected)
{
CloseAllActiveStreams();
}
}
}
catch (Exception ex)
{
// Not all underlying transports correctly honor cancellation tokens. For example,
// DomainSocketStreamTransport's ReadAsync ignores them, so we only know to stop
// the read loop when the underlying stream is disposed and then it throws ObjectDisposedException.
if (!(ex is TaskCanceledException || ex is ObjectDisposedException))
{
_readLoopExitedWithException = ex;
var evt = OnError;
if (evt != null)
{
evt(ex);
}
}
}
}
private async Task<bool> ProcessNextFrameAsync()
{
// First read frame header
var frameHeaderBuffer = await ReadExactLength(12);
if (frameHeaderBuffer == null)
{
return false; // Underlying stream was closed
}
// Parse frame header, then read the frame body
long streamId = BitConverter.ToInt64(frameHeaderBuffer, 0);
int frameBodyLength = BitConverter.ToInt32(frameHeaderBuffer, 8);
if (frameBodyLength < 0 || frameBodyLength > MaxFrameBodyLength)
{
throw new InvalidDataException("Illegal frame length: " + frameBodyLength);
}
var frameBody = await ReadExactLength(frameBodyLength);
if (frameBody == null)
{
return false; // Underlying stream was closed
}
// Dispatch the frame to the relevant inner stream
VirtualConnection innerStream;
lock (_activeInnerStreams)
{
_activeInnerStreams.TryGetValue(streamId, out innerStream);
}
if (innerStream != null)
{
await innerStream.AddDataToQueue(frameBody);
}
return true;
}
private async Task<byte[]> ReadExactLength(int lengthToRead) {
byte[] buffer = new byte[lengthToRead];
var totalBytesRead = 0;
var ct = _disposalCancellatonToken.Token;
while (totalBytesRead < lengthToRead)
{
var chunkLengthRead = await _underlyingTransport.ReadAsync(buffer, totalBytesRead, lengthToRead - totalBytesRead, ct);
if (chunkLengthRead == 0)
{
// Underlying stream was closed
return null;
}
totalBytesRead += chunkLengthRead;
}
return buffer;
}
private void CloseAllActiveStreams()
{
IList<VirtualConnection> innerStreamsCopy;
// Only hold the lock while cloning the list of inner streams. Release the lock before
// actually disposing them, because each 'dispose' call will try to take another lock
// so it can remove that inner stream from activeInnerStreams.
lock (_activeInnerStreams)
{
innerStreamsCopy = _activeInnerStreams.Values.ToList();
}
foreach (var stream in innerStreamsCopy)
{
stream.Dispose();
}
}
public void Dispose()
{
if (!_disposedValue)
{
_disposedValue = true;
_disposalCancellatonToken.Cancel(); // Stops the read loop
CloseAllActiveStreams();
}
}
public async Task WriteAsync(long innerStreamId, byte[] data, int offset, int count, CancellationToken cancellationToken)
{
// In case the amount of data to be sent exceeds the max frame length, split it into separate frames
// Note that we always send at least one frame, even if it's empty, because the zero-length frame is the signal to close a virtual connection
// (hence 'do..while' instead of just 'while').
int bytesWritten = 0;
do {
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
// Hold the write lock only for the time taken to send a single frame, not all frames, to allow large sends to proceed in parallel
await _streamWriterSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
// Write stream ID, then length prefix, then chunk payload, then flush
var nextChunkBodyLength = Math.Min(MaxFrameBodyLength, count - bytesWritten);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(innerStreamId), 0, 8, cancellationToken).ConfigureAwait(false);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(nextChunkBodyLength), 0, 4, cancellationToken).ConfigureAwait(false);
if (nextChunkBodyLength > 0)
{
await _underlyingTransport.WriteAsync(data, offset + bytesWritten, nextChunkBodyLength, cancellationToken).ConfigureAwait(false);
bytesWritten += nextChunkBodyLength;
}
await _underlyingTransport.FlushAsync(cancellationToken).ConfigureAwait(false);
}
finally
{
_streamWriterSemaphore.Release();
}
} while (bytesWritten < count);
}
public void CloseInnerStream(long innerStreamId, bool isAlreadyClosedRemotely)
{
lock (_activeInnerStreams)
{
if (_activeInnerStreams.ContainsKey(innerStreamId))
{
_activeInnerStreams.Remove(innerStreamId);
}
}
if (!isAlreadyClosedRemotely) {
// Also notify the remote that this innerstream is closed
WriteAsync(innerStreamId, new byte[0], 0, 0, new CancellationToken()).Wait();
}
}
private void ThrowIfReadLoopFailed()
{
if (_readLoopExitedWithException != null)
{
throw new AggregateException("The connection failed - see InnerException for details.", _readLoopExitedWithException);
}
}
}
}
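
As the class summary describes, VirtualConnectionClient multiplexes many logical streams over one physical transport. A minimal usage sketch, assuming you already hold a connected transport Stream (a named pipe, domain socket, or TCP socket); the class is internal, so this only illustrates how the Sockets hosting model drives it, not a public API:

using System;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections;

static class VirtualConnectionClientSketch
{
    // 'transport' stands in for the physical connection (named pipe, domain socket, or TCP socket).
    public static async Task RunAsync(Stream transport)
    {
        var client = new VirtualConnectionClient(transport);
        client.OnError += ex => Console.Error.WriteLine(ex); // surface read-loop failures

        // Each call opens an independent, multiplexed read/write stream over the same transport
        using (var connection = client.OpenVirtualConnection())
        {
            var request = Encoding.UTF8.GetBytes("hello");
            await connection.WriteAsync(request, 0, request.Length, CancellationToken.None);

            var buffer = new byte[4096];
            // Returns 0 once the remote sends its zero-length close frame
            var bytesRead = await connection.ReadAsync(buffer, 0, buffer.Length, CancellationToken.None);
        }
        // Disposing the virtual connection sends the zero-length close frame to the remote
    }
}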

View File

@@ -0,0 +1,109 @@
{
"AssemblyIdentity": "Microsoft.AspNetCore.NodeServices.Sockets, Version=2.0.3.0, Culture=neutral, PublicKeyToken=adb9793829ddae60",
"Types": [
{
"Name": "Microsoft.AspNetCore.NodeServices.Sockets.NodeServicesOptionsExtensions",
"Visibility": "Public",
"Kind": "Class",
"Abstract": true,
"Static": true,
"Sealed": true,
"ImplementedInterfaces": [],
"Members": [
{
"Kind": "Method",
"Name": "UseSocketHosting",
"Parameters": [
{
"Name": "options",
"Type": "Microsoft.AspNetCore.NodeServices.NodeServicesOptions"
}
],
"ReturnType": "System.Void",
"Static": true,
"Extension": true,
"Visibility": "Public",
"GenericParameter": []
}
],
"GenericParameters": []
},
{
"Name": "Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections.VirtualConnectionReadErrorHandler",
"Visibility": "Public",
"Kind": "Class",
"Sealed": true,
"BaseType": "System.MulticastDelegate",
"ImplementedInterfaces": [],
"Members": [
{
"Kind": "Method",
"Name": "Invoke",
"Parameters": [
{
"Name": "ex",
"Type": "System.Exception"
}
],
"ReturnType": "System.Void",
"Virtual": true,
"Visibility": "Public",
"GenericParameter": []
},
{
"Kind": "Method",
"Name": "BeginInvoke",
"Parameters": [
{
"Name": "ex",
"Type": "System.Exception"
},
{
"Name": "callback",
"Type": "System.AsyncCallback"
},
{
"Name": "object",
"Type": "System.Object"
}
],
"ReturnType": "System.IAsyncResult",
"Virtual": true,
"Visibility": "Public",
"GenericParameter": []
},
{
"Kind": "Method",
"Name": "EndInvoke",
"Parameters": [
{
"Name": "result",
"Type": "System.IAsyncResult"
}
],
"ReturnType": "System.Void",
"Virtual": true,
"Visibility": "Public",
"GenericParameter": []
},
{
"Kind": "Constructor",
"Name": ".ctor",
"Parameters": [
{
"Name": "object",
"Type": "System.Object"
},
{
"Name": "method",
"Type": "System.IntPtr"
}
],
"Visibility": "Public",
"GenericParameter": []
}
],
"GenericParameters": []
}
]
}

View File

@@ -0,0 +1,18 @@
{
"name": "nodeservices.sockets",
"version": "1.0.0",
"description": "This is not really an NPM package and will not be published. This file exists only to reference compilation tools.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "./node_modules/.bin/webpack"
},
"author": "Microsoft",
"license": "Apache-2.0",
"devDependencies": {
"@types/node": "^6.0.42",
"ts-loader": "^0.8.2",
"typescript": "^2.0.0",
"webpack": "^1.13.1"
}
}

View File

@@ -0,0 +1,20 @@
module.exports = {
target: 'node',
externals: ['fs', 'net', 'events', 'readline', 'stream'],
resolve: {
extensions: [ '.ts' ]
},
module: {
loaders: [
{ test: /\.ts$/, loader: 'ts-loader' },
]
},
entry: {
'entrypoint-socket': ['./TypeScript/SocketNodeInstanceEntryPoint'],
},
output: {
libraryTarget: 'commonjs',
path: './Content/Node',
filename: '[name].js'
}
};

View File

@@ -0,0 +1,3 @@
/bin/
/node_modules/
yarn.lock

View File

@@ -0,0 +1,25 @@
using System;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Supplies INodeServices instances.
/// </summary>
public static class NodeServicesFactory
{
/// <summary>
/// Create an <see cref="INodeServices"/> instance according to the supplied options.
/// </summary>
/// <param name="options">Options for creating the <see cref="INodeServices"/> instance.</param>
/// <returns>An <see cref="INodeServices"/> instance.</returns>
public static INodeServices CreateNodeServices(NodeServicesOptions options)
{
if (options == null)
{
throw new ArgumentNullException(nameof (options));
}
return new NodeServicesImpl(options.NodeInstanceFactory);
}
}
}

View File

@@ -0,0 +1,114 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Logging.Console;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Describes options used to configure an <see cref="INodeServices"/> instance.
/// </summary>
public class NodeServicesOptions
{
internal const string TimeoutConfigPropertyName = nameof(InvocationTimeoutMilliseconds);
private const int DefaultInvocationTimeoutMilliseconds = 60 * 1000;
private const string LogCategoryName = "Microsoft.AspNetCore.NodeServices";
private static readonly string[] DefaultWatchFileExtensions = { ".js", ".jsx", ".ts", ".tsx", ".json", ".html" };
/// <summary>
/// Creates a new instance of <see cref="NodeServicesOptions"/>.
/// </summary>
/// <param name="serviceProvider">The <see cref="IServiceProvider"/>.</param>
public NodeServicesOptions(IServiceProvider serviceProvider)
{
if (serviceProvider == null)
{
throw new ArgumentNullException(nameof (serviceProvider));
}
EnvironmentVariables = new Dictionary<string, string>();
InvocationTimeoutMilliseconds = DefaultInvocationTimeoutMilliseconds;
WatchFileExtensions = (string[])DefaultWatchFileExtensions.Clone();
var hostEnv = serviceProvider.GetService<IHostingEnvironment>();
if (hostEnv != null)
{
// In an ASP.NET environment, we can use the IHostingEnvironment data to auto-populate a few
// things that you'd otherwise have to specify manually
ProjectPath = hostEnv.ContentRootPath;
EnvironmentVariables["NODE_ENV"] = hostEnv.IsDevelopment() ? "development" : "production"; // De-facto standard values for Node
}
else
{
ProjectPath = Directory.GetCurrentDirectory();
}
var applicationLifetime = serviceProvider.GetService<IApplicationLifetime>();
if (applicationLifetime != null)
{
ApplicationStoppingToken = applicationLifetime.ApplicationStopping;
}
// If the DI system gives us a logger, use it. Otherwise, set up a default one.
var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
NodeInstanceOutputLogger = loggerFactory != null
? loggerFactory.CreateLogger(LogCategoryName)
: new ConsoleLogger(LogCategoryName, null, false);
// By default, we use this package's built-in out-of-process-via-HTTP hosting/transport
this.UseHttpHosting();
}
/// <summary>
/// Specifies how to construct Node.js instances. An <see cref="INodeInstance"/> encapsulates all details about
/// how Node.js instances are launched and communicated with. A new <see cref="INodeInstance"/> will be created
/// automatically if the previous instance has terminated (e.g., because a source file changed).
/// </summary>
public Func<INodeInstance> NodeInstanceFactory { get; set; }
/// <summary>
/// If set, overrides the path to the root of your application. This path is used when locating Node.js modules relative to your project.
/// </summary>
public string ProjectPath { get; set; }
/// <summary>
/// If set, the Node.js instance should restart when any matching file on disk within your project changes.
/// </summary>
public string[] WatchFileExtensions { get; set; }
/// <summary>
/// The Node.js instance's stdout/stderr will be redirected to this <see cref="ILogger"/>.
/// </summary>
public ILogger NodeInstanceOutputLogger { get; set; }
/// <summary>
/// If true, the Node.js instance will accept incoming V8 debugger connections (e.g., from node-inspector).
/// </summary>
public bool LaunchWithDebugging { get; set; }
/// <summary>
/// If <see cref="LaunchWithDebugging"/> is true, the Node.js instance will listen for V8 debugger connections on this port.
/// </summary>
public int DebuggingPort { get; set; }
/// <summary>
/// If set, starts the Node.js instance with the specified environment variables.
/// </summary>
public IDictionary<string, string> EnvironmentVariables { get; set; }
/// <summary>
/// Specifies the maximum duration, in milliseconds, that your .NET code should wait for Node.js RPC calls to return.
/// </summary>
public int InvocationTimeoutMilliseconds { get; set; }
/// <summary>
/// A token that indicates when the host application is stopping.
/// </summary>
public CancellationToken ApplicationStoppingToken { get; set; }
}
}
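
When not using dependency injection, the factory and options types above can be wired up directly. A minimal sketch, assuming an empty ServiceCollection as the IServiceProvider and an illustrative project path:

using System;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.Extensions.DependencyInjection;

static class CreateNodeServicesSketch
{
    public static INodeServices Create()
    {
        // An empty provider: the constructor then falls back to the current directory
        // for ProjectPath and to a console logger for NodeInstanceOutputLogger
        IServiceProvider serviceProvider = new ServiceCollection().BuildServiceProvider();

        var options = new NodeServicesOptions(serviceProvider)
        {
            ProjectPath = ".",                        // illustrative: where Node modules are resolved
            InvocationTimeoutMilliseconds = 30 * 1000 // override the 60-second default
        };

        return NodeServicesFactory.CreateNodeServices(options);
    }
}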

View File

@@ -0,0 +1,41 @@
using System;
using Microsoft.AspNetCore.NodeServices;
namespace Microsoft.Extensions.DependencyInjection
{
/// <summary>
/// Extension methods for setting up NodeServices in an <see cref="IServiceCollection" />.
/// </summary>
public static class NodeServicesServiceCollectionExtensions
{
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection)
=> AddNodeServices(serviceCollection, _ => {});
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
/// <param name="setupAction">A callback that will be invoked to populate the <see cref="NodeServicesOptions"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection, Action<NodeServicesOptions> setupAction)
{
if (setupAction == null)
{
throw new ArgumentNullException(nameof (setupAction));
}
serviceCollection.AddSingleton(typeof(INodeServices), serviceProvider =>
{
// First we let NodeServicesOptions take its defaults from the IServiceProvider,
// then we let the developer override those options
var options = new NodeServicesOptions(serviceProvider);
setupAction(options);
return NodeServicesFactory.CreateNodeServices(options);
});
}
}
}
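
In an ASP.NET Core application this extension is typically called from ConfigureServices. A sketch, assuming a conventional Startup class; the option values are illustrative:

using Microsoft.Extensions.DependencyInjection;

public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        // Registers INodeServices as a singleton; the callback overrides the option defaults
        services.AddNodeServices(options =>
        {
            options.InvocationTimeoutMilliseconds = 120 * 1000; // illustrative values
            options.LaunchWithDebugging = false;
        });
    }
}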

View File

@@ -0,0 +1,361 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
__webpack_require__(4);
var http = __webpack_require__(5);
var path = __webpack_require__(3);
var ArgsUtil_1 = __webpack_require__(6);
var ExitWhenParentExits_1 = __webpack_require__(7);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
var server = http.createServer(function (req, res) {
readRequestBodyAsJson(req, function (bodyJson) {
var hasSentResult = false;
var callback = function (errorValue, successValue) {
if (!hasSentResult) {
hasSentResult = true;
if (errorValue) {
respondWithError(res, errorValue);
}
else if (typeof successValue !== 'string') {
// Arbitrary object/number/etc - JSON-serialize it
var successValueJson = void 0;
try {
successValueJson = JSON.stringify(successValue);
}
catch (ex) {
// JSON serialization error - pass it back to .NET
respondWithError(res, ex);
return;
}
res.setHeader('Content-Type', 'application/json');
res.end(successValueJson);
}
else {
// String - can bypass JSON-serialization altogether
res.setHeader('Content-Type', 'text/plain');
res.end(successValue);
}
}
};
// Support streamed responses
Object.defineProperty(callback, 'stream', {
enumerable: true,
get: function () {
if (!hasSentResult) {
hasSentResult = true;
res.setHeader('Content-Type', 'application/octet-stream');
}
return res;
}
});
try {
var resolvedPath = path.resolve(process.cwd(), bodyJson.moduleName);
var invokedModule = dynamicRequire(resolvedPath);
var func = bodyJson.exportedFunctionName ? invokedModule[bodyJson.exportedFunctionName] : invokedModule;
if (!func) {
throw new Error('The module "' + resolvedPath + '" has no export named "' + bodyJson.exportedFunctionName + '"');
}
func.apply(null, [callback].concat(bodyJson.args));
}
catch (synchronousException) {
callback(synchronousException, null);
}
});
});
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var requestedPortOrZero = parsedArgs.port || 0; // 0 means 'let the OS decide'
server.listen(requestedPortOrZero, 'localhost', function () {
// Signal to HttpNodeHost which port it should make its HTTP connections on
console.log('[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port ' + server.address().port + '\]');
// Signal to the NodeServices base class that we're ready to accept invocations
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
function readRequestBodyAsJson(request, callback) {
var requestBodyAsString = '';
request.on('data', function (chunk) { requestBodyAsString += chunk; });
request.on('end', function () { callback(JSON.parse(requestBodyAsString)); });
}
function respondWithError(res, errorValue) {
res.statusCode = 500;
res.end(JSON.stringify({
errorMessage: errorValue.message || errorValue,
errorDetails: errorValue.stack || null
}));
}
/***/ },
/* 2 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var path = __webpack_require__(3);
var startsWith = function (str, prefix) { return str.substring(0, prefix.length) === prefix; };
var appRootDir = process.cwd();
function patchedLStat(pathToStatLong, fsReqWrap) {
try {
// If the lstat completes without errors, we don't modify its behavior at all
return origLStat.apply(this, arguments);
}
catch (ex) {
var shouldOverrideError = startsWith(ex.message, 'EPERM') // It's a permissions error
&& typeof appRootDirLong === 'string'
&& startsWith(appRootDirLong, pathToStatLong) // ... for an ancestor directory
&& ex.stack.indexOf('Object.realpathSync ') >= 0; // ... during symlink resolution
if (shouldOverrideError) {
// Fake the result to give the same result as an 'lstat' on the app root dir.
// This stops Node failing to load modules just because it doesn't know whether
// ancestor directories are symlinks or not. If there's a genuine file
// permissions issue, it will still surface later when Node actually
// tries to read the file.
return origLStat.call(this, appRootDir, fsReqWrap);
}
else {
// In any other case, preserve the original error
throw ex;
}
}
}
;
// It's only necessary to apply this workaround on Windows
var appRootDirLong = null;
var origLStat = null;
if (/^win/.test(process.platform)) {
try {
// Get the app's root dir in Node's internal "long" format (e.g., \\?\C:\dir\subdir)
appRootDirLong = path._makeLong(appRootDir);
// Actually apply the patch, being as defensive as possible
var bindingFs = process.binding('fs');
origLStat = bindingFs.lstat;
if (typeof origLStat === 'function') {
bindingFs.lstat = patchedLStat;
}
}
catch (ex) {
}
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 4 */
/***/ function(module, exports) {
// When Node writes to stdout/strerr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("http");
/***/ },
/* 6 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 7 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid, ignoreSigint) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
if (ignoreSigint) {
// Pressing ctrl+c in the terminal sends a SIGINT to all processes in the foreground process tree.
// By default, the Node process would then exit before the .NET process, because ASP.NET implements
// a delayed shutdown to allow ongoing requests to complete.
//
// This is problematic, because if Node exits first, the CopyToAsync code in ConditionalProxyMiddleware
// will experience a read fault and log a huge load of errors. Fortunately, since the Node process is
// already set up to shut itself down if it detects the .NET process is terminated, all we have to do is
// ignore the SIGINT. The Node process will then terminate automatically after the .NET process does.
//
// A better solution would be to have WebpackDevMiddleware listen for SIGINT and gracefully close any
// ongoing EventSource connections before letting the Node process exit, independently of the .NET
// process exiting. However, doing this well in general is very nontrivial (see all the discussion at
// https://github.com/nodejs/node/issues/2642).
process.on('SIGINT', function () {
console.log('Received SIGINT. Waiting for .NET process to exit...');
});
}
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ }
/******/ ])));

View File

@@ -0,0 +1,151 @@
using System;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses HTTP to perform RPC invocations.
///
/// The Node child process starts an HTTP listener on an arbitrary available port (except where a nonzero
/// port number is specified as a constructor parameter), and signals which port was selected using the same
/// input/output-based mechanism that the base class uses to determine when the child process is ready to
/// accept RPC invocations.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class HttpNodeInstance : OutOfProcessNodeInstance
{
private static readonly Regex PortMessageRegex =
new Regex(@"^\[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port (\d+)\]$");
private static readonly JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly HttpClient _client;
private bool _disposed;
private int _portNumber;
public HttpNodeInstance(NodeServicesOptions options, int port = 0)
: base(
EmbeddedResourceReader.Read(
typeof(HttpNodeInstance),
"/Content/Node/entrypoint-http.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeCommandLineOptions(port),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_client = new HttpClient();
_client.Timeout = TimeSpan.FromMilliseconds(options.InvocationTimeoutMilliseconds + 1000);
}
private static string MakeCommandLineOptions(int port)
{
return $"--port {port}";
}
protected override async Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
var payloadJson = JsonConvert.SerializeObject(invocationInfo, jsonSerializerSettings);
var payload = new StringContent(payloadJson, Encoding.UTF8, "application/json");
var response = await _client.PostAsync("http://localhost:" + _portNumber, payload, cancellationToken);
if (!response.IsSuccessStatusCode)
{
// Unfortunately there's no true way to cancel ReadAsStringAsync calls, hence OrThrowOnCancellation (which stops awaiting but can't abort the underlying read)
var responseJson = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
var responseError = JsonConvert.DeserializeObject<RpcJsonResponse>(responseJson, jsonSerializerSettings);
throw new NodeInvocationException(responseError.ErrorMessage, responseError.ErrorDetails);
}
var responseContentType = response.Content.Headers.ContentType;
switch (responseContentType.MediaType)
{
case "text/plain":
// String responses can skip JSON encoding/decoding
if (typeof(T) != typeof(string))
{
throw new ArgumentException(
"Node module responded with non-JSON string. This cannot be converted to the requested generic type: " +
typeof(T).FullName);
}
var responseString = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return (T)(object)responseString;
case "application/json":
var responseJson = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return JsonConvert.DeserializeObject<T>(responseJson, jsonSerializerSettings);
case "application/octet-stream":
// Streamed responses have to be received as System.IO.Stream instances
if (typeof(T) != typeof(Stream) && typeof(T) != typeof(object))
{
throw new ArgumentException(
"Node module responded with binary stream. This cannot be converted to the requested generic type: " +
typeof(T).FullName + ". Instead you must use the generic type System.IO.Stream.");
}
return (T)(object)(await response.Content.ReadAsStreamAsync().OrThrowOnCancellation(cancellationToken));
default:
throw new InvalidOperationException("Unexpected response content type: " + responseContentType.MediaType);
}
}
protected override void OnOutputDataReceived(string outputData)
{
// Watch for "port selected" messages, and when observed, store the port number
// so we can use it when making HTTP requests. The child process will always send
// one of these messages before it sends a "ready for connections" message.
var match = _portNumber != 0 ? null : PortMessageRegex.Match(outputData);
if (match != null && match.Success)
{
_portNumber = int.Parse(match.Groups[1].Captures[0].Value);
}
else
{
base.OnOutputDataReceived(outputData);
}
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (!_disposed)
{
if (disposing)
{
_client.Dispose();
}
_disposed = true;
}
}
#pragma warning disable 649 // These properties are populated via JSON deserialization
private class RpcJsonResponse
{
public string ErrorMessage { get; set; }
public string ErrorDetails { get; set; }
}
#pragma warning restore 649
}
}

View File

@@ -0,0 +1,23 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an instance of Node.js to which Remote Procedure Calls (RPC) may be sent.
/// </summary>
public interface INodeInstance : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args);
}
}
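
A sketch of calling this interface, which also illustrates the response-type rules HttpNodeInstance enforces above (string for text/plain responses, Stream for application/octet-stream, any JSON-serializable type otherwise); the module paths and export names are placeholders:

using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;

static class InvokeExportSketch
{
    public static async Task RunAsync(INodeInstance node, CancellationToken ct)
    {
        // JSON response: deserialized into the requested type
        var sum = await node.InvokeExportAsync<int>(ct, "./maths", "add", 1, 2);

        // Plain-string response (text/plain): T must be string
        var html = await node.InvokeExportAsync<string>(ct, "./templates", "render", new { name = "world" });

        // Streamed response (callback.stream on the Node side): T must be Stream
        using (var image = await node.InvokeExportAsync<Stream>(ct, "./images", "generate"))
        {
            // consume the stream...
        }
    }
}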

View File

@@ -0,0 +1,55 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an exception caused by invoking Node.js code.
/// </summary>
public class NodeInvocationException : Exception
{
/// <summary>
/// If true, indicates that the invocation failed because the Node.js instance could not be reached. For example,
/// it might have already shut down or previously crashed.
/// </summary>
public bool NodeInstanceUnavailable { get; private set; }
/// <summary>
/// If true, indicates that even though the invocation failed because the Node.js instance could not be reached
/// or needs to be restarted, that Node.js instance may remain alive for a period in order to complete any
/// outstanding requests.
/// </summary>
public bool AllowConnectionDraining { get; private set;}
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
public NodeInvocationException(string message, string details)
: base(message + Environment.NewLine + details)
{
}
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
/// <param name="nodeInstanceUnavailable">Specifies a value for the <see cref="NodeInstanceUnavailable"/> flag.</param>
/// <param name="allowConnectionDraining">Specifies a value for the <see cref="AllowConnectionDraining"/> flag.</param>
public NodeInvocationException(string message, string details, bool nodeInstanceUnavailable, bool allowConnectionDraining)
: this(message, details)
{
// Reject a meaningless combination of flags
if (allowConnectionDraining && !nodeInstanceUnavailable)
{
throw new ArgumentException(
$"The '${ nameof(allowConnectionDraining) }' parameter cannot be true " +
$"unless the '${ nameof(nodeInstanceUnavailable) }' parameter is also true.");
}
NodeInstanceUnavailable = nodeInstanceUnavailable;
AllowConnectionDraining = allowConnectionDraining;
}
}
}
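
The constructor above enforces that AllowConnectionDraining is only meaningful together with NodeInstanceUnavailable. A purely illustrative sketch of inspecting the flags in caller code (NodeServicesImpl normally handles the restart-and-retry transparently):

using System;
using Microsoft.AspNetCore.NodeServices.HostingModels;

static class NodeInvocationErrorSketch
{
    public static void Handle(NodeInvocationException ex)
    {
        if (ex.NodeInstanceUnavailable)
        {
            // The Node instance has exited or must be restarted. When AllowConnectionDraining
            // is true, the old instance may stay alive briefly to finish outstanding requests.
            Console.WriteLine(ex.AllowConnectionDraining
                ? "Node instance restarting; outstanding requests are being drained."
                : "Node instance unavailable; it will be replaced immediately.");
        }
        else
        {
            // The instance is still reachable; only this invocation failed (see Message for details)
            Console.WriteLine(ex.Message);
        }
    }
}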

View File

@@ -0,0 +1,24 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Describes an RPC call sent from .NET code to Node.js code.
/// </summary>
public class NodeInvocationInfo
{
/// <summary>
/// Specifies the path to the Node.js module (i.e., .js file) relative to the project root.
/// </summary>
public string ModuleName { get; set; }
/// <summary>
/// If set, specifies the name of CommonJS function export to be invoked.
/// If not set, the Node.js module's default export must itself be a function to be invoked.
/// </summary>
public string ExportedFunctionName { get; set; }
/// <summary>
/// A sequence of JSON-serializable arguments to be passed to the Node.js function being invoked.
/// </summary>
public object[] Args { get; set; }
}
}
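
HttpNodeInstance serializes this type with a camel-case contract resolver, which is why the Node entrypoint reads bodyJson.moduleName, bodyJson.exportedFunctionName, and bodyJson.args. A sketch of building that payload; the module and export names are placeholders:

using Microsoft.AspNetCore.NodeServices.HostingModels;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;

static class InvocationPayloadSketch
{
    public static string Build()
    {
        var info = new NodeInvocationInfo
        {
            ModuleName = "./someModule",        // placeholder module path
            ExportedFunctionName = "myExport",  // placeholder export name
            Args = new object[] { 1, "two" }
        };

        // Same settings as HttpNodeInstance.jsonSerializerSettings
        var settings = new JsonSerializerSettings
        {
            ContractResolver = new CamelCasePropertyNamesContractResolver(),
            TypeNameHandling = TypeNameHandling.None
        };

        // Produces: {"moduleName":"./someModule","exportedFunctionName":"myExport","args":[1,"two"]}
        return JsonConvert.SerializeObject(info, settings);
    }
}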

View File

@@ -0,0 +1,17 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over HTTP.
/// </summary>
public static void UseHttpHosting(this NodeServicesOptions options)
{
options.NodeInstanceFactory = () => new HttpNodeInstance(options);
}
}
}
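
UseHttpHosting is what the NodeServicesOptions constructor applies by default; the Sockets package's UseSocketHosting (see the API baseline above) swaps in the virtual-connection transport instead. A sketch of selecting the hosting model during registration, assuming the Microsoft.AspNetCore.NodeServices.Sockets package is referenced:

using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.AspNetCore.NodeServices.Sockets;
using Microsoft.Extensions.DependencyInjection;

static class HostingModelSelectionSketch
{
    public static void Register(IServiceCollection services)
    {
        services.AddNodeServices(options =>
        {
            // Default behaviour: out-of-process Node instances reached over HTTP
            options.UseHttpHosting();

            // Alternative from the Sockets package: RPC over multiplexed virtual connections
            // options.UseSocketHosting();
        });
    }
}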

View File

@@ -0,0 +1,475 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Class responsible for launching a Node child process on the local machine, determining when it is ready to
/// accept invocations, detecting if it dies on its own, and finally terminating it on disposal.
///
/// This abstract base class uses the input/output streams of the child process to perform a simple handshake
/// to determine when the child process is ready to accept invocations. This is agnostic to the mechanism that
/// derived classes use to actually perform the invocations (e.g., they could use HTTP-RPC, or a binary TCP
/// protocol, or any other RPC-type mechanism).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.INodeInstance" />
public abstract class OutOfProcessNodeInstance : INodeInstance
{
/// <summary>
/// The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr is being redirected.
/// </summary>
protected readonly ILogger OutputLogger;
private const string ConnectionEstablishedMessage = "[Microsoft.AspNetCore.NodeServices:Listening]";
private readonly TaskCompletionSource<object> _connectionIsReadySource = new TaskCompletionSource<object>();
private bool _disposed;
private readonly StringAsTempFile _entryPointScript;
private FileSystemWatcher _fileSystemWatcher;
private int _invocationTimeoutMilliseconds;
private bool _launchWithDebugging;
private readonly Process _nodeProcess;
private int? _nodeDebuggingPort;
private bool _nodeProcessNeedsRestart;
private readonly string[] _watchFileExtensions;
/// <summary>
/// Creates a new instance of <see cref="OutOfProcessNodeInstance"/>.
/// </summary>
/// <param name="entryPointScript">The path to the entry point script that the Node instance should load and execute.</param>
/// <param name="projectPath">The root path of the current project. This is used when resolving Node.js module paths relative to the project root.</param>
/// <param name="watchFileExtensions">The filename extensions that should be watched within the project root. The Node instance will automatically shut itself down if any matching file changes.</param>
/// <param name="commandLineArguments">Additional command-line arguments to be passed to the Node.js instance.</param>
/// <param name="applicationStoppingToken">A token that indicates when the host application is stopping.</param>
/// <param name="nodeOutputLogger">The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr (and other log information) should be written.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="invocationTimeoutMilliseconds">The maximum duration, in milliseconds, to wait for RPC calls to complete.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 debugger connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 debugger connections on this port.</param>
public OutOfProcessNodeInstance(
string entryPointScript,
string projectPath,
string[] watchFileExtensions,
string commandLineArguments,
CancellationToken applicationStoppingToken,
ILogger nodeOutputLogger,
IDictionary<string, string> environmentVars,
int invocationTimeoutMilliseconds,
bool launchWithDebugging,
int debuggingPort)
{
if (nodeOutputLogger == null)
{
throw new ArgumentNullException(nameof(nodeOutputLogger));
}
OutputLogger = nodeOutputLogger;
_entryPointScript = new StringAsTempFile(entryPointScript, applicationStoppingToken);
_invocationTimeoutMilliseconds = invocationTimeoutMilliseconds;
_launchWithDebugging = launchWithDebugging;
var startInfo = PrepareNodeProcessStartInfo(_entryPointScript.FileName, projectPath, commandLineArguments,
environmentVars, _launchWithDebugging, debuggingPort);
_nodeProcess = LaunchNodeProcess(startInfo);
_watchFileExtensions = watchFileExtensions;
_fileSystemWatcher = BeginFileWatcher(projectPath);
ConnectToInputOutputStreams();
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
public async Task<T> InvokeExportAsync<T>(
CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args)
{
if (_nodeProcess.HasExited || _nodeProcessNeedsRestart)
{
// This special kind of exception triggers a transparent retry - NodeServicesImpl will launch
// a new Node instance and pass the invocation to that one instead.
// Note that if the Node process is listening for debugger connections, then we need it to shut
// down immediately and not stay open for connection draining (because if it did, the new Node
// instance wouldn't be able to start, because the old one would still hold the debugging port).
var message = _nodeProcess.HasExited
? "The Node process has exited"
: "The Node process needs to restart";
throw new NodeInvocationException(
message,
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: !_launchWithDebugging);
}
// Construct a new cancellation token that combines the supplied token with the configured invocation
// timeout. Technically we could avoid wrapping the cancellationToken if no timeout is configured,
// but that's not really a major use case, since timeouts are enabled by default.
using (var timeoutSource = new CancellationTokenSource())
using (var combinedCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutSource.Token))
{
if (_invocationTimeoutMilliseconds > 0)
{
timeoutSource.CancelAfter(_invocationTimeoutMilliseconds);
}
// By overwriting the supplied cancellation token, we ensure that it isn't accidentally used
// below. We only want to pass through the token that respects timeouts.
cancellationToken = combinedCancellationTokenSource.Token;
var connectionDidSucceed = false;
try
{
// Wait until the connection is established. This will throw if the connection fails to initialize,
// or if cancellation is requested first. Note that we can't really cancel the "establishing connection"
// task because that's shared with all callers, but we can stop waiting for it if this call is cancelled.
await _connectionIsReadySource.Task.OrThrowOnCancellation(cancellationToken);
connectionDidSucceed = true;
return await InvokeExportAsync<T>(new NodeInvocationInfo
{
ModuleName = moduleName,
ExportedFunctionName = exportNameOrNull,
Args = args
}, cancellationToken);
}
catch (TaskCanceledException)
{
if (timeoutSource.IsCancellationRequested)
{
// It was very common for developers to report 'TaskCanceledException' when encountering almost any
// trouble when using NodeServices. Now we have a default invocation timeout, and attempt to give
// a more descriptive exception message if it happens.
if (!connectionDidSucceed)
{
// This is very unlikely, but for debugging, it's still useful to differentiate it from the
// case below.
throw new NodeInvocationException(
$"Attempt to connect to Node timed out after {_invocationTimeoutMilliseconds}ms.",
string.Empty);
}
else
{
// Developers encounter this fairly often (if their Node code fails without invoking the callback,
// all that the .NET side knows is that the invocation eventually times out). Previously, this surfaced
// as a TaskCanceledException, but this led to a lot of issue reports. Now we throw the following
// descriptive error.
throw new NodeInvocationException(
$"The Node invocation timed out after {_invocationTimeoutMilliseconds}ms.",
$"You can change the timeout duration by setting the {NodeServicesOptions.TimeoutConfigPropertyName} "
+ $"property on {nameof(NodeServicesOptions)}.\n\n"
+ "The first debugging step is to ensure that your Node.js function always invokes the supplied "
+ "callback (or throws an exception synchronously), even if it encounters an error. Otherwise, "
+ "the .NET code has no way to know that it is finished or has failed."
);
}
}
else
{
throw;
}
}
}
}
/// <summary>
/// Disposes this instance.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="invocationInfo">Specifies the Node.js function to be invoked and arguments to be passed to it.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
protected abstract Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo,
CancellationToken cancellationToken);
/// <summary>
/// Configures a <see cref="ProcessStartInfo"/> instance describing how to launch the Node.js process.
/// </summary>
/// <param name="entryPointFilename">The entrypoint JavaScript file that the Node.js process should execute.</param>
/// <param name="projectPath">The root path of the project. This is used when locating Node.js modules relative to the project root.</param>
/// <param name="commandLineArguments">Command-line arguments to be passed to the Node.js process.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 Inspector connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 Inspector connections on this port.</param>
/// <returns></returns>
protected virtual ProcessStartInfo PrepareNodeProcessStartInfo(
string entryPointFilename, string projectPath, string commandLineArguments,
IDictionary<string, string> environmentVars, bool launchWithDebugging, int debuggingPort)
{
// This method is virtual, as it provides a way to override the NODE_PATH or the path to node.exe
string debuggingArgs;
if (launchWithDebugging)
{
debuggingArgs = debuggingPort != default(int) ? $"--inspect={debuggingPort} " : "--inspect ";
_nodeDebuggingPort = debuggingPort;
}
else
{
debuggingArgs = string.Empty;
}
var thisProcessPid = Process.GetCurrentProcess().Id;
var startInfo = new ProcessStartInfo("node")
{
Arguments = $"{debuggingArgs}\"{entryPointFilename}\" --parentPid {thisProcessPid} {commandLineArguments ?? string.Empty}",
UseShellExecute = false,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
WorkingDirectory = projectPath
};
// Append environment vars
if (environmentVars != null)
{
foreach (var envVarKey in environmentVars.Keys)
{
var envVarValue = environmentVars[envVarKey];
if (envVarValue != null)
{
SetEnvironmentVariable(startInfo, envVarKey, envVarValue);
}
}
}
// Append projectPath to NODE_PATH so it can locate node_modules
var existingNodePath = Environment.GetEnvironmentVariable("NODE_PATH") ?? string.Empty;
if (existingNodePath != string.Empty)
{
existingNodePath += Path.PathSeparator;
}
var nodePathValue = existingNodePath + Path.Combine(projectPath, "node_modules");
SetEnvironmentVariable(startInfo, "NODE_PATH", nodePathValue);
return startInfo;
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stdout.
/// </summary>
/// <param name="outputData">The line emitted to the Node.js process's stdout.</param>
protected virtual void OnOutputDataReceived(string outputData)
{
OutputLogger.LogInformation(outputData);
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stderr.
/// </summary>
/// <param name="errorData">The line emitted to the Node.js process's stderr.</param>
protected virtual void OnErrorDataReceived(string errorData)
{
OutputLogger.LogError(errorData);
}
/// <summary>
/// Disposes the instance.
/// </summary>
/// <param name="disposing">True if the object is disposing or false if it is finalizing.</param>
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_entryPointScript.Dispose();
EnsureFileSystemWatcherIsDisposed();
}
// Make sure the Node process is finished
// TODO: Is there a more graceful way to end it? Or does this still let it perform any cleanup?
if (_nodeProcess != null && !_nodeProcess.HasExited)
{
_nodeProcess.Kill();
}
_disposed = true;
}
}
private void EnsureFileSystemWatcherIsDisposed()
{
if (_fileSystemWatcher != null)
{
_fileSystemWatcher.Dispose();
_fileSystemWatcher = null;
}
}
private static void SetEnvironmentVariable(ProcessStartInfo startInfo, string name, string value)
{
startInfo.Environment[name] = value;
}
private static Process LaunchNodeProcess(ProcessStartInfo startInfo)
{
try {
var process = Process.Start(startInfo);
// On Mac at least, a killed child process is left open as a zombie until the parent
// captures its exit code. We don't need the exit code for this process, and don't want
// to use process.WaitForExit() explicitly (we'd have to block the thread until it really
// has exited), but we don't want to leave zombies lying around either. It's sufficient
// to use process.EnableRaisingEvents so that .NET will grab the exit code and let the
// zombie be cleaned away without having to block our thread.
process.EnableRaisingEvents = true;
return process;
} catch (Exception ex) {
var message = "Failed to start Node process. To resolve this:.\n\n"
+ "[1] Ensure that Node.js is installed and can be found in one of the PATH directories.\n"
+ $" Current PATH enviroment variable is: { Environment.GetEnvironmentVariable("PATH") }\n"
+ " Make sure the Node executable is in one of those directories, or update your PATH.\n\n"
+ "[2] See the InnerException for further details of the cause.";
throw new InvalidOperationException(message, ex);
}
}
private static string UnencodeNewlines(string str)
{
if (str != null)
{
// The token here needs to match the const in OverrideStdOutputs.ts.
// See the comment there for why we're doing this.
str = str.Replace("__ns_newline__", Environment.NewLine);
}
return str;
}
private void ConnectToInputOutputStreams()
{
var initializationIsCompleted = false;
_nodeProcess.OutputDataReceived += (sender, evt) =>
{
if (evt.Data == ConnectionEstablishedMessage && !initializationIsCompleted)
{
_connectionIsReadySource.SetResult(null);
initializationIsCompleted = true;
}
else if (evt.Data != null)
{
OnOutputDataReceived(UnencodeNewlines(evt.Data));
}
};
_nodeProcess.ErrorDataReceived += (sender, evt) =>
{
if (evt.Data != null)
{
if (_launchWithDebugging && IsDebuggerMessage(evt.Data))
{
OutputLogger.LogWarning(evt.Data);
}
else
{
OnErrorDataReceived(UnencodeNewlines(evt.Data));
}
}
};
_nodeProcess.BeginOutputReadLine();
_nodeProcess.BeginErrorReadLine();
}
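// Node's debugger/inspector writes its status messages to stderr even though they aren't errors,
// so ConnectToInputOutputStreams uses this check to downgrade them to warnings.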
private static bool IsDebuggerMessage(string message)
{
return message.StartsWith("Debugger attached", StringComparison.Ordinal) ||
message.StartsWith("Debugger listening ", StringComparison.Ordinal) ||
message.StartsWith("To start debugging", StringComparison.Ordinal) ||
message.Equals("Warning: This is an experimental feature and could change at any time.", StringComparison.Ordinal) ||
message.Equals("For help see https://nodejs.org/en/docs/inspector", StringComparison.Ordinal) ||
message.Contains("chrome-devtools:");
}
private FileSystemWatcher BeginFileWatcher(string rootDir)
{
if (_watchFileExtensions == null || _watchFileExtensions.Length == 0)
{
// Nothing to watch
return null;
}
var watcher = new FileSystemWatcher(rootDir)
{
IncludeSubdirectories = true,
NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName
};
watcher.Changed += OnFileChanged;
watcher.Created += OnFileChanged;
watcher.Deleted += OnFileChanged;
watcher.Renamed += OnFileRenamed;
watcher.EnableRaisingEvents = true;
return watcher;
}
private void OnFileChanged(object source, FileSystemEventArgs e)
{
if (IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.FullPath);
}
}
private void OnFileRenamed(object source, RenamedEventArgs e)
{
if (IsFilenameBeingWatched(e.OldFullPath) || IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.OldFullPath);
}
}
private bool IsFilenameBeingWatched(string fullPath)
{
if (string.IsNullOrEmpty(fullPath))
{
return false;
}
else
{
var actualExtension = Path.GetExtension(fullPath) ?? string.Empty;
return _watchFileExtensions.Any(actualExtension.Equals);
}
}
private void RestartDueToFileChange(string fullPath)
{
OutputLogger.LogInformation($"Node will restart because file changed: {fullPath}");
_nodeProcessNeedsRestart = true;
// There's no need to watch for any more changes, since we're already restarting, and if the
// restart takes some time (e.g., due to connection draining), we could end up getting duplicate
// notifications.
EnsureFileSystemWatcherIsDisposed();
}
/// <summary>
/// Implements the finalization part of the IDisposable pattern by calling Dispose(false).
/// </summary>
~OutOfProcessNodeInstance()
{
Dispose(false);
}
}
}


@@ -0,0 +1,54 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Represents the ability to invoke code in a Node.js environment. Although the underlying Node.js instance
/// might change over time (e.g., the process might be restarted), the <see cref="INodeServices"/> instance
/// will remain constant.
/// </summary>
public interface INodeServices : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args);
}
}


@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>Invoke Node.js modules at runtime in ASP.NET Core applications.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<ItemGroup>
<None Remove="node_modules\**\*" />
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Hosting.Abstractions" Version="$(MicrosoftAspNetCoreHostingAbstractionsPackageVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="$(MicrosoftExtensionsLoggingConsolePackageVersion)" />
<PackageReference Include="Newtonsoft.Json" Version="$(NewtonsoftJsonPackageVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>


@@ -0,0 +1,165 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Default implementation of INodeServices. This is the primary API surface through which developers
/// make use of this package. It provides simple "InvokeAsync" methods that dispatch calls to the
/// correct Node instance, creating and destroying those instances as needed.
///
/// If a Node instance dies (or none was yet created), this class takes care of creating a new one.
/// If a Node instance signals that it needs to be restarted (e.g., because a file changed), then this
/// class will create a new instance and dispatch future calls to it, while keeping the old instance
/// alive for a defined period so that any in-flight RPC calls can complete. This latter feature is
/// analogous to the "connection draining" feature implemented by HTTP load balancers.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.INodeServices" />
internal class NodeServicesImpl : INodeServices
{
private static TimeSpan ConnectionDrainingTimespan = TimeSpan.FromSeconds(15);
private Func<INodeInstance> _nodeInstanceFactory;
private INodeInstance _currentNodeInstance;
private object _currentNodeInstanceAccessLock = new object();
private Exception _instanceDelayedDisposalException;
internal NodeServicesImpl(Func<INodeInstance> nodeInstanceFactory)
{
_nodeInstanceFactory = nodeInstanceFactory;
}
public Task<T> InvokeAsync<T>(string moduleName, params object[] args)
{
return InvokeExportAsync<T>(moduleName, null, args);
}
public Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args)
{
return InvokeExportAsync<T>(cancellationToken, moduleName, null, args);
}
public Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, CancellationToken.None);
}
public Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, cancellationToken);
}
private async Task<T> InvokeExportWithPossibleRetryAsync<T>(string moduleName, string exportedFunctionName, object[] args, bool allowRetry, CancellationToken cancellationToken)
{
ThrowAnyOutstandingDelayedDisposalException();
var nodeInstance = GetOrCreateCurrentNodeInstance();
try
{
return await nodeInstance.InvokeExportAsync<T>(cancellationToken, moduleName, exportedFunctionName, args);
}
catch (NodeInvocationException ex)
{
// If the Node instance can't complete the invocation because it needs to restart (e.g., because the underlying
// Node process has exited, or a file it depends on has changed), then we make one attempt to restart transparently.
if (allowRetry && ex.NodeInstanceUnavailable)
{
// Perform the retry after clearing away the old instance
// Since disposal is delayed even though the Node instance is replaced immediately, this produces the
// "connection draining" feature whereby in-flight RPC calls are given a certain period to complete.
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance == nodeInstance)
{
var disposalDelay = ex.AllowConnectionDraining ? ConnectionDrainingTimespan : TimeSpan.Zero;
DisposeNodeInstance(_currentNodeInstance, disposalDelay);
_currentNodeInstance = null;
}
}
// On the next call, don't allow retries, because we could get into an infinite retry loop, or a long retry
// loop that masks an underlying problem. A newly-created Node instance should be able to accept invocations,
// or something more serious must be wrong.
return await InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ false, cancellationToken);
}
else
{
throw;
}
}
}
public void Dispose()
{
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance != null)
{
DisposeNodeInstance(_currentNodeInstance, delay: TimeSpan.Zero);
_currentNodeInstance = null;
}
}
}
private void DisposeNodeInstance(INodeInstance nodeInstance, TimeSpan delay)
{
if (delay == TimeSpan.Zero)
{
nodeInstance.Dispose();
}
else
{
Task.Run(async () => {
try
{
await Task.Delay(delay);
nodeInstance.Dispose();
}
catch(Exception ex)
{
// Nothing's waiting for the delayed disposal task, so any exceptions in it would
// by default just get ignored. To make these discoverable, capture them here so
// they can be rethrown to the next caller to InvokeExportAsync.
_instanceDelayedDisposalException = ex;
}
});
}
}
private void ThrowAnyOutstandingDelayedDisposalException()
{
if (_instanceDelayedDisposalException != null)
{
var ex = _instanceDelayedDisposalException;
_instanceDelayedDisposalException = null;
throw new AggregateException(
"A previous attempt to dispose a Node instance failed. See InnerException for details.",
ex);
}
}
private INodeInstance GetOrCreateCurrentNodeInstance()
{
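// Double-checked locking: read the field once without the lock, and only take the lock
// (re-checking inside it) when no instance appears to exist, so concurrent callers share one instance.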
var instance = _currentNodeInstance;
if (instance == null)
{
lock (_currentNodeInstanceAccessLock)
{
instance = _currentNodeInstance;
if (instance == null)
{
instance = _currentNodeInstance = CreateNewNodeInstance();
}
}
}
return instance;
}
private INodeInstance CreateNewNodeInstance()
{
return _nodeInstanceFactory();
}
}
}


@@ -0,0 +1,363 @@
# Microsoft.AspNetCore.NodeServices
This NuGet package provides a fast and robust way to invoke Node.js code from a .NET application (typically ASP.NET Core web apps). You can use this whenever you want to use Node/NPM-supplied functionality at runtime in ASP.NET. For example,
* Executing arbitrary JavaScript
* Runtime integration with JavaScript build or packaging tools, e.g., transpiling code via Babel
* Using NPM modules for image resizing, audio compression, language recognition, etc.
* Calling third-party services that supply Node-based APIs but don't yet ship native .NET ones
It is the underlying mechanism supporting the following packages:
* [`Microsoft.AspNetCore.SpaServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices) - builds on NodeServices, adding functionality commonly used in Single Page Applications, such as server-side prerendering, webpack middleware, and integration between server-side and client-side routing.
* [`Microsoft.AspNetCore.AngularServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.AngularServices) and [`Microsoft.AspNetCore.ReactServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.ReactServices) - these build on `SpaServices`, adding helpers specific to Angular and React, such as cache priming and integrating server-side and client-side validation
### Requirements
* [Node.js](https://nodejs.org/en/)
* To check that it is installed and can be found, run `node -v` on a command line
* Note: If you're deploying to an Azure web site, you don't need to do anything here - Node is already installed and available in the server environments
* [.NET](https://dot.net)
* For .NET Core (e.g., ASP.NET Core apps), you need at least 1.0 RC2
* For .NET Framework, you need at least version 4.5.1.
### Installation
For .NET Core apps:
* Add `Microsoft.AspNetCore.NodeServices` to the dependencies list in your `project.json` file
* Run `dotnet restore` (or if you use Visual Studio, just wait a moment - it will restore dependencies automatically)
For .NET Framework apps:
* `nuget install Microsoft.AspNetCore.NodeServices`
### Do you just want to build an ASP.NET Core app with Angular / React / Knockout / etc.?
In that case, you don't need to use NodeServices directly (or install it manually). You can either:
* **Recommended:** Use the `aspnetcore-spa` Yeoman generator to get a ready-to-go starting point using your choice of client-side framework. [Instructions here.](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/)
* Or set up your ASP.NET Core and client-side Angular/React/KO/etc. app manually, and then use the [`Microsoft.AspNetCore.SpaServices`](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices) package to add features like server-side prerendering or Webpack middleware. But really, at least try using the `aspnetcore-spa` generator first.
# Simple usage example
## For ASP.NET Core apps
.NET Core has a built-in dependency injection (DI) system. NodeServices is designed to work with this, so you don't have to manage the creation or disposal of instances.
Enable NodeServices in your application by first adding the following to your `ConfigureServices` method in `Startup.cs`:
```csharp
public void ConfigureServices(IServiceCollection services)
{
// ... all your existing configuration is here ...
// Enable Node Services
services.AddNodeServices();
}
```
Now you can receive an instance of `INodeServices` as an action method parameter to any MVC action, and then use it to make calls into Node.js code, e.g.:
```csharp
public async Task<IActionResult> MyAction([FromServices] INodeServices nodeServices)
{
var result = await nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
return Content("1 + 2 = " + result);
}
```
Of course, you also need to supply the Node.js code you want to invoke. Create a file called `addNumbers.js` at the root of your ASP.NET Core application, and add the following code:
```javascript
module.exports = function (callback, first, second) {
var result = first + second;
callback(/* error */ null, result);
};
```
As you can see, the exported JavaScript function will receive the arguments you pass from .NET (as long as they are JSON-serializable), along with a Node-style callback you can use to send back a result or error when you are ready.
When the `InvokeAsync<T>` method receives the result back from Node, the result will be JSON-deserialized to whatever generic type you specified when calling `InvokeAsync<T>` (e.g., above, that type is `int`). If `InvokeAsync<T>` receives an error from your Node code, it will throw an exception describing that error.
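If you want to handle such failures yourself, the exception can be caught like any other. Here is a minimal sketch (it reuses the `addNumbers` module above and catches the general `Exception` type for brevity; the concrete exception type raised may be more specific):
```csharp
public async Task<IActionResult> MyActionWithErrorHandling([FromServices] INodeServices nodeServices)
{
    try
    {
        // If the Node.js code calls callback(someError, ...) or throws, this await throws in .NET.
        var result = await nodeServices.InvokeAsync<int>("./addNumbers", 1, 2);
        return Content("1 + 2 = " + result);
    }
    catch (Exception ex)
    {
        // ex.Message describes the error reported by the Node.js code.
        return Content("The Node invocation failed: " + ex.Message);
    }
}
```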
If you want to put `addNumbers.js` inside a subfolder rather than the root of your app, then also amend the path in the `InvokeAsync<T>` call to match that path.
## For non-ASP.NET apps
In other types of .NET Core app, where you don't have ASP.NET supplying an `IServiceCollection` to you, you'll need to instantiate your own DI container. For example, add a reference to the .NET package `Microsoft.Extensions.DependencyInjection`, then construct an `IServiceCollection` and register NodeServices as usual:
```csharp
var services = new ServiceCollection();
services.AddNodeServices(options => {
// Set any properties that you want on 'options' here
});
```
Now you can ask it to supply the shared `INodeServices` instance:
```csharp
var serviceProvider = services.BuildServiceProvider();
var nodeServices = serviceProvider.GetRequiredService<INodeServices>();
```
Or, if you want to obtain a separate (non-shared) `INodeServices` instance:
```csharp
var options = new NodeServicesOptions(serviceProvider) { /* Assign/override any other options here */ };
var nodeServices = NodeServicesFactory.CreateNodeServices(options);
```
Besides this, the usage is the same as described for ASP.NET above, so you can now call `nodeServices.InvokeAsync<T>(...)` etc.
You can dispose the `nodeServices` object whenever you are done with it (and it will shut down the associated Node.js instance), but because these instances are expensive to create, you should whenever possible retain and reuse instances. Don't dispose the shared instance returned from `serviceProvider.GetRequiredService` (except perhaps if you know your application is shutting down, although .NET's finalizers will dispose it anyway if the shutdown is graceful).
NodeServices instances are thread-safe - you can call `InvokeAsync<T>` simultaneously from multiple threads. Also, they are smart enough to detect if the associated Node instance has died and will automatically start a new Node instance if needed.
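For instance, here is a hedged sketch of issuing several calls in parallel against one shared instance (it reuses the `./addNumbers` module above and assumes `using System.Linq;` and `using System.Threading.Tasks;` are in scope):
```csharp
// Start several invocations concurrently on the same INodeServices instance...
var additionTasks = Enumerable.Range(1, 5)
    .Select(i => nodeServices.InvokeAsync<int>("./addNumbers", i, i))
    .ToArray();

// ...then await them all. Results come back in the same order as the tasks were created.
var sums = await Task.WhenAll(additionTasks); // e.g., 2, 4, 6, 8, 10
```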
# API Reference
### AddNodeServices
**Signatures:**
```csharp
AddNodeServices()
AddNodeServices(Action<NodeServicesOptions> setupAction)
```
This is an extension method on `IServiceCollection`. It registers NodeServices with ASP.NET Core's DI system. Typically you should call this from the `ConfigureServices` method in your `Startup.cs` file.
To access this extension method, you'll need to add the following namespace import to the top of your file, if it isn't already there:
```csharp
using Microsoft.Extensions.DependencyInjection;
```
**Examples**
Using default options:
```csharp
services.AddNodeServices();
```
Or, specifying options:
```csharp
services.AddNodeServices(options =>
{
options.WatchFileExtensions = new[] { ".coffee", ".sass" };
// ... etc. - see other properties below
});
```
**Parameters**
* `setupAction` - type: `Action<NodeServicesOptions>`
* Optional. If not specified, defaults will be used.
* Properties on `NodeServicesOptions`:
* `HostingModel` - a `NodeHostingModel` enum value. See: [hosting models](#hosting-models)
* `ProjectPath` - if specified, controls the working directory used when launching Node instances. This affects, for example, the location that `require` statements resolve relative paths against. If not specified, your application root directory is used.
* `WatchFileExtensions` - if specified, the launched Node instance will watch for changes to any files with these extensions, and will auto-restart when any are changed. The default array includes `.js`, `.jsx`, `.ts`, `.tsx`, `.json`, and `.html`.
**Return type**: None. But once you've done this, you can get `INodeServices` instances out of ASP.NET Core's DI system. Typically it will be a singleton instance.
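Because the registration is typically a singleton, constructor injection works just as well as the `[FromServices]` style shown earlier. A brief illustrative sketch (the class name and module path here are hypothetical):
```csharp
public class EmailTemplateRenderer
{
    private readonly INodeServices _nodeServices;

    // INodeServices is supplied by the DI container wherever it constructs this class.
    public EmailTemplateRenderer(INodeServices nodeServices)
    {
        _nodeServices = nodeServices;
    }

    public Task<string> RenderAsync(string templateName, object model)
        => _nodeServices.InvokeAsync<string>("./Node/renderTemplate", templateName, model);
}
```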
### CreateNodeServices
**Signature:**
```csharp
CreateNodeServices(NodeServicesOptions options)
```
Supplies a new (non-shared) instance of `NodeServices`.
**Example**
```csharp
var options = new NodeServicesOptions(serviceProvider); // Obtains default options from DI config
var nodeServices = NodeServicesFactory.CreateNodeServices(options);
```
**Parameters**
* `options` - type: `NodeServicesOptions`.
* Configures the returned `NodeServices` instance.
* Properties:
* `HostingModel` - a `NodeHostingModel` enum value. See: [hosting models](#hosting-models)
* `ProjectPath` - if specified, controls the working directory used when launching Node instances. This affects, for example, the location that `require` statements resolve relative paths against. If not specified, your application root directory is used.
* `WatchFileExtensions` - if specified, the launched Node instance will watch for changes to any files with these extensions, and will auto-restart when any are changed.
**Return type:** `NodeServices`
If you create a `NodeServices` instance this way, you can also dispose it (call `nodeServiceInstance.Dispose();`) and it will shut down the associated Node instance. But because these instances are expensive to create, you should whenever possible retain and reuse your `NodeServices` object. They are thread-safe - you can call `nodeServiceInstance.InvokeAsync<T>(...)` simultaneously from multiple threads.
### InvokeAsync&lt;T&gt;
**Signature:**
```csharp
InvokeAsync<T>(string moduleName, params object[] args)
```
Asynchronously calls a JavaScript function and returns the result, or throws an exception if the result was an error.
**Example 1: Getting a JSON-serializable object from Node (the most common use case)**
```csharp
var result = await myNodeServicesInstance.InvokeAsync<TranspilerResult>(
"./Node/transpile",
pathOfSomeFileToBeTranspiled);
```
... where `TranspilerResult` might be defined as follows:
```csharp
public class TranspilerResult
{
public string Code { get; set; }
public string[] Warnings { get; set; }
}
```
... and the corresponding JavaScript module (in `Node/transpile.js`) could be implemented as follows:
```javascript
module.exports = function (callback, filePath) {
// Invoke some external transpiler (e.g., an NPM module) then:
callback(null, {
code: theTranspiledCodeAsAString,
warnings: someArrayOfStrings
});
};
```
**Example 2: Getting a stream of binary data from Node**
```csharp
var imageStream = await myNodeServicesInstance.InvokeAsync<Stream>(
"./Node/resizeImage",
fullImagePath,
width,
height);
// In an MVC action method, you can pipe the result to the response as follows
return File(imageStream, someContentType);
```
... where the corresponding JavaScript module (in `Node/resizeImage.js`) could be implemented as follows:
```javascript
var sharp = require('sharp'); // A popular image manipulation package on NPM
module.exports = function(result, physicalPath, maxWidth, maxHeight) {
// Invoke the 'sharp' NPM module, and have it pipe the resulting image data back to .NET
sharp(physicalPath)
.resize(maxWidth || null, maxHeight || null)
.pipe(result.stream);
}
```
There's a working image resizing example following this approach [here](https://github.com/aspnet/JavaScriptServices/tree/dev/samples/misc/NodeServicesExamples) - see the [C# code](https://github.com/aspnet/JavaScriptServices/blob/dev/samples/misc/NodeServicesExamples/Controllers/ResizeImage.cs) and the [JavaScript code](https://github.com/aspnet/JavaScriptServices/blob/dev/samples/misc/NodeServicesExamples/Node/resizeImage.js).
**Parameters**
* `moduleName` - type: `string`
* The name of a JavaScript module that Node.js must be able to resolve by calling `require(moduleName)`. This can be a relative path such as `"./Some/Directory/mymodule"`. If you don't specify the `.js` filename extension, Node.js will infer it anyway.
* `params`
* Any set of JSON-serializable objects you want to pass to the exported JavaScript function
**Return type:** `T`, which must be:
* A JSON-serializable .NET type, if your JavaScript code uses the `callback(error, result)` pattern to return an object, as in example 1 above
* Or, the type `System.IO.Stream`, if your JavaScript code writes data to the `result.stream` object (which is a [Node `Duplex` stream](https://nodejs.org/api/stream.html#stream_class_stream_duplex)), as in example 2 above
### InvokeExportAsync&lt;T&gt;
**Signature**
```csharp
InvokeExportAsync<T>(string moduleName, string exportName, params object[] args)
```
This is exactly the same as `InvokeAsync<T>`, except that it also takes an `exportName` parameter. You can use this if you want your JavaScript module to export more than one function.
**Example**
```csharp
var someString = await myNodeServicesInstance.InvokeExportAsync<string>(
"./Node/myNodeApis",
"getMeAString");
var someStringInFrench = await myNodeServicesInstance.InvokeExportAsync<string>(
"./Node/myNodeApis",
"convertLanguage"
someString,
"fr-FR");
```
... where the corresponding JavaScript module (in `Node/myNodeApis.js`) could be implemented as follows:
```javascript
module.exports = {
getMeAString: function (callback) {
callback(null, 'Here is a string');
},
convertLanguage: function (callback, sourceString, targetLanguage) {
// Implementation detail left as an exercise for the reader
doMachineTranslation(sourceString, targetLanguage, function(error, result) {
callback(error, result);
});
}
};
```
**Parameters, return type, etc.** For all other details, see the docs for [`InvokeAsync<T>`](#invokeasynct)
## Hosting models
NodeServices has a pluggable hosting/transport mechanism, because it is an abstraction over various possible ways to invoke Node.js from .NET. This allows more high-level facilities (e.g., for Angular prerendering) to be agnostic to the details of launching Node and communicating with it - those high-level facilities can just trust that *somehow* we can invoke code in Node for them.
Using this abstraction, we could run Node inside the .NET process, in a separate process on the same machine, or even on a different machine altogether. At the time of writing, all the built-in hosting mechanisms work by launching Node as a separate process on the same machine as your .NET code.
**What about Edge.js?**
[Edge.js](http://tjanczuk.github.io/edge/#/) hosts Node.js inside a .NET process, or vice-versa, and lets you interoperate between the two.
NodeServices is not meant to compete with Edge.js. Instead, NodeServices is an abstraction over all possible ways to invoke Node from .NET. Eventually we may offer an in-process Node hosting mechanism via Edge.js, without you needing to change your higher-level code. This can be done when Edge.js supports hosting Node in cross-platform .NET Core processes ([discussion](https://github.com/tjanczuk/edge/issues/279)).
**What about VroomJS?**
People have asked about using [VroomJS](https://github.com/fogzot/vroomjs) as a hosting mechanism. We don't currently plan to implement that, because Vroom only supplies a V8 runtime environment, not a complete Node environment. The difference is that, with a true Node environment, *all* NPM modules and Node code will work exactly as expected, whereas in a Vroom environment, code will only work if it doesn't use any Node primitives, which rules out large portions of the NPM landscape.
### Built-in hosting models
Normally, you can just use the default hosting model, and not worry about it. But if you have some special requirements, you can write your own hosting model, or reference a package that supplies one.
For example, you could use the 'socket' hosting model. It performs RPC between .NET and Node.js using a fast, low-level binary channel rather than the default HTTP transport. To do this, first install the NuGet package `Microsoft.AspNetCore.NodeServices.Sockets`. Then, at the top of your `Startup.cs` file, add:
```csharp
using Microsoft.AspNetCore.NodeServices.Sockets;
```
...then in your `Startup.cs` file's `ConfigureServices` method, you can configure:
```csharp
services.AddNodeServices(options => {
options.UseSocketHosting();
});
```
Now when you run your application, it will use the socket-based hosting and transport mechanism. In the past, the socket transport was faster than HTTP, but since .NET Core 1.1 improved the performance of `HttpClient` there isn't really any speed difference any more, so there's no longer any significant advantage to using `Microsoft.AspNetCore.NodeServices.Sockets`.
### Custom hosting models
If you implement a custom hosting model (by implementing `INodeInstance`), then you can cause it to be used by populating `NodeInstanceFactory` on your options:
```csharp
services.AddNodeServices(options =>
{
options.NodeInstanceFactory = () => new MyCustomNodeInstance();
});
```
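As a rough, non-authoritative sketch of what such a class might look like (this assumes `INodeInstance`, from `Microsoft.AspNetCore.NodeServices.HostingModels`, exposes the `InvokeExportAsync<T>` and `Dispose` members that `NodeServicesImpl` calls; check the actual interface definition before implementing it):
```csharp
internal class MyCustomNodeInstance : INodeInstance
{
    public Task<T> InvokeExportAsync<T>(
        CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args)
    {
        // Transport the invocation to whatever Node.js environment you manage (a remote host,
        // a pre-existing process, etc.) and deserialize the JSON response to T.
        throw new NotImplementedException("Transport-specific logic goes here.");
    }

    public void Dispose()
    {
        // Release whatever the transport holds open (sockets, child processes, and so on).
    }
}
```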


@@ -0,0 +1,94 @@
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
import './Util/PatchModuleResolutionLStat';
import './Util/OverrideStdOutputs';
import * as http from 'http';
import * as path from 'path';
import { parseArgs } from './Util/ArgsUtil';
import { exitWhenParentExits } from './Util/ExitWhenParentExits';
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
const dynamicRequire: (name: string) => any = eval('require');
const server = http.createServer((req, res) => {
readRequestBodyAsJson(req, bodyJson => {
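// Track whether a response has already been produced (via the callback or the 'stream' property),
// so that at most one response is sent even if the invoked module completes more than once.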
let hasSentResult = false;
const callback = (errorValue, successValue) => {
if (!hasSentResult) {
hasSentResult = true;
if (errorValue) {
respondWithError(res, errorValue);
} else if (typeof successValue !== 'string') {
// Arbitrary object/number/etc - JSON-serialize it
let successValueJson: string;
try {
successValueJson = JSON.stringify(successValue);
} catch (ex) {
// JSON serialization error - pass it back to .NET
respondWithError(res, ex);
return;
}
res.setHeader('Content-Type', 'application/json');
res.end(successValueJson);
} else {
// String - can bypass JSON-serialization altogether
res.setHeader('Content-Type', 'text/plain');
res.end(successValue);
}
}
};
// Support streamed responses
Object.defineProperty(callback, 'stream', {
enumerable: true,
get: function() {
if (!hasSentResult) {
hasSentResult = true;
res.setHeader('Content-Type', 'application/octet-stream');
}
return res;
}
});
try {
const resolvedPath = path.resolve(process.cwd(), bodyJson.moduleName);
const invokedModule = dynamicRequire(resolvedPath);
const func = bodyJson.exportedFunctionName ? invokedModule[bodyJson.exportedFunctionName] : invokedModule;
if (!func) {
throw new Error('The module "' + resolvedPath + '" has no export named "' + bodyJson.exportedFunctionName + '"');
}
func.apply(null, [callback].concat(bodyJson.args));
} catch (synchronousException) {
callback(synchronousException, null);
}
});
});
const parsedArgs = parseArgs(process.argv);
const requestedPortOrZero = parsedArgs.port || 0; // 0 means 'let the OS decide'
server.listen(requestedPortOrZero, 'localhost', function () {
// Signal to HttpNodeHost which port it should make its HTTP connections on
console.log('[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port ' + server.address().port + ']');
// Signal to the NodeServices base class that we're ready to accept invocations
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
function readRequestBodyAsJson(request, callback) {
let requestBodyAsString = '';
request.on('data', chunk => { requestBodyAsString += chunk; });
request.on('end', () => { callback(JSON.parse(requestBodyAsString)); });
}
function respondWithError(res: http.ServerResponse, errorValue: any) {
res.statusCode = 500;
res.end(JSON.stringify({
errorMessage: errorValue.message || errorValue,
errorDetails: errorValue.stack || null
}));
}


@@ -0,0 +1,18 @@
export function parseArgs(args: string[]): any {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
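// e.g., parseArgs(['node', 'entry.js', '--port', '1234', '--parentPid', '56']) => { port: '1234', parentPid: '56' }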
const result = {};
let currentKey = null;
args.forEach(arg => {
if (arg.indexOf('--') === 0) {
const argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
} else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}

Some files were not shown because too many files have changed in this diff.