Compare commits


189 Commits

Author SHA1 Message Date
Steve Sanderson
da705176c8 Have CI produce final 2.0.0 RTM build 2018-02-21 14:10:47 +00:00
Steve Sanderson
4b54cd1a32 In SpaProxy, don't fail if there are non-forwardable headers. Fixes #1543. 2018-02-21 14:02:19 +00:00
Steve Sanderson
bf1121f402 Now that the 2.0.0-rc2-final build has been issued, update version to 2.0.0-rc3-* 2018-02-08 11:46:03 +00:00
Steve Sanderson
531a22774a Have CI produce final 2.0.0 RC2 build 2018-02-08 11:44:29 +00:00
Steve Sanderson
9b7994bc9b In Websocket proxy, don't forward User-Agent. Fixes #1469. 2018-02-08 11:39:28 +00:00
Steve Sanderson
c01193dd7b Now 2.0.0-rc1-final is published, change version to 2.0.0-rc2-* (i.e., with timestamps until final) 2018-01-04 10:54:51 +00:00
Steve Sanderson
e6708aa549 In Angular CLI middleware, remove additional level of timeouts since it's now covered upstream. Part of #1447 2018-01-03 11:48:38 +00:00
Steve Sanderson
c03fea4018 Bump version to 2.0.0-rc1 2018-01-02 15:56:49 +00:00
Steve Sanderson
8a8db2317c Allow explicit configuration of StaticFileOptions in new SPA APIs. Fixes #1424. 2018-01-02 15:46:20 +00:00
Steve Sanderson
26d666d4f1 Allow configuration of SPA startup timeout. Part of #1447 2018-01-02 14:13:47 +00:00
Steve Sanderson
e024e5bdcd When a SPA dev server (or prerendering build) takes too long to start up, only fail current request, not future requests. Fixes #1447 2018-01-02 14:02:10 +00:00
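A minimal illustrative sketch of the pattern this commit describes (not the repository's actual code; the class and member names are hypothetical): the dev-server startup task is started once and shared, but the timeout is applied per request, so a slow startup fails only the current request while later requests can still succeed.

```csharp
// Hypothetical sketch (not the repository's actual code): the startup task is created
// once and shared, but the timeout is applied per request, so a timeout fails only the
// current request while the underlying startup keeps running for later requests.
using System;
using System.Threading.Tasks;

public class SpaDevServerHost
{
    private readonly Lazy<Task<int>> _startupTask; // resolves to the dev server's port

    public SpaDevServerHost(Func<Task<int>> startDevServerAsync)
    {
        _startupTask = new Lazy<Task<int>>(startDevServerAsync);
    }

    public async Task<int> GetPortAsync(TimeSpan startupTimeout)
    {
        var startup = _startupTask.Value;
        var completed = await Task.WhenAny(startup, Task.Delay(startupTimeout));
        if (completed != startup)
        {
            // Only this request fails; the shared startup task is left running so a
            // later request can still succeed once the dev server is ready.
            throw new TimeoutException($"SPA dev server did not start within {startupTimeout}.");
        }
        return await startup;
    }
}
```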
Steve Sanderson
25be429d57 Add status code support to SpaPrerenderingExtensions 2017-12-12 12:29:33 +00:00
Steve Sanderson
4ed3fab3bc Set SpaServices.Extensions package version to 2.0.0-preview1-final 2017-12-07 15:02:27 +00:00
Steve Sanderson
8da62b8565 Make AngularCliBuilder provide better information about timeouts 2017-11-22 15:06:45 +00:00
Steve Sanderson
39e6121937 Handle @angular/cli not accepting requests immediately on startup 2017-11-17 14:58:22 +00:00
Steve Sanderson
c002937377 Add MSBuild targets to enable pushing to NuGet feed 2017-11-16 11:19:09 +00:00
Steve Sanderson
eb415b6282 Update Readme in OOB branch 2017-11-16 11:04:55 +00:00
Steve Sanderson
dcbe4b1c33 Remove sources for all packages except SpaServices.Extensions, since that's all this OOB release branch needs to build 2017-11-16 10:19:04 +00:00
Steve Sanderson
f2175e6c23 Remove all samples from SpaServices.Extensions OOB release branch, since we only want to build the one package 2017-11-16 10:17:14 +00:00
Steve Sanderson
064f43ab26 In this OOB-release branch, make SpaServices.Extensions depend on the existing published SpaServices v2.0.1 2017-11-16 10:12:38 +00:00
Steve Sanderson
685f7da091 Revert "Clean up how IHttpContextAccessor is added" because this API
only exists in 2.1.x, and this branch is for 2.0.1

This reverts commit e583a17ef8.
2017-11-16 10:07:42 +00:00
Steve Sanderson
e6309ba784 Begin rel/2.0.1-extensions branch for OOB release of SpaServices.Extensions package 2017-11-16 10:07:42 +00:00
Steve Sanderson
68c4620a55 Consider React dev server ready when it starts listening, not when (and if) it compiles successfully 2017-11-16 09:48:38 +00:00
Steve Sanderson
296435e40c When capturing prerendering template, avoid problems with HTTP compression 2017-11-16 09:34:18 +00:00
Steve Sanderson
aeabbdcada Stop create-react-app from opening an extra browser tab (pointed to the wrong port) 2017-11-13 12:51:18 +00:00
Steve Sanderson
96d7f85327 Add UseReactDevelopmentServer() middleware. Factor out common code. 2017-11-13 12:35:41 +00:00
Steve Sanderson
30333e250a AddSpaStaticFiles/UseSpaStaticFiles APIs to clean up the React template (or other cases where SPA files are outside wwwroot) 2017-11-13 10:54:12 +00:00
Steve Sanderson
08002e961b In WebpackDevMiddleware.ts, support loading Webpack config files with __esModule. Fixes #1378 2017-11-09 16:57:57 -08:00
Steve Sanderson
0c77224f46 Allow prerendering middleware to pass through non-prerendered responses (important when using dev middleware) 2017-11-09 10:44:30 -08:00
Steve Sanderson
a83ec3a053 ArgumentNullException -> ArgumentException 2017-11-09 10:30:52 -08:00
Steve Sanderson
a16343681b Rename BuildOnDemand to BootModuleBuilder 2017-11-09 10:25:48 -08:00
Steve Sanderson
c8b337ebaa Add new Microsoft.AspNetCore.SpaServices.Extensions package to host new runtime functionality needed for updated templates until 2.1 ships 2017-11-09 10:09:13 -08:00
Nate McMaster
7bf5516bb2 Update appveyor.yml to execute build.cmd and add nodejs as required toolset (#1372) 2017-11-03 15:54:23 -07:00
Nate McMaster
2d98a1808c Pin tool and package versions to make builds more repeatable
Part of aspnet/Universe#575
2017-11-03 15:09:19 -07:00
Steve Sanderson
e583a17ef8 Clean up how IHttpContextAccessor is added 2017-10-23 15:13:33 +01:00
Ryan Brandenburg
ba0d82d801 Add RepositoryRoot 2017-10-23 14:52:02 +01:00
Ryan Brandenburg
e67a30132f Update bootstrappers 2017-09-22 12:13:00 +01:00
Justin Kotalik
d51bef194c Increase Minimum Version of Visual Studio to 15.3.0 2017-09-21 17:49:17 -07:00
Steve Sanderson
78436adb08 Update README.md 2017-09-07 13:54:03 +01:00
Steve Sanderson
09317b83a8 SPA templates have now moved to the aspnet/templating repo 2017-09-07 13:28:18 +01:00
Nate McMaster
a0269fb0ad Use PackageLineup to manage PackageReference versions 2017-08-30 17:11:46 -07:00
Nate McMaster
64ed1c7945 Use Directory.Build.props/targets (#1235) 2017-08-30 14:48:58 -07:00
Steve Sanderson
04fe1204a9 Rename app.module.(server|browser).ts to app.(server|browser).module.ts. Fixes #1228. 2017-08-25 11:02:02 -07:00
Steve Sanderson
e2030fb1fa Handle publicPath=/ in HMR. Fixes #1191. 2017-08-24 17:52:35 -07:00
Steve Sanderson
e5f1299239 Use devDependencies consistently in templates (no packages required in production, as webpack publish builds are standalone). Fixes #747 2017-08-24 15:31:34 -07:00
Steve Sanderson
c922eee1d6 Fix artifacts dir for test 2017-08-23 17:15:37 -07:00
Steve Sanderson
8b243e8cc7 Simplify build script further 2017-08-23 17:12:06 -07:00
Steve Sanderson
28920c7691 Simplify .gitignore 2017-08-23 16:57:43 -07:00
Steve Sanderson
412ec1b427 Build templates nupkgs directly from source without copying to staging location 2017-08-23 16:56:07 -07:00
Steve Sanderson
c62a3b491c Fix templates directory structure to produce correct nupkg output 2017-08-23 16:38:16 -07:00
Steve Sanderson
559832bb6d Remove dynamic content replacement from nuspec files 2017-08-23 15:41:09 -07:00
Steve Sanderson
45d645931b Remove template build dynamic filename replacement. Working towards eliminating template build process completely. 2017-08-23 15:27:15 -07:00
Steve Sanderson
8d6119f31d Remove the Yeoman-specific gitignore workaround 2017-08-23 15:04:04 -07:00
Steve Sanderson
0291686b20 Reorganize templates into dir structure matching 'dotnet new' templates 2017-08-23 14:58:49 -07:00
Steve Sanderson
7c52be5e42 Stop generating .template.config files dynamically. Convert them to plain files on disk. 2017-08-22 18:06:48 -07:00
Steve Sanderson
900e9ca835 Add deprecation notice to Yeoman package 2017-08-22 17:11:02 -07:00
Steve Sanderson
ad758b1060 Update AppVeyor config to remove Yeoman artifact reference 2017-08-22 16:20:25 -07:00
Steve Sanderson
cd9ad38a99 Run tests against 'dotnet new' output instead of Yeoman output 2017-08-22 16:13:34 -07:00
Steve Sanderson
e057cb35ec Remove Yeoman from the template build process 2017-08-22 14:43:51 -07:00
Steve Sanderson
eea2066a6d Remove Yeoman generator - replace it with deprecation notice. 2017-08-22 14:33:30 -07:00
Steve Sanderson
d6ae8829b6 In HMR, don't rely on default JsonSerializer settings. Fixes #688 2017-08-21 17:11:47 -07:00
Steve Sanderson
a94ac6f37e For Redux dev tools, use newer __REDUX_DEVTOOLS_EXTENSION__ API. Fixes #1196 2017-08-21 16:48:30 -07:00
Steve Sanderson
a40adab38d In non-ASP.NET apps, default project path to current working directory. Fixes #1100. 2017-08-21 16:40:59 -07:00
Stephan Troyer
c2a284d5b8 small Knockout cleanup 2017-08-21 16:17:10 -07:00
frederikprijck
fc398d602a Allow lazy loading with AngularSpa in dev build
Previously, the AngularSpa didn't include `angular2-router-loader`.
This commit ensures it does.

Closes #1194
2017-08-21 16:06:18 -07:00
Steve Sanderson
90c59ff4e7 Merge branch 'fix-angular-material-publishing' into dev 2017-08-21 15:53:19 -07:00
Steve Lathrop
a7e715c88f Small grammatical fix to README.md 2017-08-21 15:37:58 -07:00
alejandro garcia
6dddc9d01d Removed json loader from react redux template 2017-08-21 15:37:26 -07:00
Steve Sanderson
5ed1a35ce0 Fix problems with AoT when using Angular Material. Fixes #1168 2017-08-03 18:00:46 +01:00
Steve Sanderson
680ba7497a Merge branch 'rel/2.0.0-templates' into dev 2017-08-03 10:52:54 +01:00
Steve Sanderson
287c10fd2e Bump additional SPA templates package version to 1.0.0 2017-08-03 10:52:08 +01:00
John Luo
63f7ac9330 Ensure fallback to curl after failed wget 2017-08-02 14:32:21 -07:00
John Luo
d2858beaa1 Update __get_remote_file logic 2017-08-02 12:44:45 -07:00
Steve Sanderson
5f1450c9ba Make aspnet-angular compatible with AoT compilation 2017-08-02 10:53:46 +01:00
Steve Sanderson
c83605baff Add aspnet-angular NPM package containing HttpWithStateTransfer utility 2017-08-02 10:26:46 +01:00
Stephen Lautier
fc12d722b8 fix(webpack): fix middleware to specifically serialize options as non-camelcased 2017-08-02 09:24:42 +01:00
Eric Green
372e597f34 Set the HttpClient Timeout so it is always longer than InvocationTimeoutMilliseconds.
HttpClient defaults to a 100-second timeout. If InvocationTimeoutMilliseconds is greater than that default, the task fails with a "Task was canceled" exception.
2017-08-02 09:22:34 +01:00
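A minimal sketch of the idea in this commit, assuming a hypothetical factory method (the name is illustrative, not the repository's API): pad HttpClient.Timeout so the configured invocation timeout always fires before HttpClient's own 100-second default.

```csharp
// Hypothetical sketch (names are illustrative, not the repository's API): keep the
// HTTP-level timeout above the invocation timeout so the invocation timeout is the
// one that fires, instead of HttpClient's 100-second default cancelling the task.
using System;
using System.Net.Http;

public static class NodeInvocationHttpClient
{
    public static HttpClient Create(int invocationTimeoutMilliseconds)
    {
        // Pad the HttpClient timeout so it always exceeds the invocation timeout.
        var padding = TimeSpan.FromSeconds(3);
        return new HttpClient
        {
            Timeout = TimeSpan.FromMilliseconds(invocationTimeoutMilliseconds) + padding
        };
    }
}
```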
Charles Lowell
3715ec7c3f Fix typo 2017-08-02 09:21:40 +01:00
Steve Sanderson
d2eaa36372 Merge branch 'rel/2.0.0-templates' into dev 2017-07-27 15:09:17 +01:00
Steve Sanderson
169ef12cd8 Change Microsoft.DotNet.Web.Spa.ProjectTemplates to use non-prerelease version number in NuGet package 2017-07-27 14:51:21 +01:00
Steve Sanderson
f5d58f3f2e Update AppVeyor build to use SDK 006857 2017-07-27 14:43:22 +01:00
Steve Sanderson
c2e4d4f261 Re-enable the "dotnet restore" postAction, like the stock templates 2017-07-27 14:41:55 +01:00
Steve Sanderson
50481fe23f Update templates to reference ASP.NET Core 2.0.0 final 2017-07-27 14:27:15 +01:00
Nate McMaster
895a61160e Fix syntax warning when running build.sh on older versions of bash
[ci skip]
2017-07-26 10:27:55 -07:00
Nate McMaster
b8b769aa74 Update bootstrappers to use the compiled version of KoreBuild
[ci skip]
2017-07-25 16:32:52 -07:00
Pranav K
c4aad6bcab Updating to InternalAspNetCoreSdkVersion 2.1.1-* 2017-07-25 15:13:39 -07:00
Ryan Brandenburg
27f1d07d21 Set AspNetCoreVersion 2017-07-24 17:56:46 -07:00
Ryan Brandenburg
0cb14a3c68 2.0.0-rtm to 2.1.0-preview1 2017-07-24 12:31:10 -07:00
Steve Sanderson
2457b4ee5d Merge branch 'rel/2.0.0-templates' into dev 2017-07-19 14:26:12 +01:00
Steve Sanderson
789ea5a320 Update to ASP.NET Core 2.0.0-rtm-26380 2017-07-19 14:24:32 +01:00
Mike Harder
a902874754 Remove unused variable AutoMapperVersion 2017-07-19 14:19:37 +01:00
Steve Sanderson
f43ea777eb Merge branch 'rel/2.0.0-templates' into dev 2017-07-13 16:17:19 +01:00
Steve Sanderson
c79db4e8e2 In React-Redux template, move Webpack config for images into sharedConfig so that adding images doesn't break the server build 2017-07-13 16:16:45 +01:00
Steve Sanderson
d1198aeab2 Merge branch 'rel/2.0.0-templates' into dev 2017-07-13 10:14:31 +01:00
Steve Sanderson
9528dd7432 Enable TS strict mode in all templates and generally clean up TS references 2017-07-13 10:11:59 +01:00
Stephan Troyer
b8c006a3e9 Update templates to support TypeScript 'strict' mode 2017-07-13 09:22:14 +01:00
Steve Sanderson
a9ddf1413f Merge branch 'rel/2.0.0-templates' into dev 2017-07-12 23:31:33 +01:00
Steve Sanderson
8b37dc8561 Update to ASP.NET Core 2.0.0-rtm-26272 2017-07-12 23:17:22 +01:00
Steve Sanderson
7b07fb66eb Update all templates to aspnet-webpack@2.0.1 2017-07-12 23:11:30 +01:00
Steve Sanderson
cdb04c74f9 Update aspnet-webpack to 2.0.1, automatically disabling the middleware if we detect the process doesn't have sufficient disk permissions 2017-07-12 23:06:05 +01:00
Steve Sanderson
a74941e3c8 Merge branch 'rel/2.0.0' into rel/2.0.0-templates 2017-07-12 15:30:14 +01:00
Steve Sanderson
a0a710a0df Merge pull request #1111 from aspnet/rel/2.0.0
Complete the lstat patching for #1101
2017-07-12 15:27:37 +01:00
Steve Sanderson
576e3debf1 Complete the lstat patching for #1101 2017-07-12 15:27:00 +01:00
Steve Sanderson
d5f5ad7fdc Merge branch 'rel/2.0.0-templates' into dev 2017-07-12 00:39:57 +01:00
Steve Sanderson
dab0faea66 Update to AspNetCore 2.0.0-rtm-26219 2017-07-12 00:39:28 +01:00
Steve Sanderson
2df0febfba Merge branch 'rel/2.0.0-templates' into dev 2017-07-12 00:05:37 +01:00
Steve Sanderson
e65ecebac6 Make templates work with nonempty baseUrls (e.g., IIS virtual directories) 2017-07-11 23:56:51 +01:00
Steve Sanderson
bb0727c34c Update templates to newer aspnet NPM dependencies 2017-07-11 23:56:42 +01:00
Steve Sanderson
4903e12373 Publish aspnet-webpack-react as 3.0.0 since it's no longer beta 2017-07-11 23:55:50 +01:00
Steve Sanderson
56c806b34e Update aspnet-prerendering to 3.0.1, fixing the default "fetch" base URL to match the real application root URL 2017-07-11 23:55:49 +01:00
Steve Sanderson
8acba88160 Update aspnet-webpack to 2.0.0, now supporting HMR when apps are running in virtual directories 2017-07-11 18:57:52 +01:00
Steve Sanderson
b434eefd83 Merge pull request #1108 from aspnet/rel/2.0.0
Fix webpack HMR proxying logic for apps running on non-root URLs (e.g…
2017-07-11 18:57:03 +01:00
Steve Sanderson
44f86eb8a1 Fix webpack HMR proxying logic for apps running on non-root URLs (e.g., full IIS virtual directories) 2017-07-11 18:56:06 +01:00
Steve Sanderson
44360b6955 Merge pull request #1103 from aspnet/rel/2.0.0
Merge Rel/2.0.0 back to dev
2017-07-11 10:45:58 +01:00
Steve Sanderson
22ff88ab79 On Windows, patch Node's lstat to work around permissions error. Fixes #1101 2017-07-11 10:43:31 +01:00
Steve Sanderson
70d89b9ad3 Update templates to ASP.NET Core 2.0.0-rtm-26190 and SDK 2.0.0-preview3-006729 2017-07-11 10:43:31 +01:00
Ryan Brandenburg
72b1e627b0 Skip first time experience on Appveyor 2017-07-10 15:22:22 -07:00
Pranav K
c94563c700 Updating KoreBuild branch 2017-07-10 11:57:58 -07:00
Pranav K
781c5dc37c Merge branch 'rel/2.0.0' into dev 2017-07-10 11:57:58 -07:00
Pranav K
814df07034 Branching for 2.0.0 rtm 2017-07-10 11:43:36 -07:00
Pranav K
c2f63f21fd Merge branch 'rel/2.0.0' into dev 2017-07-10 11:43:36 -07:00
Steve Sanderson
4492d3eab7 In Angular template, rename boot.client.ts to boot.browser.ts for consistency 2017-07-07 17:16:47 +01:00
Steve Sanderson
efdfbf5dbb In Yeoman templates, stop restoring using Yarn, since it doesn't respect npm-shrinkwrap.json 2017-07-07 15:27:50 +01:00
Steve Sanderson
6f2e408083 Update templates to ASP.NET Core 2.0.0-preview3-26096 2017-07-07 15:13:51 +01:00
Steve Sanderson
70a4a68e7c Update templates to ASP.NET Core 2.0.0-preview3-26071 2017-07-07 12:40:41 +01:00
Steve Sanderson
a1942edbfe Update AppVeyor build to use .NET Core SDK 2.0.0-preview3-006670 2017-07-07 12:31:52 +01:00
Ryan Brandenburg
fd90301b1c Remove NETStandard.Library.NETFramework 2017-07-07 12:25:04 +01:00
Steve Sanderson
58145d33e3 Update ReactRedux template dependencies 2017-07-07 12:08:01 +01:00
Steve Sanderson
2135c30d66 Update React template dependencies 2017-07-07 12:07:14 +01:00
Steve Sanderson
5191531813 Add AoT compilation to Angular template 2017-07-07 12:07:13 +01:00
Steve Sanderson
03018f32b7 Update Angular template dependencies 2017-07-07 12:07:13 +01:00
Steve Sanderson
e632d2bb69 In domain-task/fetch, only apply HTTPS cert validation workaround for HTTPS requests (not HTTP) 2017-07-07 12:07:12 +01:00
Pranav K
7a56f643f7 Update version suffix for 2.0.0 RTM release 2017-07-06 15:08:26 -07:00
Ryan Brandenburg
056b955f52 Set "TreatWarningsAsErrors" before NuGet restore
* Ensures our build stays clean of NuGet warnings
2017-07-06 14:17:52 -07:00
Nate McMaster
0993f46c9e React to aspnet/BuildTools#293
[ci skip]
2017-07-06 10:38:00 -07:00
Steve Sanderson
4ba0358987 In AngularSpa template, log any FetchData errors but don't stop prerendering completely 2017-07-06 14:57:07 +01:00
Steve Sanderson
721e3c45fd Update ReactReduxSpa NPM shrinkwrap file 2017-07-06 14:50:34 +01:00
Steve Sanderson
27f59c6971 Make domain-task/fetch disable HTTPS cert validation when making relative requests to own server as workaround for #1089 2017-07-06 14:37:29 +01:00
Steve Sanderson
943fb2f482 In core packages, set <None Remove="node_modules\**\*" /> otherwise builds are super slow 2017-07-06 11:13:24 +01:00
Pranav K
2c4bc302de Update LICENSE.txt text 2017-07-03 14:06:26 -07:00
Steve Sanderson
5b634ee33c Remove "dotnet restore" post-action from templates for consistency with stock Web templates 2017-07-03 15:50:16 +01:00
Steve Sanderson
6e1fa2b2c3 Use ASP.NET Core CI NuGet feed to enable access to preview3 packages 2017-07-03 14:44:40 +01:00
Steve Sanderson
f72ebbd66e Remove redundant preview2 NuGet feed reference 2017-07-03 14:33:44 +01:00
Steve Sanderson
391ceec559 Update AppVeyor build to use .NET Core 2.0 Preview 3 2017-07-03 14:32:43 +01:00
Steve Sanderson
076b5304e0 Update templates to reference latest preview3 packages 2017-07-03 14:32:29 +01:00
Steve Sanderson
94fc84a9b4 Add simpler prerendering API. Fixes #607 2017-07-03 14:07:28 +01:00
Steve Sanderson
9cce26ebd8 Fix error formatting by rebuilding entrypoint-http.js 2017-07-03 14:01:31 +01:00
Steve Sanderson
513d288a5d Use GetRandomFileName instead of GetTempFileName. Fixes #1060 2017-07-03 10:15:53 +01:00
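For context, a hedged sketch of the substitution this commit describes (the helper name is hypothetical, not the repository's code): Path.GetTempFileName() creates a zero-byte file on disk and can throw once the temp directory already holds 65535 temporary files, whereas Path.GetRandomFileName() only returns a name and leaves file creation to the caller.

```csharp
// Hypothetical helper (not the repository's code): build a unique temp path without
// creating the file up front, avoiding GetTempFileName()'s on-disk side effect.
using System.IO;

public static class TempPaths
{
    public static string NewTempFilePath(string extension = ".tmp")
    {
        // GetRandomFileName() returns a cryptographically strong random name; the caller
        // decides when (and whether) the file is actually created.
        return Path.Combine(Path.GetTempPath(), Path.GetRandomFileName() + extension);
    }
}
```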
Steve Sanderson
429a432dd3 Check for null during node process disposal. Fixes #1061 2017-07-03 10:09:03 +01:00
Ryan Brandenburg
76928bcbaa Remove AssetTargetFramework 2017-06-30 10:24:48 +01:00
Steve Sanderson
95c81729cd Merge branch 'rel/2.0.0-preview2-templates' into dev 2017-06-26 15:50:46 +01:00
Steve Sanderson
8352ec7626 Add npm-shrinkwrap.json files for all templates 2017-06-26 15:48:49 +01:00
Steve Sanderson
bebb82eb69 Implement longer-term fix for #1066 (see description)
- declare an explicit @types/react-router version
- update to newer aspnet-webpack-react that doesn't include the hacky
workaround since it's not needed if you have the correct
@types/react-router version

In a subsequent commit, will also shrinkwrap to pin all dependency
versions. This isn't strictly required right now, but is the right thing
for long-term stability.
2017-06-26 15:39:19 +01:00
Steve Sanderson
3da87dd3d3 In aspnet-webpack-react 3.0.0+, remove the workaround for #1066. Templates that use this version of the package must not be affected by issue #1066 2017-06-26 15:34:30 +01:00
Steve Sanderson
117c1a6cbd Implement workaround for #1066 2017-06-26 12:51:03 +01:00
Steve Sanderson
53f5a77490 Use preview2-final NuGet feed until it ships to NuGet.org 2017-06-23 12:08:14 +01:00
Steve Sanderson
5fd9b93324 Switch to a specific version of .NET Core 2.0 Preview 2, because "latest" is 2.1 and breaks. Fixes #1058 2017-06-23 11:59:34 +01:00
Scott Addie
55bd0c05cb Update SpaServices NuGet package installation requirement 2017-06-23 11:55:00 +01:00
Steve Sanderson
f9068819be Update AppVeyor builds to use latest .NET Core SDK. Fixes #1058 2017-06-23 11:52:09 +01:00
Steve Sanderson
85bf621386 Merge branch 'rel/2.0.0-preview2-templates' into dev 2017-06-22 16:39:58 +01:00
Steve Sanderson
328eb0451b Remove NETStandard.Library.NETFramework from the templates 2017-06-22 16:39:35 +01:00
Steve Sanderson
bc7742148e Remove duplicate var definition as in #966 2017-06-21 15:06:07 +01:00
Sławomir Rosiek
4ceed817a3 Removed unused methods 2017-06-20 16:33:34 +01:00
Sławomir Rosiek
6d85e752e1 Throwing HttpInvocationException instead of generic Exception 2017-06-20 16:33:34 +01:00
Steve Sanderson
460f202832 In React templates, update @types/react to 15.0.29 for compatibility with TypeScript 2.4.0+ 2017-06-20 15:17:19 +01:00
Steve Sanderson
73054559cc Bump aspnet-prerendering version to 2.0.6 for publish 2017-06-20 15:11:23 +01:00
Steve Sanderson
d007a3fbba In aspnet-prerendering, fix type definitions to satisfy TypeScript 2.4.0. Fixes #1034 2017-06-20 15:10:29 +01:00
Steve Sanderson
fcd3cf9dd7 In aspnet-prerendering, simplify type imports/exports by changing PrerenderingInterfaces to a regular module (not a .d.ts file) 2017-06-20 15:07:11 +01:00
Sławomir Rosiek
48bca777fe Added "private": true to package.json 2017-06-19 15:04:42 +02:00
Steve Sanderson
aadcfa1fb7 Revert change that added preview2-final NuGet feed as it's not required in dev branch 2017-06-16 13:28:00 +02:00
Steve Sanderson
f683b39806 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-16 13:26:56 +02:00
Kiran Challa
ed7e07a057 Updated to use latest netstandard.library 2017-06-15 05:20:36 -07:00
Kiran Challa
749d112b08 Merge branch 'rel/2.0.0-preview2' into dev-gate 2017-06-15 03:38:10 -07:00
Steve Sanderson
90922fb3c6 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-13 22:06:55 +01:00
Nate McMaster
bcb359e355 Update System.Threading.Tasks.Dataflow dependency to latest corefx version 2017-06-13 11:37:59 -07:00
Steve Sanderson
a7a3a359fa Merge branch 'rel/2.0.0-preview2' into dev 2017-06-13 09:47:18 +01:00
Steve Sanderson
627da65692 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-13 09:39:17 +01:00
Steve Sanderson
67ba6271a0 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-11 20:33:23 +01:00
Steve Sanderson
32d9bdee6c Merge branch 'rel/2.0.0-preview2' into dev 2017-06-10 14:36:36 +01:00
Steve Sanderson
e50d4825c3 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-08 11:44:53 +01:00
Steve Sanderson
06c94d54eb Merge branch 'rel/2.0.0-preview2' into dev 2017-06-08 11:15:48 +01:00
Steve Sanderson
3685592b01 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-08 11:09:51 +01:00
Steve Sanderson
2acdad38cd Merge branch 'rel/2.0.0-preview2' into dev 2017-06-08 10:40:05 +01:00
Steve Sanderson
e331355c1d Merge branch 'rel/2.0.0-preview2' into dev 2017-06-07 22:57:37 +01:00
Steve Sanderson
d8c0bd8898 Merge branch 'rel/2.0.0-preview2' into dev 2017-06-07 15:02:29 +01:00
Josef Ottosson
0626bb1594 Update README.md
Fixes small typo.
2017-06-05 11:14:04 +01:00
Pranav K
d9e75c89ae Updating versions to preview3 2017-06-01 10:47:12 -07:00
Pranav K
ebe9d8ae92 Merge remote-tracking branch 'origin/rel/2.0.0-preview2' into dev 2017-05-31 20:00:38 -07:00
Pranav K
d42cd10975 Merge remote-tracking branch 'origin/rel/2.0.0-preview2' into dev 2017-05-31 19:44:28 -07:00
416 changed files with 2654 additions and 16167 deletions

.appveyor.yml (new executable file, 21 changed lines)

@@ -0,0 +1,21 @@
init:
- git config --global core.autocrlf true
install:
- ps: Install-Product node 6.9.2 x64
branches:
  only:
  - master
  - release
  - dev
  - /^(.*\/)?ci-.*$/
  - /^rel\/.*/
build_script:
- ps: .\run.ps1 default-build
clone_depth: 1
environment:
  global:
    DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true
    DOTNET_CLI_TELEMETRY_OPTOUT: 1
test: off
deploy: off
os: Visual Studio 2017

.gitignore (vendored, 13 changed lines)

@@ -24,19 +24,8 @@ nuget.exe
*.ncrunchsolution
*.*sdf
*.ipch
.vs/
npm-debug.log
/.build/
# The templates can't contain their own .gitignore files, because Yeoman has strange default handling for
# files with that name (https://github.com/npm/npm/issues/1862). So, each template instead has a template_gitignore
# file which gets renamed after the files are copied. And so any files that need to be excluded in the source
# repo have to be excluded here.
/templates/*/node_modules/
/templates/*/wwwroot/dist/
/templates/*/ClientApp/dist/
/templates/*/yarn.lock
.vscode/
/templates/*/Properties/launchSettings.json
global.json

Directory.Build.props (new file, 15 changed lines)

@@ -0,0 +1,15 @@
<Project>
  <Import Project="version.props" />
  <Import Project="build\dependencies.props" />
  <PropertyGroup>
    <Product>Microsoft ASP.NET Core</Product>
    <RepositoryUrl>https://github.com/aspnet/javascriptservices</RepositoryUrl>
    <RepositoryType>git</RepositoryType>
    <RepositoryRoot>$(MSBuildThisFileDirectory)</RepositoryRoot>
    <AssemblyOriginatorKeyFile>$(MSBuildThisFileDirectory)build\Key.snk</AssemblyOriginatorKeyFile>
    <SignAssembly>true</SignAssembly>
    <PublicSign Condition="'$(OS)' != 'Windows_NT'">true</PublicSign>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>
</Project>

Directory.Build.targets (new file, 5 changed lines)

@@ -0,0 +1,5 @@
<Project>
  <PropertyGroup>
    <RuntimeFrameworkVersion Condition=" '$(TargetFramework)' == 'netcoreapp2.0' ">$(MicrosoftNETCoreApp20PackageVersion)</RuntimeFrameworkVersion>
  </PropertyGroup>
</Project>


@@ -1,15 +1,12 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
VisualStudioVersion = 15.0.26430.4
MinimumVisualStudioVersion = 10.0.40219.1
VisualStudioVersion = 15.0.27019.1
MinimumVisualStudioVersion = 15.0.26730.03
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{27304DDE-AFB2-4F8B-B765-E3E2F11E886C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices", "src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj", "{66B77203-1469-41DF-92F2-2BE6900BD36F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.NodeServices.Sockets", "src\Microsoft.AspNetCore.NodeServices.Sockets\Microsoft.AspNetCore.NodeServices.Sockets.csproj", "{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.SpaServices", "src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj", "{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}"
ProjectSection(SolutionItems) = preProject
src\Directory.Build.props = src\Directory.Build.props
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-22C8-4526-BE78-74B24125D37F}"
ProjectSection(SolutionItems) = preProject
@@ -18,65 +15,32 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{99EAF1FE-2
README.md = README.md
EndProjectSection
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "samples", "samples", "{23836492-E7F4-4376-85BF-A635C304AC46}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "misc", "misc", "{E6A161EA-646C-4033-9090-95BE809AB8D9}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LatencyTest", "samples\misc\LatencyTest\LatencyTest.csproj", "{1931B19A-EC42-4D56-B2D0-FB06D17244DA}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Webpack", "samples\misc\Webpack\Webpack.csproj", "{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "NodeServicesExamples", "samples\misc\NodeServicesExamples\NodeServicesExamples.csproj", "{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "templates", "templates", "{1598B415-73F1-4B37-B3B4-0A10677ABB2D}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "build", "build", "{E415FE14-13B0-469F-836D-95059E6BAA6E}"
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{645F7363-1240-4FB6-9422-B32A327C979F}"
ProjectSection(SolutionItems) = preProject
src\build\common.props = src\build\common.props
src\build\Key.snk = src\build\Key.snk
Directory.Build.props = Directory.Build.props
Directory.Build.targets = Directory.Build.targets
EndProjectSection
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Microsoft.AspNetCore.SpaServices.Extensions", "src\Microsoft.AspNetCore.SpaServices.Extensions\Microsoft.AspNetCore.SpaServices.Extensions.csproj", "{D40BD1C4-6A6F-4213-8535-1057F3EB3400}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B77203-1469-41DF-92F2-2BE6900BD36F}.Release|Any CPU.Build.0 = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3}.Release|Any CPU.Build.0 = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC}.Release|Any CPU.Build.0 = Release|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1931B19A-EC42-4D56-B2D0-FB06D17244DA}.Release|Any CPU.Build.0 = Release|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Debug|Any CPU.Build.0 = Debug|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Release|Any CPU.ActiveCfg = Release|Any CPU
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344}.Release|Any CPU.Build.0 = Release|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE}.Release|Any CPU.Build.0 = Release|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Debug|Any CPU.Build.0 = Debug|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Release|Any CPU.ActiveCfg = Release|Any CPU
{D40BD1C4-6A6F-4213-8535-1057F3EB3400}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(NestedProjects) = preSolution
{66B77203-1469-41DF-92F2-2BE6900BD36F} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{F46DEF99-6FAA-4406-B5D8-6FF34EF669E3} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{66B071A8-EFC8-4A06-BEF6-06B99AE27EEC} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
{E6A161EA-646C-4033-9090-95BE809AB8D9} = {23836492-E7F4-4376-85BF-A635C304AC46}
{1931B19A-EC42-4D56-B2D0-FB06D17244DA} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{DE479DC3-1461-4EAD-A188-4AF7AA4AE344} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{93EFCC5F-C6EE-4623-894F-A42B22C0B6FE} = {E6A161EA-646C-4033-9090-95BE809AB8D9}
{D40BD1C4-6A6F-4213-8535-1057F3EB3400} = {27304DDE-AFB2-4F8B-B765-E3E2F11E886C}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {DDF59B0D-2DEC-45D6-8667-DCB767487101}
EndGlobalSection
EndGlobal


@@ -1,10 +1,12 @@
Copyright (c) .NET Foundation. All rights reserved.
Copyright (c) .NET Foundation and Contributors
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
these files except in compliance with the License. You may obtain a copy of the
this file except in compliance with the License. You may obtain a copy of the
License at
http://www.apache.org/licenses/LICENSE-2.0
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR


@@ -2,7 +2,8 @@
<configuration>
<packageSources>
<clear />
<add key="AspNetCore" value="https://dotnet.myget.org/F/aspnetcore-master/api/v3/index.json" />
<add key="AspNetCore" value="https://dotnet.myget.org/F/aspnetcore-ci-dev/api/v3/index.json" />
<add key="AspNetCoreTools" value="https://dotnet.myget.org/F/aspnetcore-tools/api/v3/index.json" />
<add key="NuGet" value="https://api.nuget.org/v3/index.json" />
</packageSources>
</configuration>
</configuration>

README.md (101 changed lines)

@@ -1,100 +1 @@
# JavaScriptServices
AppVeyor: [![AppVeyor](https://ci.appveyor.com/api/projects/status/gprilrckx116vc9m/branch/dev?svg=true)](https://ci.appveyor.com/project/aspnetci/javascriptservices/branch/dev)
This project is part of ASP.NET Core. You can find samples, documentation and getting started instructions for ASP.NET Core at the [Home](https://github.com/aspnet/home) repo.
## What is this?
`JavaScriptServices` is a set of client-side technologies for ASP.NET Core. It provides infrastructure that you'll find useful if you:
- Use Angular / React / Vue / Aurelia / Knockout / etc.
- Build your client-side resources using Webpack.
- Execute JavaScript on the server at runtime.
Read [Building Single Page Applications on ASP.NET Core with JavaScriptServices](https://blogs.msdn.microsoft.com/webdev/2017/02/14/building-single-page-applications-on-asp-net-core-with-javascriptservices/) for more details.
This repo contains:
* A set of NuGet/NPM packages that implement functionality for:
* Invoking arbitrary NPM packages at runtime from .NET code ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.NodeServices#simple-usage-example))
* Server-side prerendering of SPA components ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#server-side-prerendering))
* Webpack dev middleware ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#webpack-dev-middleware))
* Hot module replacement (HMR) ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#webpack-hot-module-replacement))
* Server-side and client-side routing integration ([docs](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#routing-helper-mapspafallbackroute))
* Server-side and client-side validation integration
* "Lazy loading" for Knockout apps
* A Yeoman generator that creates preconfigured app starting points ([guide](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/))
* Samples and docs
It's cross-platform (Windows, Linux, or macOS) and works with .NET Core 1.0.1 or later.
## Creating new applications
If you want to build a brand-new ASP.NET Core app that uses Angular / React / Knockout on the client, consider starting with the `aspnetcore-spa` generator. This lets you choose your client-side framework. It generates a starting point that includes applicable features such as Webpack dev middleware, server-side prerendering, and efficient production builds. It's much easier than configuring everything to work together manually!
To do this, install Yeoman and these generator templates:
npm install -g yo generator-aspnetcore-spa
Generate your new application starting point:
cd some-empty-directory
yo aspnetcore-spa
Once the generator has run and restored all the dependencies, you can start up your new ASP.NET Core SPA:
dotnet run
For a more detailed walkthrough, see [getting started with the `aspnetcore-spa` generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
## Adding to existing applications
If you have an existing ASP.NET Core application, or if you just want to use the underlying JavaScriptServices packages directly, you can install these packages using NuGet and NPM:
* `Microsoft.AspNetCore.NodeServices`
* This provides a fast and robust way for .NET code to run JavaScript on the server inside a Node.js environment. You can use this to consume arbitrary functionality from NPM packages at runtime in your ASP.NET Core app.
* Most application developers don't need to use this directly, but you can do so if you want to implement your own functionality that involves calling Node.js code from .NET at runtime.
* Find [documentation and usage examples here](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.NodeServices#microsoftaspnetcorenodeservices).
* `Microsoft.AspNetCore.SpaServices`
* This provides infrastructure that's generally useful when building Single Page Applications (SPAs) with technologies such as Angular or React (for example, server-side prerendering and webpack middleware). Internally, it uses the `NodeServices` package to implement its features.
* Find [documentation and usage examples here](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.SpaServices#microsoftaspnetcorespaservices).
* `Microsoft.AspNetCore.AngularServices`
* This builds on the `SpaServices` package and includes features specific to Angular. Currently, this includes validation helpers.
* The code is [here](https://github.com/aspnet/JavaScriptServices/tree/dev/src/Microsoft.AspNetCore.AngularServices). You'll find a usage example for [the validation helper here](https://github.com/aspnet/JavaScriptServices/blob/dev/samples/angular/MusicStore/wwwroot/ng-app/components/admin/album-edit/album-edit.ts).
There was previously a `Microsoft.AspNetCore.ReactServices` but this is not currently needed - all applicable functionality is in `Microsoft.AspNetCore.SpaServices`, because it's sufficiently general. We might add a new `Microsoft.AspNetCore.ReactServices` package in the future if new React-specific requirements emerge.
If you want to build a helper library for some other SPA framework, you can do so by taking a dependency on `Microsoft.AspNetCore.SpaServices` and wrapping its functionality in whatever way is most useful for your SPA framework.
## Samples and templates
Inside this repo, [the `templates` directory](https://github.com/aspnet/JavaScriptServices/tree/dev/templates) contains the application starting points that the `aspnetcore-spa` generator emits. You can clone this repo and run those applications directly. But it's easier to [use the Yeoman tool to run the generator](http://blog.stevensanderson.com/2016/05/02/angular2-react-knockout-apps-on-aspnet-core/).
The [`samples` directory](https://github.com/aspnet/JavaScriptServices/tree/dev/samples) contains examples of:
- Using the JavaScript services family of packages with Angular and React.
- A standalone `NodeServices` usage for runtime code transpilation and image processing.
**To run the samples:**
* Clone this repo
* At the repo's root directory (the one containing `src`, `samples`, etc.), run `dotnet restore`
* Change directory to the sample you want to run (for example, `cd samples/angular/MusicStore`)
* Restore Node dependencies by running `npm install`
* If you're trying to run the Angular "Music Store" sample, then also run `gulp` (which you need to have installed globally). None of the other samples require this.
* Run the application (`dotnet run`)
* Browse to [http://localhost:5000](http://localhost:5000)
## Contributing
If you're interested in contributing to the various packages, samples, and project templates in this repo, that's great! You can run the code in this repo as follows:
* Clone the repo
* Run `dotnet restore` at the repo root dir
* Go to whatever sample or template you want to run (for example, `cd templates/AngularSpa`)
* Restore NPM dependencies (run `npm install`)
* If the sample/template you're trying to run has a file called `webpack.config.vendor.js` at its root, run `webpack --config webpack.config.vendor.js`. If it has a file called `webpack.config.js`, run `webpack` (no args). You might need to install webpack first by running `npm install -g webpack`.
* Launch it (`dotnet run`)
If you're planning to submit a pull request, and if it's more than a trivial fix (for example, for a typo), it's usually a good idea first to file an issue describing what you're proposing to do and how it will work. Then you can find out if it's likely that such a pull request will be accepted, and how it fits into wider ongoing plans.
This branch exists only to build `Microsoft.AspNetCore.SpaServices.Extensions` for an OOB release. It does not contain sources for any other packages or samples.


@@ -1,40 +0,0 @@
init:
- git config --global core.autocrlf true
install:
- ps: Install-Product node 6.9.2 x64
# .NET Core SDK binaries
# Download .NET Core 2.0 Preview 1 SDK and add to PATH
- ps: $urlCurrent = "https://download.microsoft.com/download/0/6/5/0656B047-5F2F-4281-A851-F30776F8616D/dotnet-dev-win-x64.2.0.0-preview1-005977.zip"
- ps: $env:DOTNET_INSTALL_DIR = "$pwd\.dotnetsdk"
- ps: mkdir $env:DOTNET_INSTALL_DIR -Force | Out-Null
- ps: $tempFileCurrent = [System.IO.Path]::GetTempFileName()
- ps: (New-Object System.Net.WebClient).DownloadFile($urlCurrent, $tempFileCurrent)
- ps: Add-Type -AssemblyName System.IO.Compression.FileSystem; [System.IO.Compression.ZipFile]::ExtractToDirectory($tempFileCurrent, $env:DOTNET_INSTALL_DIR)
- ps: $env:Path = "$env:DOTNET_INSTALL_DIR;$env:Path"
build_script:
- ps: Push-Location
- cd templates/package-builder
- npm install
- npm run build
- ps: Pop-Location
artifacts:
- path: templates\package-builder\dist\artifacts\generator-aspnetcore-spa.tar.gz
  name: generator-aspnetcore-spa
- path: templates\package-builder\dist\artifacts\*.nupkg
  name: Microsoft.AspNetCore.SpaTemplates
  type: NuGetPackage
# - ps: .\build.ps1
clone_depth: 1
test_script:
- dotnet restore
- ps: Push-Location
- cd test
- npm install selenium-standalone
- ps: Start-Process node './start-selenium.js'
- npm install
- npm test
on_finish :
- ps: Pop-Location
# After running tests, upload results to Appveyor
- ps: (new-object net.webclient).UploadFile("https://ci.appveyor.com/api/testresults/junit/$($env:APPVEYOR_JOB_ID)", (Resolve-Path .\test\tmp\junit\*.xml))
deploy: off


@@ -1,2 +1,2 @@
@ECHO OFF
PowerShell -NoProfile -NoLogo -ExecutionPolicy unrestricted -Command "[System.Threading.Thread]::CurrentThread.CurrentCulture = ''; [System.Threading.Thread]::CurrentThread.CurrentUICulture = '';& '%~dp0build.ps1' %*; exit $LASTEXITCODE"
PowerShell -NoProfile -NoLogo -ExecutionPolicy unrestricted -Command "[System.Threading.Thread]::CurrentThread.CurrentCulture = ''; [System.Threading.Thread]::CurrentThread.CurrentUICulture = '';& '%~dp0run.ps1' default-build %*; exit $LASTEXITCODE"


@@ -1,67 +0,0 @@
$ErrorActionPreference = "Stop"
function DownloadWithRetry([string] $url, [string] $downloadLocation, [int] $retries)
{
while($true)
{
try
{
Invoke-WebRequest $url -OutFile $downloadLocation
break
}
catch
{
$exceptionMessage = $_.Exception.Message
Write-Host "Failed to download '$url': $exceptionMessage"
if ($retries -gt 0) {
$retries--
Write-Host "Waiting 10 seconds before retrying. Retries left: $retries"
Start-Sleep -Seconds 10
}
else
{
$exception = $_.Exception
throw $exception
}
}
}
}
cd $PSScriptRoot
$repoFolder = $PSScriptRoot
$env:REPO_FOLDER = $repoFolder
$koreBuildZip="https://github.com/aspnet/KoreBuild/archive/rel/2.0.0-preview2.zip"
if ($env:KOREBUILD_ZIP)
{
$koreBuildZip=$env:KOREBUILD_ZIP
}
$buildFolder = ".build"
$buildFile="$buildFolder\KoreBuild.ps1"
if (!(Test-Path $buildFolder)) {
Write-Host "Downloading KoreBuild from $koreBuildZip"
$tempFolder=$env:TEMP + "\KoreBuild-" + [guid]::NewGuid()
New-Item -Path "$tempFolder" -Type directory | Out-Null
$localZipFile="$tempFolder\korebuild.zip"
DownloadWithRetry -url $koreBuildZip -downloadLocation $localZipFile -retries 6
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Compression.ZipFile]::ExtractToDirectory($localZipFile, $tempFolder)
New-Item -Path "$buildFolder" -Type directory | Out-Null
copy-item "$tempFolder\**\build\*" $buildFolder -Recurse
# Cleanup
if (Test-Path $tempFolder) {
Remove-Item -Recurse -Force $tempFolder
}
}
&"$buildFile" @args


@@ -1,46 +1,8 @@
#!/usr/bin/env bash
repoFolder="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd $repoFolder
koreBuildZip="https://github.com/aspnet/KoreBuild/archive/rel/2.0.0-preview2.zip"
if [ ! -z $KOREBUILD_ZIP ]; then
koreBuildZip=$KOREBUILD_ZIP
fi
set -euo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
buildFolder=".build"
buildFile="$buildFolder/KoreBuild.sh"
if test ! -d $buildFolder; then
echo "Downloading KoreBuild from $koreBuildZip"
tempFolder="/tmp/KoreBuild-$(uuidgen)"
mkdir $tempFolder
localZipFile="$tempFolder/korebuild.zip"
retries=6
until (wget -O $localZipFile $koreBuildZip 2>/dev/null || curl -o $localZipFile --location $koreBuildZip 2>/dev/null)
do
echo "Failed to download '$koreBuildZip'"
if [ "$retries" -le 0 ]; then
exit 1
fi
retries=$((retries - 1))
echo "Waiting 10 seconds before retrying. Retries left: $retries"
sleep 10s
done
unzip -q -d $tempFolder $localZipFile
mkdir $buildFolder
cp -r $tempFolder/**/build/** $buildFolder
chmod +x $buildFile
# Cleanup
if test -d $tempFolder; then
rm -rf $tempFolder
fi
fi
$buildFile -r $repoFolder "$@"
# Call "sync" between "chmod" and execution to prevent "text file busy" error in Docker (aufs)
chmod +x "$DIR/run.sh"; sync
"$DIR/run.sh" default-build "$@"

build/Push.targets (new file, 15 changed lines)

@@ -0,0 +1,15 @@
<Project>
  <Target Name="Push" DependsOnTargets="_PushNuGet" />
  <ItemGroup>
    <PackagesToPush Include="$(BuildDir)*.nupkg" />
  </ItemGroup>
  <Target Name="_PushNuGet" Condition="@(PackagesToPush->Count()) != 0">
    <Error Text="Missing required property: NuGetPublishFeed" Condition=" '$(NuGetPublishFeed)' == '' "/>
    <PushNuGetPackages
      Packages="@(PackagesToPush)"
      Feed="$(NuGetPublishFeed)"
      ApiKey="$(APIKey)" />
  </Target>
</Project>


@@ -1,23 +0,0 @@
<Project>
<Import Project="dependencies.props" />
<Import Project="..\version.props" />
<PropertyGroup>
<Product>Microsoft ASP.NET Core</Product>
<RepositoryUrl>https://github.com/aspnet/javascriptservices</RepositoryUrl>
<RepositoryType>git</RepositoryType>
<AssemblyOriginatorKeyFile>$(MSBuildThisFileDirectory)Key.snk</AssemblyOriginatorKeyFile>
<SignAssembly>true</SignAssembly>
<PublicSign Condition="'$(OS)' != 'Windows_NT'">true</PublicSign>
<VersionSuffix Condition="'$(VersionSuffix)'!='' AND '$(BuildNumber)' != ''">$(VersionSuffix)-$(BuildNumber)</VersionSuffix>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Internal.AspNetCore.Sdk" Version="$(InternalAspNetCoreSdkVersion)" PrivateAssets="All" />
</ItemGroup>
<ItemGroup Condition="'$(TargetFrameworkIdentifier)'=='.NETFramework'">
<PackageReference Include="NETStandard.Library" Version="$(BundledNETStandardPackageVersion)" />
<PackageReference Include="NETStandard.Library.NETFramework" Version="$(NETStandardLibraryNETFrameworkVersion)" />
</ItemGroup>
</Project>


@@ -1,12 +1,27 @@
<Project>
<PropertyGroup>
<AspNetCoreVersion>2.0.0-preview2-*</AspNetCoreVersion>
<AutoMapperVersion>5.0.2</AutoMapperVersion>
<InternalAspNetCoreSdkVersion>2.1.0-*</InternalAspNetCoreSdkVersion>
<JsonNetVersion>10.0.1</JsonNetVersion>
<NETStandardImplicitPackageVersion>$(BundledNETStandardPackageVersion)</NETStandardImplicitPackageVersion>
<NETStandardLibraryNETFrameworkVersion>2.0.0-*</NETStandardLibraryNETFrameworkVersion>
<RuntimeFrameworkVersion Condition="'$(TargetFramework)'=='netcoreapp2.0'">2.0.0-*</RuntimeFrameworkVersion>
<ThreadingDataflowVersion>4.7.0</ThreadingDataflowVersion>
<MSBuildAllProjects>$(MSBuildAllProjects);$(MSBuildThisFileFullPath)</MSBuildAllProjects>
</PropertyGroup>
<PropertyGroup Label="Package Versions">
<InternalAspNetCoreSdkPackageVersion>2.1.0-preview1-15549</InternalAspNetCoreSdkPackageVersion>
<MicrosoftAspNetCoreDiagnosticsPackageVersion>2.0.1</MicrosoftAspNetCoreDiagnosticsPackageVersion>
<MicrosoftAspNetCoreHostingAbstractionsPackageVersion>2.0.1</MicrosoftAspNetCoreHostingAbstractionsPackageVersion>
<MicrosoftAspNetCoreHostingPackageVersion>2.0.1</MicrosoftAspNetCoreHostingPackageVersion>
<MicrosoftAspNetCoreMvcPackageVersion>2.0.1</MicrosoftAspNetCoreMvcPackageVersion>
<MicrosoftAspNetCoreMvcTagHelpersPackageVersion>2.0.1</MicrosoftAspNetCoreMvcTagHelpersPackageVersion>
<MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>2.0.1</MicrosoftAspNetCoreMvcViewFeaturesPackageVersion>
<MicrosoftAspNetCoreServerIISIntegrationPackageVersion>2.0.1</MicrosoftAspNetCoreServerIISIntegrationPackageVersion>
<MicrosoftAspNetCoreServerKestrelPackageVersion>2.0.1</MicrosoftAspNetCoreServerKestrelPackageVersion>
<MicrosoftAspNetCoreSpaServicesPackageVersion>2.0.1</MicrosoftAspNetCoreSpaServicesPackageVersion>
<MicrosoftAspNetCoreStaticFilesPackageVersion>2.0.1</MicrosoftAspNetCoreStaticFilesPackageVersion>
<MicrosoftAspNetCoreWebSocketsPackageVersion>2.0.1</MicrosoftAspNetCoreWebSocketsPackageVersion>
<MicrosoftExtensionsDependencyInjectionPackageVersion>2.0.0</MicrosoftExtensionsDependencyInjectionPackageVersion>
<MicrosoftExtensionsFileProvidersPhysicalPackageVersion>2.0.0</MicrosoftExtensionsFileProvidersPhysicalPackageVersion>
<MicrosoftExtensionsLoggingConsolePackageVersion>2.0.0</MicrosoftExtensionsLoggingConsolePackageVersion>
<MicrosoftExtensionsLoggingDebugPackageVersion>2.0.0</MicrosoftExtensionsLoggingDebugPackageVersion>
<MicrosoftNETCoreApp20PackageVersion>2.0.0</MicrosoftNETCoreApp20PackageVersion>
<NewtonsoftJsonPackageVersion>10.0.1</NewtonsoftJsonPackageVersion>
<SystemThreadingTasksDataflowPackageVersion>4.8.0</SystemThreadingTasksDataflowPackageVersion>
</PropertyGroup>
<Import Project="$(DotNetPackageVersionPropsPath)" Condition=" '$(DotNetPackageVersionPropsPath)' != '' " />
</Project>

build/repo.props (new file, 7 changed lines)

@@ -0,0 +1,7 @@
<Project>
  <PropertyGroup>
    <!-- These properties are used by the automation that updates dependencies.props -->
    <LineupPackageId>Internal.AspNetCore.Universe.Lineup</LineupPackageId>
    <LineupPackageRestoreSource>https://dotnet.myget.org/F/aspnetcore-ci-dev/api/v3/index.json</LineupPackageRestoreSource>
  </PropertyGroup>
</Project>

build/repo.targets (new file, 3 changed lines)

@@ -0,0 +1,3 @@
<Project>
  <Import Project="Push.targets" />
</Project>

korebuild-lock.txt (new file, 2 changed lines)

@@ -0,0 +1,2 @@
version:2.1.0-preview1-15549
commithash:f570e08585fec510dd60cd4bfe8795388b757a95

korebuild.json (new file, 10 changed lines)

@@ -0,0 +1,10 @@
{
  "$schema": "https://raw.githubusercontent.com/aspnet/BuildTools/dev/tools/korebuild.schema.json",
  "channel": "dev",
  "toolsets": {
    "nodejs": {
      "required": true,
      "minVersion": "6.9"
    }
  }
}

run.cmd (new file, 2 changed lines)

@@ -0,0 +1,2 @@
@ECHO OFF
PowerShell -NoProfile -NoLogo -ExecutionPolicy unrestricted -Command "[System.Threading.Thread]::CurrentThread.CurrentCulture = ''; [System.Threading.Thread]::CurrentThread.CurrentUICulture = '';& '%~dp0run.ps1' %*; exit $LASTEXITCODE"

run.ps1 (new file, 191 changed lines)

@@ -0,0 +1,191 @@
#!/usr/bin/env powershell
#requires -version 4
<#
.SYNOPSIS
Executes KoreBuild commands.
.DESCRIPTION
Downloads korebuild if required. Then executes the KoreBuild command. To see available commands, execute with `-Command help`.
.PARAMETER Command
The KoreBuild command to run.
.PARAMETER Path
The folder to build. Defaults to the folder containing this script.
.PARAMETER Channel
The channel of KoreBuild to download. Overrides the value from the config file.
.PARAMETER DotNetHome
The directory where .NET Core tools will be stored.
.PARAMETER ToolsSource
The base url where build tools can be downloaded. Overrides the value from the config file.
.PARAMETER Update
Updates KoreBuild to the latest version even if a lock file is present.
.PARAMETER ConfigFile
The path to the configuration file that stores values. Defaults to korebuild.json.
.PARAMETER Arguments
Arguments to be passed to the command
.NOTES
This function will create a file $PSScriptRoot/korebuild-lock.txt. This lock file can be committed to source, but does not have to be.
When the lockfile is not present, KoreBuild will create one using latest available version from $Channel.
The $ConfigFile is expected to be an JSON file. It is optional, and the configuration values in it are optional as well. Any options set
in the file are overridden by command line parameters.
.EXAMPLE
Example config file:
```json
{
"$schema": "https://raw.githubusercontent.com/aspnet/BuildTools/dev/tools/korebuild.schema.json",
"channel": "dev",
"toolsSource": "https://aspnetcore.blob.core.windows.net/buildtools"
}
```
#>
[CmdletBinding(PositionalBinding = $false)]
param(
[Parameter(Mandatory=$true, Position = 0)]
[string]$Command,
[string]$Path = $PSScriptRoot,
[Alias('c')]
[string]$Channel,
[Alias('d')]
[string]$DotNetHome,
[Alias('s')]
[string]$ToolsSource,
[Alias('u')]
[switch]$Update,
[string]$ConfigFile,
[Parameter(ValueFromRemainingArguments = $true)]
[string[]]$Arguments
)
Set-StrictMode -Version 2
$ErrorActionPreference = 'Stop'
#
# Functions
#
function Get-KoreBuild {
$lockFile = Join-Path $Path 'korebuild-lock.txt'
if (!(Test-Path $lockFile) -or $Update) {
Get-RemoteFile "$ToolsSource/korebuild/channels/$Channel/latest.txt" $lockFile
}
$version = Get-Content $lockFile | Where-Object { $_ -like 'version:*' } | Select-Object -first 1
if (!$version) {
Write-Error "Failed to parse version from $lockFile. Expected a line that begins with 'version:'"
}
$version = $version.TrimStart('version:').Trim()
$korebuildPath = Join-Paths $DotNetHome ('buildtools', 'korebuild', $version)
if (!(Test-Path $korebuildPath)) {
Write-Host -ForegroundColor Magenta "Downloading KoreBuild $version"
New-Item -ItemType Directory -Path $korebuildPath | Out-Null
$remotePath = "$ToolsSource/korebuild/artifacts/$version/korebuild.$version.zip"
try {
$tmpfile = Join-Path ([IO.Path]::GetTempPath()) "KoreBuild-$([guid]::NewGuid()).zip"
Get-RemoteFile $remotePath $tmpfile
if (Get-Command -Name 'Expand-Archive' -ErrorAction Ignore) {
# Use built-in commands where possible as they are cross-plat compatible
Expand-Archive -Path $tmpfile -DestinationPath $korebuildPath
}
else {
# Fallback to old approach for old installations of PowerShell
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Compression.ZipFile]::ExtractToDirectory($tmpfile, $korebuildPath)
}
}
catch {
Remove-Item -Recurse -Force $korebuildPath -ErrorAction Ignore
throw
}
finally {
Remove-Item $tmpfile -ErrorAction Ignore
}
}
return $korebuildPath
}
function Join-Paths([string]$path, [string[]]$childPaths) {
$childPaths | ForEach-Object { $path = Join-Path $path $_ }
return $path
}
function Get-RemoteFile([string]$RemotePath, [string]$LocalPath) {
if ($RemotePath -notlike 'http*') {
Copy-Item $RemotePath $LocalPath
return
}
$retries = 10
while ($retries -gt 0) {
$retries -= 1
try {
Invoke-WebRequest -UseBasicParsing -Uri $RemotePath -OutFile $LocalPath
return
}
catch {
Write-Verbose "Request failed. $retries retries remaining"
}
}
Write-Error "Download failed: '$RemotePath'."
}
#
# Main
#
# Load configuration or set defaults
$Path = Resolve-Path $Path
if (!$ConfigFile) { $ConfigFile = Join-Path $Path 'korebuild.json' }
if (Test-Path $ConfigFile) {
try {
$config = Get-Content -Raw -Encoding UTF8 -Path $ConfigFile | ConvertFrom-Json
if ($config) {
if (!($Channel) -and (Get-Member -Name 'channel' -InputObject $config)) { [string] $Channel = $config.channel }
if (!($ToolsSource) -and (Get-Member -Name 'toolsSource' -InputObject $config)) { [string] $ToolsSource = $config.toolsSource}
}
} catch {
Write-Warning "$ConfigFile could not be read. Its settings will be ignored."
Write-Warning $Error[0]
}
}
if (!$DotNetHome) {
$DotNetHome = if ($env:DOTNET_HOME) { $env:DOTNET_HOME } `
elseif ($env:USERPROFILE) { Join-Path $env:USERPROFILE '.dotnet'} `
elseif ($env:HOME) {Join-Path $env:HOME '.dotnet'}`
else { Join-Path $PSScriptRoot '.dotnet'}
}
if (!$Channel) { $Channel = 'dev' }
if (!$ToolsSource) { $ToolsSource = 'https://aspnetcore.blob.core.windows.net/buildtools' }
# Execute
$korebuildPath = Get-KoreBuild
Import-Module -Force -Scope Local (Join-Path $korebuildPath 'KoreBuild.psd1')
try {
Set-KoreBuildSettings -ToolsSource $ToolsSource -DotNetHome $DotNetHome -RepoPath $Path -ConfigFile $ConfigFile
Invoke-KoreBuildCommand $Command @Arguments
}
finally {
Remove-Module 'KoreBuild' -ErrorAction Ignore
}

run.sh (new executable file, 223 changed lines)

@@ -0,0 +1,223 @@
#!/usr/bin/env bash
set -euo pipefail
#
# variables
#
RESET="\033[0m"
RED="\033[0;31m"
YELLOW="\033[0;33m"
MAGENTA="\033[0;95m"
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
[ -z "${DOTNET_HOME:-}" ] && DOTNET_HOME="$HOME/.dotnet"
verbose=false
update=false
repo_path="$DIR"
channel=''
tools_source=''
#
# Functions
#
__usage() {
echo "Usage: $(basename "${BASH_SOURCE[0]}") command [options] [[--] <Arguments>...]"
echo ""
echo "Arguments:"
echo " command The command to be run."
echo " <Arguments>... Arguments passed to the command. Variable number of arguments allowed."
echo ""
echo "Options:"
echo " --verbose Show verbose output."
echo " -c|--channel <CHANNEL> The channel of KoreBuild to download. Overrides the value from the config file.."
echo " --config-file <FILE> The path to the configuration file that stores values. Defaults to korebuild.json."
echo " -d|--dotnet-home <DIR> The directory where .NET Core tools will be stored. Defaults to '\$DOTNET_HOME' or '\$HOME/.dotnet."
echo " --path <PATH> The directory to build. Defaults to the directory containing the script."
echo " -s|--tools-source|-ToolsSource <URL> The base url where build tools can be downloaded. Overrides the value from the config file."
echo " -u|--update Update to the latest KoreBuild even if the lock file is present."
echo ""
echo "Description:"
echo " This function will create a file \$DIR/korebuild-lock.txt. This lock file can be committed to source, but does not have to be."
echo " When the lockfile is not present, KoreBuild will create one using latest available version from \$channel."
if [[ "${1:-}" != '--no-exit' ]]; then
exit 2
fi
}
get_korebuild() {
local version
local lock_file="$repo_path/korebuild-lock.txt"
if [ ! -f "$lock_file" ] || [ "$update" = true ]; then
__get_remote_file "$tools_source/korebuild/channels/$channel/latest.txt" "$lock_file"
fi
version="$(grep 'version:*' -m 1 "$lock_file")"
if [[ "$version" == '' ]]; then
__error "Failed to parse version from $lock_file. Expected a line that begins with 'version:'"
return 1
fi
version="$(echo "${version#version:}" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//')"
local korebuild_path="$DOTNET_HOME/buildtools/korebuild/$version"
{
if [ ! -d "$korebuild_path" ]; then
mkdir -p "$korebuild_path"
local remote_path="$tools_source/korebuild/artifacts/$version/korebuild.$version.zip"
tmpfile="$(mktemp)"
echo -e "${MAGENTA}Downloading KoreBuild ${version}${RESET}"
if __get_remote_file "$remote_path" "$tmpfile"; then
unzip -q -d "$korebuild_path" "$tmpfile"
fi
rm "$tmpfile" || true
fi
source "$korebuild_path/KoreBuild.sh"
} || {
if [ -d "$korebuild_path" ]; then
echo "Cleaning up after failed installation"
rm -rf "$korebuild_path" || true
fi
return 1
}
}
__error() {
echo -e "${RED}error: $*${RESET}" 1>&2
}
__warn() {
echo -e "${YELLOW}warning: $*${RESET}"
}
__machine_has() {
hash "$1" > /dev/null 2>&1
return $?
}
__get_remote_file() {
local remote_path=$1
local local_path=$2
if [[ "$remote_path" != 'http'* ]]; then
cp "$remote_path" "$local_path"
return 0
fi
local failed=false
if __machine_has wget; then
wget --tries 10 --quiet -O "$local_path" "$remote_path" || failed=true
else
failed=true
fi
if [ "$failed" = true ] && __machine_has curl; then
failed=false
curl --retry 10 -sSL -f --create-dirs -o "$local_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
__error "Download failed: $remote_path" 1>&2
return 1
fi
}
#
# main
#
command="${1:-}"
shift
while [[ $# -gt 0 ]]; do
case $1 in
-\?|-h|--help)
__usage --no-exit
exit 0
;;
-c|--channel|-Channel)
shift
channel="${1:-}"
[ -z "$channel" ] && __usage
;;
--config-file|-ConfigFile)
shift
config_file="${1:-}"
[ -z "$config_file" ] && __usage
if [ ! -f "$config_file" ]; then
__error "Invalid value for --config-file. $config_file does not exist."
exit 1
fi
;;
-d|--dotnet-home|-DotNetHome)
shift
DOTNET_HOME="${1:-}"
[ -z "$DOTNET_HOME" ] && __usage
;;
--path|-Path)
shift
repo_path="${1:-}"
[ -z "$repo_path" ] && __usage
;;
-s|--tools-source|-ToolsSource)
shift
tools_source="${1:-}"
[ -z "$tools_source" ] && __usage
;;
-u|--update|-Update)
update=true
;;
--verbose|-Verbose)
verbose=true
;;
--)
shift
break
;;
*)
break
;;
esac
shift
done
if ! __machine_has unzip; then
__error 'Missing required command: unzip'
exit 1
fi
if ! __machine_has curl && ! __machine_has wget; then
__error 'Missing required command. Either wget or curl is required.'
exit 1
fi
[ -z "${config_file:-}" ] && config_file="$repo_path/korebuild.json"
if [ -f "$config_file" ]; then
if __machine_has jq ; then
if jq '.' "$config_file" >/dev/null ; then
config_channel="$(jq -r 'select(.channel!=null) | .channel' "$config_file")"
config_tools_source="$(jq -r 'select(.toolsSource!=null) | .toolsSource' "$config_file")"
else
__warn "$config_file is invalid JSON. Its settings will be ignored."
fi
elif __machine_has python ; then
if python -c "import json,codecs;obj=json.load(codecs.open('$config_file', 'r', 'utf-8-sig'))" >/dev/null ; then
config_channel="$(python -c "import json,codecs;obj=json.load(codecs.open('$config_file', 'r', 'utf-8-sig'));print(obj['channel'] if 'channel' in obj else '')")"
config_tools_source="$(python -c "import json,codecs;obj=json.load(codecs.open('$config_file', 'r', 'utf-8-sig'));print(obj['toolsSource'] if 'toolsSource' in obj else '')")"
else
__warn "$config_file is invalid JSON. Its settings will be ignored."
fi
else
__warn 'Missing required command: jq or python. Could not parse the JSON file. Its settings will be ignored.'
fi
[ ! -z "${config_channel:-}" ] && channel="$config_channel"
[ ! -z "${config_tools_source:-}" ] && tools_source="$config_tools_source"
fi
[ -z "$channel" ] && channel='dev'
[ -z "$tools_source" ] && tools_source='https://aspnetcore.blob.core.windows.net/buildtools'
get_korebuild
set_korebuildsettings "$tools_source" "$DOTNET_HOME" "$repo_path" "$config_file"
invoke_korebuild_command "$command" "$@"
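run.sh (like its PowerShell counterpart above) is a thin bootstrapper: it resolves a KoreBuild version from korebuild-lock.txt (or from the channel's latest.txt when the lock file is missing or --update is passed), downloads and unzips that version under $DOTNET_HOME/buildtools/korebuild/<version>, and then forwards the requested command to KoreBuild itself. The sketch below shows how it is typically driven; the korebuild.json keys are the only two the script reads, while the lock-file contents and the 'default-build' command name are illustrative assumptions, not part of this change.
# Hypothetical korebuild.json at the repository root (optional; the script
# falls back to channel 'dev' and the default tools source when it is absent):
#   { "channel": "dev", "toolsSource": "https://aspnetcore.blob.core.windows.net/buildtools" }
#
# Hypothetical korebuild-lock.txt, in the shape get_korebuild expects
# (a line beginning with 'version:'):
#   version: 2.0.0-rc1-12345
#
# Bootstrap KoreBuild and run a build; '-u' forces a refresh of the lock file,
# and anything after '--' is passed through to the chosen KoreBuild command.
./run.sh default-build -u -- /p:Configuration=Release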


@@ -1,20 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<Import Project="..\..\..\build\common.props" />
<PropertyGroup>
<TargetFrameworks>netcoreapp2.0;net461</TargetFrameworks>
<IsPackable>false</IsPackable>
<OutputType>exe</OutputType>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.NodeServices.Sockets\Microsoft.AspNetCore.NodeServices.Sockets.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="$(AspNetCoreVersion)" />
</ItemGroup>
</Project>


@@ -1,53 +0,0 @@
using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.NodeServices.Sockets;
using Microsoft.Extensions.DependencyInjection;
namespace ConsoleApplication
{
// This project is a micro-benchmark for .NET->Node RPC via NodeServices. It doesn't reflect
// real-world usage patterns (you're not likely to make hundreds of sequential calls like this),
// but is a starting point for comparing the overhead of different hosting models and transports.
public class Program
{
public static void Main(string[] args) {
// Set up the DI system
var services = new ServiceCollection();
services.AddNodeServices(options => {
// To compare with Socket hosting, uncomment the following line
// Since .NET Core 1.1, the HTTP hosting model has become basically as fast as the Socket hosting model
//options.UseSocketHosting();
options.ProjectPath = Directory.GetCurrentDirectory();
options.WatchFileExtensions = new string[] {}; // Don't watch anything
});
var serviceProvider = services.BuildServiceProvider();
// Now instantiate an INodeServices and use it
using (var nodeServices = serviceProvider.GetRequiredService<INodeServices>()) {
MeasureLatency(nodeServices).Wait();
}
}
private static async Task MeasureLatency(INodeServices nodeServices) {
// Ensure the connection is open, so we can measure per-request timings below
var response = await nodeServices.InvokeAsync<string>("latencyTest", "C#");
Console.WriteLine(response);
// Now perform a series of requests, capturing the time taken
const int requestCount = 100;
var watch = Stopwatch.StartNew();
for (var i = 0; i < requestCount; i++) {
await nodeServices.InvokeAsync<string>("latencyTest", "C#");
}
// Display results
var elapsedSeconds = (float)watch.ElapsedTicks / Stopwatch.Frequency;
Console.WriteLine("\nTotal time: {0:F2} milliseconds", 1000 * elapsedSeconds);
Console.WriteLine("\nTime per invocation: {0:F2} milliseconds", 1000 * elapsedSeconds / requestCount);
}
}
}


@@ -1,4 +0,0 @@
module.exports = function(callback, incomingParam1) {
var result = 'Hello, ' + incomingParam1 + '!';
callback(/* error */ null, result);
}


@@ -1,2 +0,0 @@
/node_modules/
/Properties/launchSettings.json


@@ -1,42 +0,0 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.NodeServices;
namespace NodeServicesExamples.Controllers
{
public class HomeController : Controller
{
public IActionResult Index(int pageIndex)
{
return View();
}
public IActionResult ES2015Transpilation()
{
return View();
}
public async Task<IActionResult> Chart([FromServices] INodeServices nodeServices)
{
var options = new { width = 400, height = 200, showArea = true, showPoint = true, fullWidth = true };
var data = new
{
labels = new[] { "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" },
series = new[] {
new[] { 1, 5, 2, 5, 4, 3 },
new[] { 2, 3, 4, 8, 1, 2 },
new[] { 5, 4, 3, 2, 1, 0 }
}
};
ViewData["ChartMarkup"] = await nodeServices.InvokeAsync<string>("./Node/renderChart", "line", options, data);
return View();
}
public IActionResult Error()
{
return View("~/Views/Shared/Error.cshtml");
}
}
}


@@ -1,8 +0,0 @@
var generate = require('node-chartist');
module.exports = function (callback, type, options, data) {
generate(type, options, data).then(
result => callback(null, result), // Success case
error => callback(error) // Error case
);
};


@@ -1,12 +0,0 @@
var fs = require('fs');
var babelCore = require('babel-core');
module.exports = function(cb, physicalPath, requestPath) {
var originalContents = fs.readFileSync(physicalPath);
var result = babelCore.transform(originalContents, {
presets: ['es2015'],
sourceMaps: 'inline',
sourceFileName: '/sourcemapped' + requestPath
});
cb(null, result.code);
}


@@ -1,29 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<Import Project="..\..\..\build\common.props" />
<PropertyGroup>
<TargetFrameworks>netcoreapp2.0;net461</TargetFrameworks>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Diagnostics" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Hosting" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Mvc" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.StaticFiles" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="$(AspNetCoreVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish">
<Exec Command="npm install" />
</Target>
</Project>


@@ -1,69 +0,0 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System.IO;
namespace NodeServicesExamples
{
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
// Enable Node Services
services.AddNodeServices();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, ILoggerFactory loggerFactory, IHostingEnvironment env, INodeServices nodeServices)
{
app.UseDeveloperExceptionPage();
// Dynamically transpile any .js files under the '/js/' directory
app.Use(next => async context => {
var requestPath = context.Request.Path.Value;
if (requestPath.StartsWith("/js/") && requestPath.EndsWith(".js")) {
var fileInfo = env.WebRootFileProvider.GetFileInfo(requestPath);
if (fileInfo.Exists) {
var transpiled = await nodeServices.InvokeAsync<string>("./Node/transpilation.js", fileInfo.PhysicalPath, requestPath);
await context.Response.WriteAsync(transpiled);
return;
}
}
// Not a JS file, or doesn't exist - let some other middleware handle it
await next.Invoke(context);
});
app.UseStaticFiles();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "default",
template: "{controller=Home}/{action=Index}/{id?}");
});
}
public static void Main(string[] args)
{
var host = new WebHostBuilder()
.ConfigureLogging(factory =>
{
factory.AddConsole();
factory.AddDebug();
})
.UseContentRoot(Directory.GetCurrentDirectory())
.UseIISIntegration()
.UseKestrel()
.UseStartup<Startup>()
.Build();
host.Run();
}
}
}


@@ -1,12 +0,0 @@
<h1>Server-rendered chart</h1>
<p>
This sample demonstrates how arbitrary NPM modules can be invoked from .NET code.
</p>
<p>
In this case, we use <code>node-chartist</code> to render the following chart on the server. The output is
identical to what you'd get if you used <a href='https://gionkunz.github.io/chartist-js/'>chartist.js</a>
on the client, except that in this example, we're not executing any client-side code at all.
</p>
@Html.Raw(ViewData["ChartMarkup"])


@@ -1,16 +0,0 @@
<h1>ES2015 Transpilation</h1>
<p>
This sample demonstrates a way of intercepting requests for .js files and dynamically transpiling them
from ES2015 code to browser-compatible ES5 code using the Babel library.
</p>
<p>
To see that it's working, open your browser's 'Debug' console and look for the log message. This is
produced by the file <a href='/js/main.js'>/js/main.js</a>, which is transpiled from ES2015 dynamically
when requested.
</p>
@section scripts {
<script src='/js/main.js'></script>
}


@@ -1,12 +0,0 @@
<h1>NodeServices examples</h1>
<p>
These examples demonstrate the direct use of the NodeServices package, independently of the usual SPA scenarios.
In general, NodeServices offers an efficient way to use Node-provided functionality (e.g., NPM modules) from inside
a .NET application.
</p>
<ul>
<li><a asp-action="ES2015Transpilation">ES2015 transpilation</a></li>
<li><a asp-action="Chart">Server-side chart rendering</a></li>
</ul>


@@ -1,6 +0,0 @@
@{
ViewData["Title"] = "Error";
}
<h1 class="text-danger">Error.</h1>
<h2 class="text-danger">An error occurred while processing your request.</h2>


@@ -1,12 +0,0 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<title>NodeServices Examples</title>
<link rel="stylesheet" href="~/css/chartist.min.css" />
</head>
<body>
@RenderBody()
@RenderSection("scripts", required: false)
</body>
</html>


@@ -1,2 +0,0 @@
@using NodeServicesExamples
@addTagHelper "*, Microsoft.AspNetCore.Mvc.TagHelpers"


@@ -1,3 +0,0 @@
@{
Layout = "_Layout";
}


@@ -1,6 +0,0 @@
{
"compilerOptions": {
"target": "ES6",
"module": "commonjs"
}
}


@@ -1,9 +0,0 @@
{
"name": "nodeservicesexamples",
"version": "0.0.0",
"dependencies": {
"babel-core": "^6.7.4",
"babel-preset-es2015": "^6.6.0",
"node-chartist": "^1.0.2"
}
}

File diff suppressed because one or more lines are too long

Binary file not shown (deleted image, 31 KiB).


@@ -1,7 +0,0 @@
class Greeting {
getMessage() {
return 'Hello from the ES2015 class';
}
}
console.log(new Greeting().getMessage());


@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<system.webServer>
<handlers>
<add name="httpPlatformHandler" path="*" verb="*" modules="httpPlatformHandler" resourceType="Unspecified" />
</handlers>
<httpPlatform processPath="%DNX_PATH%" arguments="%DNX_ARGS%" stdoutLogEnabled="false" forwardWindowsAuthToken="false" startupTimeLimit="3600" />
</system.webServer>
</configuration>


@@ -1,233 +0,0 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
yarn.lock
wwwroot/dist
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
build/
bld/
[Bb]in/
[Oo]bj/
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Microsoft Azure ApplicationInsights config file
ApplicationInsights.config
# Windows Store app package directory
AppPackages/
BundleArtifacts/
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.pfx
*.publishsettings
node_modules/
orleans.codegen.cs
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
# FAKE - F# Make
.fake/


@@ -1,51 +0,0 @@
using System.Threading.Tasks;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.SpaServices.Prerendering;
using Microsoft.Extensions.DependencyInjection;
namespace Webpack.ActionResults
{
// This is an example of how you could invoke the prerendering API from an ActionResult, so as to
// prerender a SPA component as the entire response page (instead of injecting the SPA component
// into a Razor view's output)
public class PrerenderResult : ActionResult
{
private JavaScriptModuleExport _moduleExport;
private object _dataToSupply;
public PrerenderResult(JavaScriptModuleExport moduleExport, object dataToSupply = null)
{
_moduleExport = moduleExport;
_dataToSupply = dataToSupply;
}
public override async Task ExecuteResultAsync(ActionContext context)
{
var nodeServices = context.HttpContext.RequestServices.GetRequiredService<INodeServices>();
var hostEnv = context.HttpContext.RequestServices.GetRequiredService<IHostingEnvironment>();
var applicationLifetime = context.HttpContext.RequestServices.GetRequiredService<IApplicationLifetime>();
var applicationBasePath = hostEnv.ContentRootPath;
var request = context.HttpContext.Request;
var response = context.HttpContext.Response;
var prerenderedHtml = await Prerenderer.RenderToString(
applicationBasePath,
nodeServices,
applicationLifetime.ApplicationStopping,
_moduleExport,
request.GetEncodedUrl(),
request.Path + request.QueryString.Value,
_dataToSupply,
/* timeoutMilliseconds */ 30000,
/* requestPathBase */ "/"
);
response.ContentType = "text/html";
await response.WriteAsync(prerenderedHtml.Html);
}
}
}


@@ -1,13 +0,0 @@
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SpaServices.Prerendering;
namespace Webpack.ActionResults
{
public static class PrerenderResultExtensions
{
public static PrerenderResult Prerender(this ControllerBase controller, JavaScriptModuleExport exportToPrerender, object dataToSupply = null)
{
return new PrerenderResult(exportToPrerender, dataToSupply);
}
}
}
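PrerenderResult above, together with this Prerender(...) extension, is meant to be returned straight from an MVC action so that the prerendered SPA component becomes the entire response body. A minimal sketch of that usage, assuming a hypothetical server-side bundle path and data object (neither appears in this change):
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.SpaServices.Prerendering;
using Webpack.ActionResults;
namespace Webpack.Controllers
{
    public class PrerenderDemoController : Controller
    {
        public IActionResult Index()
        {
            // Assumed server-side bundle whose default export is a prerendering function
            var exportToPrerender = new JavaScriptModuleExport("ClientApp/dist/main-server");
            // The optional data object is serialized and supplied to the JavaScript entry point
            return this.Prerender(exportToPrerender, new { initialData = 42 });
        }
    }
}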


@@ -1,4 +0,0 @@
import { HelloWorld } from './HelloWorld';
import './styles/main.less';
new HelloWorld().doIt();


@@ -1,5 +0,0 @@
export class HelloWorld {
public doIt() {
console.log('Hello from MyApp');
}
}


@@ -1,5 +0,0 @@
@headerColor: red;
h1 {
color: @headerColor;
}


@@ -1,16 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
namespace Webpack.Controllers
{
public class HomeController : Controller
{
public IActionResult Index()
{
return View();
}
}
}


@@ -1,25 +0,0 @@
{
"iisSettings": {
"windowsAuthentication": false,
"anonymousAuthentication": true,
"iisExpress": {
"applicationUrl": "http://localhost:51463/",
"sslPort": 0
}
},
"profiles": {
"IIS Express": {
"commandName": "IISExpress",
"launchBrowser": true,
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
},
"dotnet cli": {
"commandName": "Project",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
}
}
}
}


@@ -1,60 +0,0 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.SpaServices.Webpack;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using System.IO;
using Microsoft.AspNetCore.NodeServices;
namespace Webpack
{
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddMvc();
services.AddNodeServices();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, ILoggerFactory loggerFactory, IHostingEnvironment env)
{
app.UseDeveloperExceptionPage();
// For real apps, you should only use Webpack Dev Middleware at development time. For production,
// you'll get better performance and reliability if you precompile the webpack output and simply
// serve the resulting static files. For examples of setting up this automatic switch between
// development-style and production-style webpack usage, see the 'templates' dir in this repo.
app.UseWebpackDevMiddleware(new WebpackDevMiddlewareOptions {
HotModuleReplacement = true
});
app.UseStaticFiles();
app.UseMvc(routes =>
{
routes.MapRoute(
name: "default",
template: "{controller=Home}/{action=Index}/{id?}");
});
}
public static void Main(string[] args)
{
var host = new WebHostBuilder()
.ConfigureLogging(factory =>
{
factory.AddConsole();
factory.AddDebug();
})
.UseContentRoot(Directory.GetCurrentDirectory())
.UseIISIntegration()
.UseKestrel()
.UseStartup<Startup>()
.Build();
host.Run();
}
}
}


@@ -1,10 +0,0 @@
@{
ViewData["Title"] = "Home Page";
}
<h1>Hello</h1>
Hi there. Enter some text: <input />
@section scripts {
<script src="dist/main.js"></script>
}


@@ -1,6 +0,0 @@
@{
ViewData["Title"] = "Error";
}
<h1 class="text-danger">Error.</h1>
<h2 class="text-danger">An error occurred while processing your request.</h2>


@@ -1,14 +0,0 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>@ViewData["Title"]</title>
<environment names="Production">
<link rel="stylesheet" href="dist/my-styles.css" />
</environment>
</head>
<body>
@RenderBody()
@RenderSection("scripts", required: false)
</body>
</html>


@@ -1,2 +0,0 @@
@using Webpack
@addTagHelper "*, Microsoft.AspNetCore.Mvc.TagHelpers"


@@ -1,3 +0,0 @@
@{
Layout = "_Layout";
}


@@ -1,29 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
<Import Project="..\..\..\build\common.props" />
<PropertyGroup>
<TargetFrameworks>netcoreapp2.0;net461</TargetFrameworks>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.AspNetCore.SpaServices\Microsoft.AspNetCore.SpaServices.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Diagnostics" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Hosting" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.IISIntegration" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Mvc" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.Server.Kestrel" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.AspNetCore.StaticFiles" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="$(AspNetCoreVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish">
<Exec Command="npm install" />
</Target>
</Project>


@@ -1,10 +0,0 @@
{
"Logging": {
"IncludeScopes": false,
"LogLevel": {
"Default": "Verbose",
"System": "Information",
"Microsoft": "Information"
}
}
}


@@ -1,20 +0,0 @@
{
"name": "Webpack",
"version": "0.0.0",
"devDependencies": {
"css-loader": "^0.23.1",
"extendify": "^1.0.0",
"extract-text-webpack-plugin": "^1.0.1",
"less": "^2.6.0",
"less-loader": "^2.2.2",
"style-loader": "^0.13.0",
"webpack-hot-middleware": "^2.7.1"
},
"dependencies": {
"aspnet-prerendering": "^1.0.4",
"aspnet-webpack": "^1.0.3",
"ts-loader": "^0.8.1",
"typescript": "^2.0.0",
"webpack": "^1.13.3"
}
}


@@ -1,13 +0,0 @@
{
"compilerOptions": {
"moduleResolution": "node",
"module": "commonjs",
"target": "es5",
"jsx": "preserve",
"sourceMap": true,
"lib": ["es6", "dom"]
},
"exclude": [
"node_modules"
]
}


@@ -1,8 +0,0 @@
module.exports = {
devtool: 'inline-source-map',
module: {
loaders: [
{ test: /\.less$/, loader: 'style-loader!css-loader!less-loader' }
]
}
};


@@ -1,25 +0,0 @@
var path = require('path');
var merge = require('extendify')({ isDeep: true, arrays: 'concat' });
var devConfig = require('./webpack.config.dev');
var prodConfig = require('./webpack.config.prod');
var isDevelopment = process.env.ASPNETCORE_ENVIRONMENT === 'Development';
module.exports = merge({
resolve: {
extensions: [ '', '.js', '.jsx', '.ts', '.tsx' ]
},
module: {
loaders: [
{ test: /\.ts(x?)$/, exclude: /node_modules/, loader: 'ts-loader?silent' }
],
},
entry: {
main: ['./Clientside/App.ts']
},
output: {
path: path.join(__dirname, 'wwwroot', 'dist'),
filename: '[name].js',
publicPath: '/dist/'
},
plugins: []
}, isDevelopment ? devConfig : prodConfig);


@@ -1,15 +0,0 @@
var webpack = require('webpack');
var ExtractTextPlugin = require('extract-text-webpack-plugin');
var extractLESS = new ExtractTextPlugin('my-styles.css');
module.exports = {
module: {
loaders: [
{ test: /\.less$/, loader: extractLESS.extract(['css-loader', 'less-loader']) },
]
},
plugins: [
extractLESS,
new webpack.optimize.UglifyJsPlugin({ minimize: true, compressor: { warnings: false } })
]
};

Binary file not shown (deleted image, 31 KiB).


@@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<system.webServer>
<handlers>
<add name="httpPlatformHandler" path="*" verb="*" modules="httpPlatformHandler" resourceType="Unspecified"/>
</handlers>
<httpPlatform processPath="%DNX_PATH%" arguments="%DNX_ARGS%" stdoutLogEnabled="false"/>
</system.webServer>
</configuration>

13
src/Directory.Build.props Normal file

@@ -0,0 +1,13 @@
<Project>
<Import Project="..\Directory.Build.props" />
<PropertyGroup>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<PackageTags>aspnetcore;aspnetcoremvc;nodeservices</PackageTags>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Internal.AspNetCore.Sdk" PrivateAssets="All" Version="$(InternalAspNetCoreSdkPackageVersion)" />
</ItemGroup>
</Project>


@@ -1,3 +0,0 @@
/bin/
/node_modules/
yarn.lock


@@ -1,524 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
var net = __webpack_require__(3);
var path = __webpack_require__(4);
var readline = __webpack_require__(5);
var ArgsUtil_1 = __webpack_require__(6);
var ExitWhenParentExits_1 = __webpack_require__(7);
var virtualConnectionServer = __webpack_require__(8);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
// Signal to the .NET side when we're ready to accept invocations
var server = net.createServer().on('listening', function () {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', function (connection) {
readline.createInterface(connection, null).on('line', function (line) {
try {
// Get a reference to the function to invoke
var invocation = JSON.parse(line);
var invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
var invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
var hasInvokedCallback_1 = false;
var invocationCallback = function (errorValue, successValue) {
if (hasInvokedCallback_1) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback_1 = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: function () {
hasInvokedCallback_1 = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
}
catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
var useWindowsNamedPipes = /^win/.test(process.platform);
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid));
/***/ },
/* 2 */
/***/ function(module, exports) {
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("net");
/***/ },
/* 4 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 5 */
/***/ function(module, exports) {
module.exports = require("readline");
/***/ },
/* 6 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 7 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ },
/* 8 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var events_1 = __webpack_require__(9);
var VirtualConnection_1 = __webpack_require__(10);
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
var MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
function createInterface(server) {
var emitter = new events_1.EventEmitter();
server.on('connection', function (socket) {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
var childSockets = new VirtualConnectionsCollection(socket, function (virtualConnection) {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
exports.createInterface = createInterface;
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
var VirtualConnectionsCollection = (function () {
function VirtualConnectionsCollection(_socket, _onVirtualConnectionCallback) {
var _this = this;
this._socket = _socket;
this._onVirtualConnectionCallback = _onVirtualConnectionCallback;
this._currentFrameHeader = null;
this._virtualConnections = {};
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', function () {
Object.getOwnPropertyNames(_this._virtualConnections).forEach(function (id) {
// A 'null' frame signals that the connection was closed remotely
_this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, open virtual connections as needed, and dispatch data to them.
*/
VirtualConnectionsCollection.prototype._onIncomingDataAvailable = function () {
var exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
else {
var frameBodyLength = this._currentFrameHeader.bodyLength;
var frameBodyOrNull = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
var headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
}
else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
};
VirtualConnectionsCollection.prototype._onReceivedCompleteFrame = function (header, bodyIfNotEmpty) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
var virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
};
VirtualConnectionsCollection.prototype._getOrOpenVirtualConnection = function (header) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
}
else {
// It's a new one
return this._openVirtualConnection(header);
}
};
VirtualConnectionsCollection.prototype._openVirtualConnection = function (header) {
var _this = this;
var beginWriteCallback = function (data, writeCompletedCallback) {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
_this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
var newVirtualConnection = new VirtualConnection_1.VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', function () {
// The virtual connection was closed remotely. Clean up locally.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', function () {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
_this._onVirtualConnectionWasClosed(header.connectionIdString);
_this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
};
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
VirtualConnectionsCollection.prototype._readNextFrameHeader = function () {
var headerBuf = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
var connectionIdBinary = headerBuf.slice(0, 8);
var connectionIdString = connectionIdBinary.toString('hex');
var bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary: connectionIdBinary, connectionIdString: connectionIdString, bodyLength: bodyLength };
}
else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
};
VirtualConnectionsCollection.prototype._sendFrame = function (connectionIdBinary, data, callback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
var hasInvokedCallback = false;
var finalCallback = callback && (function (error) {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
var notFinalCallback = callback && (function (error) {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
var bytesSent = 0;
do {
var nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
var isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
};
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
VirtualConnectionsCollection.prototype._sendInt32LE = function (value, callback) {
var buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
};
VirtualConnectionsCollection.prototype._onVirtualConnectionWasClosed = function (id) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
};
return VirtualConnectionsCollection;
}());
/***/ },
/* 9 */
/***/ function(module, exports) {
module.exports = require("events");
/***/ },
/* 10 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
var stream_1 = __webpack_require__(11);
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
var VirtualConnection = (function (_super) {
__extends(VirtualConnection, _super);
function VirtualConnection(_beginWriteCallback) {
var _this = _super.call(this) || this;
_this._beginWriteCallback = _beginWriteCallback;
_this._flowing = false;
_this._receivedDataQueue = [];
return _this;
}
VirtualConnection.prototype._read = function () {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
var nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
};
VirtualConnection.prototype._write = function (chunk, encodingIfString, callback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk, encodingIfString);
}
this._beginWriteCallback(chunk, callback);
};
VirtualConnection.prototype.onReceivedData = function (dataOrNullToSignalEOF) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
}
else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
};
return VirtualConnection;
}(stream_1.Duplex));
exports.VirtualConnection = VirtualConnection;
/***/ },
/* 11 */
/***/ function(module, exports) {
module.exports = require("stream");
/***/ }
/******/ ])));


@@ -1,26 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<Import Project="..\..\build\common.props" />
<PropertyGroup>
<Description>Socket-based RPC for Microsoft.AspNetCore.NodeServices.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
<PackageTags>aspnetcore;aspnetcoremvc;nodeservices</PackageTags>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Microsoft.AspNetCore.NodeServices\Microsoft.AspNetCore.NodeServices.csproj" />
<PackageReference Include="System.Threading.Tasks.Dataflow" Version="$(ThreadingDataflowVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>


@@ -1,40 +0,0 @@
using System.IO;
using System.IO.Pipes;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class NamedPipeConnection : StreamConnection
{
private bool _disposedValue = false;
private NamedPipeClientStream _namedPipeClientStream;
#pragma warning disable 1998 // Because in the NET451 code path, there's nothing to await
public override async Task<Stream> Open(string address)
{
_namedPipeClientStream = new NamedPipeClientStream(
".",
address,
PipeDirection.InOut,
PipeOptions.Asynchronous);
await _namedPipeClientStream.ConnectAsync().ConfigureAwait(false);
return _namedPipeClientStream;
}
#pragma warning restore 1998
public override void Dispose()
{
if (!_disposedValue)
{
if (_namedPipeClientStream != null)
{
_namedPipeClientStream.Dispose();
}
_disposedValue = true;
}
}
}
}


@@ -1,26 +0,0 @@
using System;
using System.IO;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal abstract class StreamConnection : IDisposable
{
public abstract Task<Stream> Open(string address);
public abstract void Dispose();
public static StreamConnection Create()
{
var useNamedPipes = System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(
System.Runtime.InteropServices.OSPlatform.Windows);
if (useNamedPipes)
{
return new NamedPipeConnection();
}
else
{
return new UnixDomainSocketConnection();
}
}
}
}


@@ -1,40 +0,0 @@
using System.IO;
using System.Net.Sockets;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
internal class UnixDomainSocketConnection : StreamConnection
{
private bool _disposedValue = false;
private NetworkStream _networkStream;
private Socket _socket;
public override async Task<Stream> Open(string address)
{
var endPoint = new UnixDomainSocketEndPoint("/tmp/" + address);
_socket = new Socket(endPoint.AddressFamily, SocketType.Stream, ProtocolType.Unspecified);
await _socket.ConnectAsync(endPoint).ConfigureAwait(false);
_networkStream = new NetworkStream(_socket);
return _networkStream;
}
public override void Dispose()
{
if (!_disposedValue)
{
if (_networkStream != null)
{
_networkStream.Dispose();
}
if (_socket != null)
{
_socket.Dispose();
}
_disposedValue = true;
}
}
}
}


@@ -1,86 +0,0 @@
using System;
using System.Net;
using System.Net.Sockets;
using System.Text;
namespace Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections
{
// From System.IO.Pipes/src/System/Net/Sockets/UnixDomainSocketEndPoint.cs (an internal class in System.IO.Pipes)
internal sealed class UnixDomainSocketEndPoint : EndPoint
{
private const AddressFamily EndPointAddressFamily = AddressFamily.Unix;
private static readonly Encoding s_pathEncoding = Encoding.UTF8;
private static readonly int s_nativePathOffset = 2; // = offsetof(struct sockaddr_un, sun_path). It's the same on Linux and OSX
private static readonly int s_nativePathLength = 91; // sockaddr_un.sun_path at http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_un.h.html, -1 for terminator
private static readonly int s_nativeAddressSize = s_nativePathOffset + s_nativePathLength;
private readonly string _path;
private readonly byte[] _encodedPath;
public UnixDomainSocketEndPoint(string path)
{
if (path == null)
{
throw new ArgumentNullException(nameof(path));
}
_path = path;
_encodedPath = s_pathEncoding.GetBytes(_path);
if (path.Length == 0 || _encodedPath.Length > s_nativePathLength)
{
throw new ArgumentOutOfRangeException(nameof(path));
}
}
internal UnixDomainSocketEndPoint(SocketAddress socketAddress)
{
if (socketAddress == null)
{
throw new ArgumentNullException(nameof(socketAddress));
}
if (socketAddress.Family != EndPointAddressFamily ||
socketAddress.Size > s_nativeAddressSize)
{
throw new ArgumentOutOfRangeException(nameof(socketAddress));
}
if (socketAddress.Size > s_nativePathOffset)
{
_encodedPath = new byte[socketAddress.Size - s_nativePathOffset];
for (int i = 0; i < _encodedPath.Length; i++)
{
_encodedPath[i] = socketAddress[s_nativePathOffset + i];
}
_path = s_pathEncoding.GetString(_encodedPath, 0, _encodedPath.Length);
}
else
{
_encodedPath = Array.Empty<byte>();
_path = string.Empty;
}
}
public override SocketAddress Serialize()
{
var result = new SocketAddress(AddressFamily.Unix, s_nativeAddressSize);
for (int index = 0; index < _encodedPath.Length; index++)
{
result[s_nativePathOffset + index] = _encodedPath[index];
}
result[s_nativePathOffset + _encodedPath.Length] = 0; // path must be null-terminated
return result;
}
public override EndPoint Create(SocketAddress socketAddress) => new UnixDomainSocketEndPoint(socketAddress);
public override AddressFamily AddressFamily => EndPointAddressFamily;
public override string ToString() => _path;
}
}


@@ -1,241 +0,0 @@
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.AspNetCore.NodeServices.Sockets.PhysicalConnections;
using Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses a lightweight binary streaming protocol
/// to perform RPC invocations. The physical transport is Named Pipes on Windows, or Domain Sockets on Linux/Mac.
/// For details on the binary streaming protocol, see
/// Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections.VirtualConnectionClient.
/// The advantage versus using HTTP for RPC is that this is faster (not surprisingly, since there's much less overhead
/// because we don't need most of the functionality of HTTP).
///
/// The address of the pipe/socket is selected randomly here on the .NET side and sent to the child process as a
/// command-line argument (the address space is wide enough that there's no real risk of a clash, unlike when
/// selecting TCP port numbers).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class SocketNodeInstance : OutOfProcessNodeInstance
{
private readonly static JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly static int streamBufferSize = 16 * 1024;
private readonly static UTF8Encoding utf8EncodingWithoutBom = new UTF8Encoding(false);
private readonly SemaphoreSlim _connectionCreationSemaphore = new SemaphoreSlim(1);
private bool _connectionHasFailed;
private StreamConnection _physicalConnection;
private string _socketAddress;
private VirtualConnectionClient _virtualConnectionClient;
public SocketNodeInstance(NodeServicesOptions options, string socketAddress)
: base(
EmbeddedResourceReader.Read(
typeof(SocketNodeInstance),
"/Content/Node/entrypoint-socket.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeNewCommandLineOptions(socketAddress),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_socketAddress = socketAddress;
}
protected override async Task<T> InvokeExportAsync<T>(NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
if (_connectionHasFailed)
{
// _connectionHasFailed implies a protocol-level error. The old instance is no longer of any use.
var allowConnectionDraining = false;
// This special exception type forces NodeServicesImpl to restart the Node instance
throw new NodeInvocationException(
"The SocketNodeInstance socket connection failed. See logs to identify the reason.",
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: allowConnectionDraining);
}
if (_virtualConnectionClient == null)
{
// Although we could pass the cancellationToken into EnsureVirtualConnectionClientCreated and
// have it signal cancellations upstream, that would be a bad thing to do, because all callers
// wait for the same connection task. There's no reason why the first caller should have the
// special ability to cancel the connection process in a way that would affect subsequent
// callers. So, each caller just independently stops awaiting connection if that call is cancelled.
await ThrowOnCancellation(EnsureVirtualConnectionClientCreated(), cancellationToken);
}
// For each invocation, we open a new virtual connection. This gives an API equivalent to opening a new
// physical connection to the child process, but without the overhead of doing so, because it's really
// just multiplexed into the existing physical connection stream.
bool shouldDisposeVirtualConnection = true;
Stream virtualConnection = null;
try
{
virtualConnection = _virtualConnectionClient.OpenVirtualConnection();
// Send request
WriteJsonLine(virtualConnection, invocationInfo);
// Determine what kind of response format is expected
if (typeof(T) == typeof(Stream))
{
// Pass through streamed binary response
// It is up to the consumer to dispose this stream, so don't do so here
shouldDisposeVirtualConnection = false;
return (T)(object)virtualConnection;
}
else
{
// Parse and return non-streamed JSON response
var response = await ReadJsonAsync<RpcJsonResponse<T>>(virtualConnection, cancellationToken);
if (response.ErrorMessage != null)
{
throw new NodeInvocationException(response.ErrorMessage, response.ErrorDetails);
}
return response.Result;
}
}
finally
{
if (shouldDisposeVirtualConnection)
{
virtualConnection.Dispose();
}
}
}
private async Task EnsureVirtualConnectionClientCreated()
{
// Asynchronous equivalent to a 'lock(...) { ... }'
await _connectionCreationSemaphore.WaitAsync();
try
{
if (_virtualConnectionClient == null)
{
_physicalConnection = StreamConnection.Create();
var connection = await _physicalConnection.Open(_socketAddress);
_virtualConnectionClient = new VirtualConnectionClient(connection);
_virtualConnectionClient.OnError += (ex) =>
{
// This callback is fired only if there's a protocol-level failure (e.g., child process disconnected
// unexpectedly). It does *not* fire when RPC calls return errors. Since there's been a protocol-level
// failure, this Node instance is no longer usable and should be discarded.
_connectionHasFailed = true;
OutputLogger.LogError(0, ex, ex.Message);
};
}
}
finally
{
_connectionCreationSemaphore.Release();
}
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (_virtualConnectionClient != null)
{
_virtualConnectionClient.Dispose();
_virtualConnectionClient = null;
}
if (_physicalConnection != null)
{
_physicalConnection.Dispose();
_physicalConnection = null;
}
}
base.Dispose(disposing);
}
private static void WriteJsonLine(Stream stream, object serializableObject)
{
using (var streamWriter = new StreamWriter(stream, utf8EncodingWithoutBom, streamBufferSize, true))
using (var jsonWriter = new JsonTextWriter(streamWriter))
{
jsonWriter.CloseOutput = false;
jsonWriter.AutoCompleteOnClose = false;
var serializer = JsonSerializer.Create(jsonSerializerSettings);
serializer.Serialize(jsonWriter, serializableObject);
jsonWriter.Flush();
streamWriter.WriteLine();
streamWriter.Flush();
}
}
private static async Task<T> ReadJsonAsync<T>(Stream stream, CancellationToken cancellationToken)
{
var json = Encoding.UTF8.GetString(await ReadAllBytesAsync(stream, cancellationToken));
return JsonConvert.DeserializeObject<T>(json, jsonSerializerSettings);
}
private static async Task<byte[]> ReadAllBytesAsync(Stream input, CancellationToken cancellationToken)
{
byte[] buffer = new byte[streamBufferSize];
using (var ms = new MemoryStream())
{
int read;
while ((read = await input.ReadAsync(buffer, 0, buffer.Length, cancellationToken)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
private static string MakeNewCommandLineOptions(string listenAddress)
{
return $"--listenAddress {listenAddress}";
}
private static Task ThrowOnCancellation(Task task, CancellationToken cancellationToken)
{
return task.IsCompleted
? task // If the task is already completed, no need to wrap it in a further layer of task
: task.ContinueWith(
_ => {}, // If the task completes, allow execution to continue
cancellationToken,
TaskContinuationOptions.ExecuteSynchronously,
TaskScheduler.Default);
}
#pragma warning disable 649 // These properties are populated via JSON deserialization
private class RpcJsonResponse<TResult>
{
public TResult Result { get; set; }
public string ErrorMessage { get; set; }
public string ErrorDetails { get; set; }
}
#pragma warning restore 649
}
}

View File

@@ -1,21 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.Sockets
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over binary sockets (on Windows, this is
/// implemented as named pipes; on other platforms it uses domain sockets).
/// </summary>
public static void UseSocketHosting(this NodeServicesOptions options)
{
var pipeName = "pni-" + Guid.NewGuid().ToString("D"); // Arbitrary non-clashing string
options.NodeInstanceFactory = () => new SocketNodeInstance(options, pipeName);
}
}
}
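
As a usage sketch (assuming the usual ASP.NET Core service registration flow; a bare ServiceCollection is used here only to keep the snippet self-contained), socket hosting is opted into through the AddNodeServices options callback:

using Microsoft.AspNetCore.NodeServices.Sockets;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();

// The default hosting model is HTTP (set up in the NodeServicesOptions constructor);
// UseSocketHosting swaps the transport for named pipes / domain sockets.
services.AddNodeServices(options => options.UseSocketHosting());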

View File

@@ -1,79 +0,0 @@
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
import '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/OverrideStdOutputs';
import * as net from 'net';
import * as path from 'path';
import * as readline from 'readline';
import { Duplex } from 'stream';
import { parseArgs } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ArgsUtil';
import { exitWhenParentExits } from '../../Microsoft.AspNetCore.NodeServices/TypeScript/Util/ExitWhenParentExits';
import * as virtualConnectionServer from './VirtualConnections/VirtualConnectionServer';
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
const dynamicRequire: (name: string) => any = eval('require');
// Signal to the .NET side when we're ready to accept invocations
const server = net.createServer().on('listening', () => {
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
// Each virtual connection represents a separate invocation
virtualConnectionServer.createInterface(server).on('connection', (connection: Duplex) => {
readline.createInterface(connection, null).on('line', line => {
try {
// Get a reference to the function to invoke
const invocation = JSON.parse(line) as RpcInvocation;
const invokedModule = dynamicRequire(path.resolve(process.cwd(), invocation.moduleName));
const invokedFunction = invocation.exportedFunctionName ? invokedModule[invocation.exportedFunctionName] : invokedModule;
// Prepare a callback for accepting non-streamed JSON responses
let hasInvokedCallback = false;
const invocationCallback = (errorValue, successValue) => {
if (hasInvokedCallback) {
throw new Error('Cannot supply more than one result. The callback has already been invoked,'
+ ' or the result stream has already been accessed');
}
hasInvokedCallback = true;
connection.end(JSON.stringify({
result: successValue,
errorMessage: errorValue && (errorValue.message || errorValue),
errorDetails: errorValue && (errorValue.stack || null)
}));
};
// Also support streamed binary responses
Object.defineProperty(invocationCallback, 'stream', {
enumerable: true,
get: (): Duplex => {
hasInvokedCallback = true;
return connection;
}
});
// Actually invoke it, passing through any supplied args
invokedFunction.apply(null, [invocationCallback].concat(invocation.args));
} catch (ex) {
connection.end(JSON.stringify({
errorMessage: ex.message,
errorDetails: ex.stack
}));
}
});
});
// Begin listening now. The underlying transport varies according to the runtime platform.
// On Windows it's Named Pipes; on Linux/OSX it's Domain Sockets.
const useWindowsNamedPipes = /^win/.test(process.platform);
const parsedArgs = parseArgs(process.argv);
const listenAddress = (useWindowsNamedPipes ? '\\\\.\\pipe\\' : '/tmp/') + parsedArgs.listenAddress;
server.listen(listenAddress);
exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
interface RpcInvocation {
moduleName: string;
exportedFunctionName: string;
args: any[];
}

View File

@@ -1,43 +0,0 @@
import { Duplex } from 'stream';
export type EndWriteCallback = (error?: any) => void;
export type BeginWriteCallback = (data: Buffer, callback: EndWriteCallback) => void;
/**
* Represents a virtual connection. Multiple virtual connections may be multiplexed over a single physical socket connection.
*/
export class VirtualConnection extends Duplex {
private _flowing = false;
private _receivedDataQueue: Buffer[] = [];
constructor(private _beginWriteCallback: BeginWriteCallback) {
super();
}
public _read() {
this._flowing = true;
// Keep pushing data until we run out, or the underlying framework asks us to stop.
// When we finish, the 'flowing' state is determined by whether more data is still being requested.
while (this._flowing && this._receivedDataQueue.length > 0) {
const nextChunk = this._receivedDataQueue.shift();
this._flowing = this.push(nextChunk);
}
}
public _write(chunk: Buffer | string, encodingIfString: string, callback: EndWriteCallback) {
if (typeof chunk === 'string') {
chunk = new Buffer(chunk as string, encodingIfString);
}
this._beginWriteCallback(chunk as Buffer, callback);
}
public onReceivedData(dataOrNullToSignalEOF: Buffer) {
if (this._flowing) {
this._flowing = this.push(dataOrNullToSignalEOF);
} else {
this._receivedDataQueue.push(dataOrNullToSignalEOF);
}
}
}

View File

@@ -1,199 +0,0 @@
import { Server, Socket } from 'net';
import { EventEmitter } from 'events';
import { Duplex } from 'stream';
import { VirtualConnection, EndWriteCallback } from './VirtualConnection';
// Keep this in sync with the equivalent constant in the .NET code. Both sides split up their transmissions into frames with this max length,
// and both will reject longer frames.
const MaxFrameBodyLength = 16 * 1024;
/**
* Accepts connections to a net.Server and adapts them to behave as multiplexed connections. That is, for each physical socket connection,
* we track a list of 'virtual connections' whose API is a Duplex stream. The remote clients may open and close as many virtual connections
* as they wish, reading and writing to them independently, without the overhead of establishing new physical connections each time.
*/
export function createInterface(server: Server): EventEmitter {
const emitter = new EventEmitter();
server.on('connection', (socket: Socket) => {
// For each physical socket connection, maintain a set of virtual connections. Issue a notification whenever
// a new virtual connection is opened.
const childSockets = new VirtualConnectionsCollection(socket, virtualConnection => {
emitter.emit('connection', virtualConnection);
});
});
return emitter;
}
/**
* Tracks the 'virtual connections' associated with a single physical socket connection.
*/
class VirtualConnectionsCollection {
private _currentFrameHeader: FrameHeader = null;
private _virtualConnections: { [id: string]: VirtualConnection } = {};
constructor(private _socket: Socket, private _onVirtualConnectionCallback: (virtualConnection: Duplex) => void) {
// If the remote end closes the physical socket, treat all the virtual connections as being closed remotely too
this._socket.on('close', () => {
Object.getOwnPropertyNames(this._virtualConnections).forEach(id => {
// A 'null' frame signals that the connection was closed remotely
this._virtualConnections[id].onReceivedData(null);
});
});
this._socket.on('readable', this._onIncomingDataAvailable.bind(this));
}
/**
* This is called whenever the underlying socket signals that it may have some data available to read. It will synchronously read as many
* message frames as it can from the underlying socket, open virtual connections as needed, and dispatch data to them.
*/
private _onIncomingDataAvailable() {
let exhaustedAllData = false;
while (!exhaustedAllData) {
// We might already have a pending frame header from the previous time this method ran, but if not, that's the next thing we need to read
if (this._currentFrameHeader === null) {
this._currentFrameHeader = this._readNextFrameHeader();
}
if (this._currentFrameHeader === null) {
// There's not enough data to fill a frame header, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
} else {
const frameBodyLength = this._currentFrameHeader.bodyLength;
const frameBodyOrNull: Buffer = frameBodyLength > 0 ? this._socket.read(this._currentFrameHeader.bodyLength) : null;
if (frameBodyOrNull !== null || frameBodyLength === 0) {
// We have a complete frame header+body pair, so we can now dispatch this to a virtual connection. We set _currentFrameHeader back to null
// so that the next thing we try to read is the next frame header.
const headerCopy = this._currentFrameHeader;
this._currentFrameHeader = null;
this._onReceivedCompleteFrame(headerCopy, frameBodyOrNull);
} else {
// There's not enough data to fill the pending frame body, so wait until more arrives later
// The next attempt to read from the socket will start from the same place this one did (incomplete reads don't consume any data)
exhaustedAllData = true;
}
}
}
}
private _onReceivedCompleteFrame(header: FrameHeader, bodyIfNotEmpty: Buffer) {
// An incoming zero-length frame signals that there's no more data to read.
// Signal this to the Node stream APIs by pushing a 'null' chunk to it.
const virtualConnection = this._getOrOpenVirtualConnection(header);
virtualConnection.onReceivedData(header.bodyLength > 0 ? bodyIfNotEmpty : null);
}
private _getOrOpenVirtualConnection(header: FrameHeader) {
if (this._virtualConnections.hasOwnProperty(header.connectionIdString)) {
// It's an existing virtual connection
return this._virtualConnections[header.connectionIdString];
} else {
// It's a new one
return this._openVirtualConnection(header);
}
}
private _openVirtualConnection(header: FrameHeader) {
const beginWriteCallback = (data, writeCompletedCallback) => {
// Only send nonempty frames, since empty ones are a signal to close the virtual connection
if (data.length > 0) {
this._sendFrame(header.connectionIdBinary, data, writeCompletedCallback);
}
};
const newVirtualConnection = new VirtualConnection(beginWriteCallback);
newVirtualConnection.on('end', () => {
// The virtual connection was closed remotely. Clean up locally.
this._onVirtualConnectionWasClosed(header.connectionIdString);
});
newVirtualConnection.on('finish', () => {
// The virtual connection was closed locally. Clean up locally, and notify the remote that we're done.
this._onVirtualConnectionWasClosed(header.connectionIdString);
this._sendFrame(header.connectionIdBinary, new Buffer(0));
});
this._virtualConnections[header.connectionIdString] = newVirtualConnection;
this._onVirtualConnectionCallback(newVirtualConnection);
return newVirtualConnection;
}
/**
* Attempts to read a complete frame header, synchronously, from the underlying socket.
* If not enough data is available synchronously, returns null without consuming any data from the socket.
*/
private _readNextFrameHeader(): FrameHeader {
const headerBuf: Buffer = this._socket.read(12);
if (headerBuf !== null) {
// We have enough data synchronously
const connectionIdBinary = headerBuf.slice(0, 8);
const connectionIdString = connectionIdBinary.toString('hex');
const bodyLength = headerBuf.readInt32LE(8);
if (bodyLength < 0 || bodyLength > MaxFrameBodyLength) {
// Throwing here is going to bring down the whole process, so this cannot be allowed to happen in real use.
// But it won't happen in real use, because this is only used with our .NET client, which doesn't violate this rule.
throw new Error('Illegal frame body length: ' + bodyLength);
}
return { connectionIdBinary, connectionIdString, bodyLength };
} else {
// Not enough bytes are available synchronously, so none were consumed
return null;
}
}
private _sendFrame(connectionIdBinary: Buffer, data: Buffer, callback?: EndWriteCallback) {
// For all sends other than the last one, only invoke the callback if it failed.
// Also, only invoke the callback at most once.
let hasInvokedCallback = false;
const finalCallback: EndWriteCallback = callback && (error => {
if (!hasInvokedCallback) {
hasInvokedCallback = true;
callback(error);
}
});
const notFinalCallback: EndWriteCallback = callback && (error => {
if (error) {
finalCallback(error);
}
});
// The amount of data we're writing might exceed MaxFrameBodyLength, so split into frames as needed.
// Note that we always send at least one frame, even if it's empty (because that's the close-virtual-connection signal).
// If needed, this could be changed to send frames asynchronously, so that large sends could proceed in parallel
// (though that would involve making a clone of 'data', to avoid the risk of it being mutated during the send).
let bytesSent = 0;
do {
const nextFrameBodyLength = Math.min(MaxFrameBodyLength, data.length - bytesSent);
const isFinalChunk = (bytesSent + nextFrameBodyLength) === data.length;
this._socket.write(connectionIdBinary, notFinalCallback);
this._sendInt32LE(nextFrameBodyLength, notFinalCallback);
this._socket.write(data.slice(bytesSent, bytesSent + nextFrameBodyLength), isFinalChunk ? finalCallback : notFinalCallback);
bytesSent += nextFrameBodyLength;
} while (bytesSent < data.length);
}
/**
* Sends a number serialized in the correct format for .NET to receive as a System.Int32
*/
private _sendInt32LE(value: number, callback?: EndWriteCallback) {
const buf = new Buffer(4);
buf.writeInt32LE(value, 0);
this._socket.write(buf, callback);
}
private _onVirtualConnectionWasClosed(id: string) {
if (this._virtualConnections.hasOwnProperty(id)) {
delete this._virtualConnections[id];
}
}
}
interface FrameHeader {
connectionIdBinary: Buffer;
connectionIdString: string;
bodyLength: number;
}

View File

@@ -1,11 +0,0 @@
{
"compilerOptions": {
"target": "es3",
"module": "commonjs",
"moduleResolution": "node",
"types": ["node"]
},
"exclude": [
"node_modules"
]
}

View File

@@ -1,150 +0,0 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A virtual read/write connection, typically to a remote process. Multiple virtual connections can be
/// multiplexed over a single physical connection (e.g., a named pipe, domain socket, or TCP socket).
/// </summary>
internal class VirtualConnection : Stream
{
private readonly static Task CompletedTask = Task.CompletedTask;
private VirtualConnectionClient _host;
private readonly BufferBlock<byte[]> _receivedDataQueue = new BufferBlock<byte[]>();
private ArraySegment<byte> _receivedDataNotYetUsed;
private bool _wasClosedByRemote;
private bool _isDisposed;
public VirtualConnection(long id, VirtualConnectionClient host)
{
Id = id;
_host = host;
}
public long Id { get; }
public override bool CanRead { get { return true; } }
public override bool CanSeek { get { return false; } }
public override bool CanWrite { get { return true; } }
public override long Length
{
get { throw new NotImplementedException(); }
}
public override long Position
{
get { throw new NotImplementedException(); }
set { throw new NotImplementedException(); }
}
public override void Flush()
{
// We're auto-flushing, so this is a no-op.
}
public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
return 0;
}
var bytesRead = 0;
while (true)
{
// Pull as many applicable bytes as we can out of receivedDataNotYetUsed, then update its offset/length
int bytesToExtract = Math.Min(count - bytesRead, _receivedDataNotYetUsed.Count);
if (bytesToExtract > 0)
{
Buffer.BlockCopy(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset, buffer, bytesRead, bytesToExtract);
_receivedDataNotYetUsed = new ArraySegment<byte>(_receivedDataNotYetUsed.Array, _receivedDataNotYetUsed.Offset + bytesToExtract, _receivedDataNotYetUsed.Count - bytesToExtract);
bytesRead += bytesToExtract;
}
// If we've completely filled the output buffer, we're done
if (bytesRead == count)
{
return bytesRead;
}
// We haven't yet filled the output buffer, so we must have exhausted receivedDataNotYetUsed instead.
// We want to get the next block of data from the underlying queue.
byte[] nextReceivedBlock;
if (bytesRead > 0)
{
if (!_receivedDataQueue.TryReceive(null, out nextReceivedBlock))
{
// No more data is available synchronously, and we already have some data, so we can stop now
return bytesRead;
}
}
else
{
// Since we don't yet have anything, wait for the underlying source
nextReceivedBlock = await _receivedDataQueue.ReceiveAsync(cancellationToken);
}
if (nextReceivedBlock.Length == 0)
{
// A zero-length block signals that the remote regards this virtual connection as closed
_wasClosedByRemote = true;
return bytesRead;
}
else
{
// We got some more data, so can continue trying to fill the output buffer
_receivedDataNotYetUsed = new ArraySegment<byte>(nextReceivedBlock, 0, nextReceivedBlock.Length);
}
}
}
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (_wasClosedByRemote)
{
throw new InvalidOperationException("The connection was already closed by the remote party");
}
return count > 0 ? _host.WriteAsync(Id, buffer, offset, count, cancellationToken) : CompletedTask;
}
public override int Read(byte[] buffer, int offset, int count)
{
return ReadAsync(buffer, offset, count, CancellationToken.None).Result;
}
public override long Seek(long offset, SeekOrigin origin)
{
throw new NotImplementedException();
}
public override void SetLength(long value)
{
throw new NotImplementedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
WriteAsync(buffer, offset, count, CancellationToken.None).Wait();
}
protected override void Dispose(bool disposing)
{
if (disposing && !_isDisposed)
{
_isDisposed = true;
_host.CloseInnerStream(Id, _wasClosedByRemote);
}
}
public async Task AddDataToQueue(byte[] data)
{
await _receivedDataQueue.SendAsync(data);
}
}
}

View File

@@ -1,238 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.Sockets.VirtualConnections
{
/// <summary>
/// A callback that will be invoked if the <see cref="VirtualConnectionClient"/> encounters a read error.
/// </summary>
/// <param name="ex"></param>
public delegate void VirtualConnectionReadErrorHandler(Exception ex);
/// <summary>
/// Wraps an underlying physical read/write stream (e.g., named pipes, domain sockets, or TCP sockets) and
/// exposes an API for making 'virtual connections', which act as independent read/write streams.
/// Traffic over these virtual connections is multiplexed over the underlying physical stream. This is useful
/// for fast stream-based inter-process communication because it avoids the overhead of opening a new physical
/// connection each time a new communication channel is needed.
/// </summary>
internal class VirtualConnectionClient : IDisposable
{
internal const int MaxFrameBodyLength = 16 * 1024;
public event VirtualConnectionReadErrorHandler OnError;
private Stream _underlyingTransport;
private Dictionary<long, VirtualConnection> _activeInnerStreams;
private long _nextInnerStreamId;
private readonly SemaphoreSlim _streamWriterSemaphore = new SemaphoreSlim(1);
private readonly object _readControlLock = new object();
private Exception _readLoopExitedWithException;
private readonly CancellationTokenSource _disposalCancellatonToken = new CancellationTokenSource();
private bool _disposedValue = false;
public VirtualConnectionClient(Stream underlyingTransport)
{
_underlyingTransport = underlyingTransport;
_activeInnerStreams = new Dictionary<long, VirtualConnection>();
RunReadLoop();
}
public Stream OpenVirtualConnection()
{
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
var id = Interlocked.Increment(ref _nextInnerStreamId);
var newInnerStream = new VirtualConnection(id, this);
lock (_activeInnerStreams)
{
_activeInnerStreams.Add(id, newInnerStream);
}
return newInnerStream;
}
// It's async void because nothing waits for it to finish (it continues indefinitely). It signals any errors via
// a separate channel.
private async void RunReadLoop()
{
try
{
while (!_disposalCancellatonToken.IsCancellationRequested)
{
var remoteIsStillConnected = await ProcessNextFrameAsync();
if (!remoteIsStillConnected)
{
CloseAllActiveStreams();
}
}
}
catch (Exception ex)
{
// Not all underlying transports correctly honor cancellation tokens. For example,
// DomainSocketStreamTransport's ReadAsync ignores them, so we only know to stop
// the read loop when the underlying stream is disposed and then it throws ObjectDisposedException.
if (!(ex is TaskCanceledException || ex is ObjectDisposedException))
{
_readLoopExitedWithException = ex;
var evt = OnError;
if (evt != null)
{
evt(ex);
}
}
}
}
private async Task<bool> ProcessNextFrameAsync()
{
// First read frame header
var frameHeaderBuffer = await ReadExactLength(12);
if (frameHeaderBuffer == null)
{
return false; // Underlying stream was closed
}
// Parse frame header, then read the frame body
long streamId = BitConverter.ToInt64(frameHeaderBuffer, 0);
int frameBodyLength = BitConverter.ToInt32(frameHeaderBuffer, 8);
if (frameBodyLength < 0 || frameBodyLength > MaxFrameBodyLength)
{
throw new InvalidDataException("Illegal frame length: " + frameBodyLength);
}
var frameBody = await ReadExactLength(frameBodyLength);
if (frameBody == null)
{
return false; // Underlying stream was closed
}
// Dispatch the frame to the relevant inner stream
VirtualConnection innerStream;
lock (_activeInnerStreams)
{
_activeInnerStreams.TryGetValue(streamId, out innerStream);
}
if (innerStream != null)
{
await innerStream.AddDataToQueue(frameBody);
}
return true;
}
private async Task<byte[]> ReadExactLength(int lengthToRead) {
byte[] buffer = new byte[lengthToRead];
var totalBytesRead = 0;
var ct = _disposalCancellatonToken.Token;
while (totalBytesRead < lengthToRead)
{
var chunkLengthRead = await _underlyingTransport.ReadAsync(buffer, totalBytesRead, lengthToRead - totalBytesRead, ct);
if (chunkLengthRead == 0)
{
// Underlying stream was closed
return null;
}
totalBytesRead += chunkLengthRead;
}
return buffer;
}
private void CloseAllActiveStreams()
{
IList<VirtualConnection> innerStreamsCopy;
// Only hold the lock while cloning the list of inner streams. Release the lock before
// actually disposing them, because each 'dispose' call will try to take another lock
// so it can remove that inner stream from activeInnerStreams.
lock (_activeInnerStreams)
{
innerStreamsCopy = _activeInnerStreams.Values.ToList();
}
foreach (var stream in innerStreamsCopy)
{
stream.Dispose();
}
}
public void Dispose()
{
if (!_disposedValue)
{
_disposedValue = true;
_disposalCancellatonToken.Cancel(); // Stops the read loop
CloseAllActiveStreams();
}
}
public async Task WriteAsync(long innerStreamId, byte[] data, int offset, int count, CancellationToken cancellationToken)
{
// In case the amount of data to be sent exceeds the max frame length, split it into separate frames
// Note that we always send at least one frame, even if it's empty, because the zero-length frame is the signal to close a virtual connection
// (hence 'do..while' instead of just 'while').
int bytesWritten = 0;
do {
// Improve discoverability of read-loop errors (in case the developer doesn't add an OnError listener)
ThrowIfReadLoopFailed();
// Hold the write lock only for the time taken to send a single frame, not all frames, to allow large sends to proceed in parallel
await _streamWriterSemaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
// Write stream ID, then length prefix, then chunk payload, then flush
var nextChunkBodyLength = Math.Min(MaxFrameBodyLength, count - bytesWritten);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(innerStreamId), 0, 8, cancellationToken).ConfigureAwait(false);
await _underlyingTransport.WriteAsync(BitConverter.GetBytes(nextChunkBodyLength), 0, 4, cancellationToken).ConfigureAwait(false);
if (nextChunkBodyLength > 0)
{
await _underlyingTransport.WriteAsync(data, offset + bytesWritten, nextChunkBodyLength, cancellationToken).ConfigureAwait(false);
bytesWritten += nextChunkBodyLength;
}
await _underlyingTransport.FlushAsync(cancellationToken).ConfigureAwait(false);
}
finally
{
_streamWriterSemaphore.Release();
}
} while (bytesWritten < count);
}
public void CloseInnerStream(long innerStreamId, bool isAlreadyClosedRemotely)
{
lock (_activeInnerStreams)
{
if (_activeInnerStreams.ContainsKey(innerStreamId))
{
_activeInnerStreams.Remove(innerStreamId);
}
}
if (!isAlreadyClosedRemotely) {
// Also notify the remote that this inner stream is closed
WriteAsync(innerStreamId, new byte[0], 0, 0, new CancellationToken()).Wait();
}
}
private void ThrowIfReadLoopFailed()
{
if (_readLoopExitedWithException != null)
{
throw new AggregateException("The connection failed - see InnerException for details.", _readLoopExitedWithException);
}
}
}
}
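
Both ends of the multiplexing protocol above use the same frame layout: an 8-byte connection id, a 4-byte body length (both little-endian, matching BitConverter on the supported platforms and readInt32LE on the Node side), then up to 16 KB of body; a zero-length body closes the virtual connection. A stand-alone sketch of composing one frame the way WriteAsync does (buffer contents only, not wired to any transport):

using System;
using System.IO;

internal static class FrameFormatSketch
{
    // Mirrors the framing written by VirtualConnectionClient.WriteAsync and read by the Node-side VirtualConnectionServer.
    public static byte[] BuildFrame(long connectionId, byte[] body)
    {
        if (body.Length > 16 * 1024)
        {
            throw new ArgumentException("Body exceeds MaxFrameBodyLength", nameof(body));
        }
        using (var ms = new MemoryStream())
        {
            ms.Write(BitConverter.GetBytes(connectionId), 0, 8); // 8-byte connection id
            ms.Write(BitConverter.GetBytes(body.Length), 0, 4);  // 4-byte body length prefix
            ms.Write(body, 0, body.Length);                      // payload; a zero-length body signals 'close this virtual connection'
            return ms.ToArray();
        }
    }
}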

View File

@@ -1,18 +0,0 @@
{
"name": "nodeservices.sockets",
"version": "1.0.0",
"description": "This is not really an NPM package and will not be published. This file exists only to reference compilation tools.",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "./node_modules/.bin/webpack"
},
"author": "Microsoft",
"license": "Apache-2.0",
"devDependencies": {
"@types/node": "^6.0.42",
"ts-loader": "^0.8.2",
"typescript": "^2.0.0",
"webpack": "^1.13.1"
}
}

View File

@@ -1,20 +0,0 @@
module.exports = {
target: 'node',
externals: ['fs', 'net', 'events', 'readline', 'stream'],
resolve: {
extensions: [ '.ts' ]
},
module: {
loaders: [
{ test: /\.ts$/, loader: 'ts-loader' },
]
},
entry: {
'entrypoint-socket': ['./TypeScript/SocketNodeInstanceEntryPoint'],
},
output: {
libraryTarget: 'commonjs',
path: './Content/Node',
filename: '[name].js'
}
};

View File

@@ -1,3 +0,0 @@
/bin/
/node_modules/
yarn.lock

View File

@@ -1,25 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Supplies INodeServices instances.
/// </summary>
public static class NodeServicesFactory
{
/// <summary>
/// Create an <see cref="INodeServices"/> instance according to the supplied options.
/// </summary>
/// <param name="options">Options for creating the <see cref="INodeServices"/> instance.</param>
/// <returns>An <see cref="INodeServices"/> instance.</returns>
public static INodeServices CreateNodeServices(NodeServicesOptions options)
{
if (options == null)
{
throw new ArgumentNullException(nameof (options));
}
return new NodeServicesImpl(options.NodeInstanceFactory);
}
}
}
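
Outside the DI registration path, the factory can also be called directly. A minimal sketch (the empty ServiceCollection exists only to satisfy the NodeServicesOptions constructor; in an ASP.NET Core app you would pass the application's own service provider, and the project path below is purely illustrative):

using Microsoft.AspNetCore.NodeServices;
using Microsoft.Extensions.DependencyInjection;

var serviceProvider = new ServiceCollection().BuildServiceProvider();

var options = new NodeServicesOptions(serviceProvider)
{
    // Illustrative value; defaults to IHostingEnvironment.ContentRootPath when one is registered.
    ProjectPath = "/path/to/project"
};

INodeServices nodeServices = NodeServicesFactory.CreateNodeServices(options);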

View File

@@ -1,109 +0,0 @@
using System;
using System.Collections.Generic;
using System.Threading;
using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Logging.Console;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Describes options used to configure an <see cref="INodeServices"/> instance.
/// </summary>
public class NodeServicesOptions
{
internal const string TimeoutConfigPropertyName = nameof(InvocationTimeoutMilliseconds);
private const int DefaultInvocationTimeoutMilliseconds = 60 * 1000;
private const string LogCategoryName = "Microsoft.AspNetCore.NodeServices";
private static readonly string[] DefaultWatchFileExtensions = { ".js", ".jsx", ".ts", ".tsx", ".json", ".html" };
/// <summary>
/// Creates a new instance of <see cref="NodeServicesOptions"/>.
/// </summary>
/// <param name="serviceProvider">The <see cref="IServiceProvider"/>.</param>
public NodeServicesOptions(IServiceProvider serviceProvider)
{
if (serviceProvider == null)
{
throw new ArgumentNullException(nameof (serviceProvider));
}
EnvironmentVariables = new Dictionary<string, string>();
InvocationTimeoutMilliseconds = DefaultInvocationTimeoutMilliseconds;
WatchFileExtensions = (string[])DefaultWatchFileExtensions.Clone();
// In an ASP.NET environment, we can use the IHostingEnvironment data to auto-populate a few
// things that you'd otherwise have to specify manually
var hostEnv = serviceProvider.GetService<IHostingEnvironment>();
if (hostEnv != null)
{
ProjectPath = hostEnv.ContentRootPath;
EnvironmentVariables["NODE_ENV"] = hostEnv.IsDevelopment() ? "development" : "production"; // De-facto standard values for Node
}
var applicationLifetime = serviceProvider.GetService<IApplicationLifetime>();
if (applicationLifetime != null)
{
ApplicationStoppingToken = applicationLifetime.ApplicationStopping;
}
// If the DI system gives us a logger, use it. Otherwise, set up a default one.
var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
NodeInstanceOutputLogger = loggerFactory != null
? loggerFactory.CreateLogger(LogCategoryName)
: new ConsoleLogger(LogCategoryName, null, false);
// By default, we use this package's built-in out-of-process-via-HTTP hosting/transport
this.UseHttpHosting();
}
/// <summary>
/// Specifies how to construct Node.js instances. An <see cref="INodeInstance"/> encapsulates all details about
/// how Node.js instances are launched and communicated with. A new <see cref="INodeInstance"/> will be created
/// automatically if the previous instance has terminated (e.g., because a source file changed).
/// </summary>
public Func<INodeInstance> NodeInstanceFactory { get; set; }
/// <summary>
/// If set, overrides the path to the root of your application. This path is used when locating Node.js modules relative to your project.
/// </summary>
public string ProjectPath { get; set; }
/// <summary>
/// If set, the Node.js instance should restart when any matching file on disk within your project changes.
/// </summary>
public string[] WatchFileExtensions { get; set; }
/// <summary>
/// The Node.js instance's stdout/stderr will be redirected to this <see cref="ILogger"/>.
/// </summary>
public ILogger NodeInstanceOutputLogger { get; set; }
/// <summary>
/// If true, the Node.js instance will accept incoming V8 debugger connections (e.g., from node-inspector).
/// </summary>
public bool LaunchWithDebugging { get; set; }
/// <summary>
/// If <see cref="LaunchWithDebugging"/> is true, the Node.js instance will listen for V8 debugger connections on this port.
/// </summary>
public int DebuggingPort { get; set; }
/// <summary>
/// If set, starts the Node.js instance with the specified environment variables.
/// </summary>
public IDictionary<string, string> EnvironmentVariables { get; set; }
/// <summary>
/// Specifies the maximum duration, in milliseconds, that your .NET code should wait for Node.js RPC calls to return.
/// </summary>
public int InvocationTimeoutMilliseconds { get; set; }
/// <summary>
/// A token that indicates when the host application is stopping.
/// </summary>
public CancellationToken ApplicationStoppingToken { get; set; }
}
}

View File

@@ -1,41 +0,0 @@
using System;
using Microsoft.AspNetCore.NodeServices;
namespace Microsoft.Extensions.DependencyInjection
{
/// <summary>
/// Extension methods for setting up NodeServices in an <see cref="IServiceCollection" />.
/// </summary>
public static class NodeServicesServiceCollectionExtensions
{
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection)
=> AddNodeServices(serviceCollection, _ => {});
/// <summary>
/// Adds NodeServices support to the <paramref name="serviceCollection"/>.
/// </summary>
/// <param name="serviceCollection">The <see cref="IServiceCollection"/>.</param>
/// <param name="setupAction">A callback that will be invoked to populate the <see cref="NodeServicesOptions"/>.</param>
public static void AddNodeServices(this IServiceCollection serviceCollection, Action<NodeServicesOptions> setupAction)
{
if (setupAction == null)
{
throw new ArgumentNullException(nameof (setupAction));
}
serviceCollection.AddSingleton(typeof(INodeServices), serviceProvider =>
{
// First we let NodeServicesOptions take its defaults from the IServiceProvider,
// then we let the developer override those options
var options = new NodeServicesOptions(serviceProvider);
setupAction(options);
return NodeServicesFactory.CreateNodeServices(options);
});
}
}
}
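
Because the setup action runs after the defaults have been applied, it can selectively override individual options. A sketch (the values shown are illustrative only; in a real app this would sit inside Startup.ConfigureServices):

using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();

services.AddNodeServices(options =>
{
    // Both properties already have defaults from the NodeServicesOptions constructor.
    options.InvocationTimeoutMilliseconds = 120 * 1000;
    options.WatchFileExtensions = new[] { ".js", ".json" };
});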

View File

@@ -1,305 +0,0 @@
(function(e, a) { for(var i in a) e[i] = a[i]; }(exports, /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(1);
/***/ },
/* 1 */
/***/ function(module, exports, __webpack_require__) {
"use strict";
// Limit dependencies to core Node modules. This means the code in this file has to be very low-level and unattractive,
// but simplifies things for the consumer of this module.
__webpack_require__(2);
var http = __webpack_require__(3);
var path = __webpack_require__(4);
var ArgsUtil_1 = __webpack_require__(5);
var ExitWhenParentExits_1 = __webpack_require__(6);
// Webpack doesn't support dynamic requires for files not present at compile time, so grab a direct
// reference to Node's runtime 'require' function.
var dynamicRequire = eval('require');
var server = http.createServer(function (req, res) {
readRequestBodyAsJson(req, function (bodyJson) {
var hasSentResult = false;
var callback = function (errorValue, successValue) {
if (!hasSentResult) {
hasSentResult = true;
if (errorValue) {
respondWithError(res, errorValue);
}
else if (typeof successValue !== 'string') {
// Arbitrary object/number/etc - JSON-serialize it
var successValueJson = void 0;
try {
successValueJson = JSON.stringify(successValue);
}
catch (ex) {
// JSON serialization error - pass it back to .NET
respondWithError(res, ex);
return;
}
res.setHeader('Content-Type', 'application/json');
res.end(successValueJson);
}
else {
// String - can bypass JSON-serialization altogether
res.setHeader('Content-Type', 'text/plain');
res.end(successValue);
}
}
};
// Support streamed responses
Object.defineProperty(callback, 'stream', {
enumerable: true,
get: function () {
if (!hasSentResult) {
hasSentResult = true;
res.setHeader('Content-Type', 'application/octet-stream');
}
return res;
}
});
try {
var resolvedPath = path.resolve(process.cwd(), bodyJson.moduleName);
var invokedModule = dynamicRequire(resolvedPath);
var func = bodyJson.exportedFunctionName ? invokedModule[bodyJson.exportedFunctionName] : invokedModule;
if (!func) {
throw new Error('The module "' + resolvedPath + '" has no export named "' + bodyJson.exportedFunctionName + '"');
}
func.apply(null, [callback].concat(bodyJson.args));
}
catch (synchronousException) {
callback(synchronousException, null);
}
});
});
var parsedArgs = ArgsUtil_1.parseArgs(process.argv);
var requestedPortOrZero = parsedArgs.port || 0; // 0 means 'let the OS decide'
server.listen(requestedPortOrZero, 'localhost', function () {
// Signal to HttpNodeHost which port it should make its HTTP connections on
console.log('[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port ' + server.address().port + '\]');
// Signal to the NodeServices base class that we're ready to accept invocations
console.log('[Microsoft.AspNetCore.NodeServices:Listening]');
});
ExitWhenParentExits_1.exitWhenParentExits(parseInt(parsedArgs.parentPid), /* ignoreSigint */ true);
function readRequestBodyAsJson(request, callback) {
var requestBodyAsString = '';
request.on('data', function (chunk) { requestBodyAsString += chunk; });
request.on('end', function () { callback(JSON.parse(requestBodyAsString)); });
}
function respondWithError(res, errorValue) {
res.statusCode = 500;
res.end(errorValue.stack || errorValue.toString());
}
/***/ },
/* 2 */
/***/ function(module, exports) {
// When Node writes to stdout/stderr, we capture that and convert the lines into calls on the
// active .NET ILogger. But by default, stdout/stderr don't have any way of distinguishing
// linebreaks inside log messages from the linebreaks that delimit separate log messages,
// so multiline strings will end up being written to the ILogger as multiple independent
// log messages. This makes them very hard to make sense of, especially when they represent
// something like stack traces.
//
// To fix this, we intercept stdout/stderr writes, and replace internal linebreaks with a
// marker token. When .NET receives the lines, it converts the marker tokens back to regular
// linebreaks within the logged messages.
//
// Note that it's better to do the interception at the stdout/stderr level, rather than at
// the console.log/console.error (etc.) level, because this takes place after any native
// message formatting has taken place (e.g., inserting values for % placeholders).
var findInternalNewlinesRegex = /\n(?!$)/g;
var encodedNewline = '__ns_newline__';
encodeNewlinesWrittenToStream(process.stdout);
encodeNewlinesWrittenToStream(process.stderr);
function encodeNewlinesWrittenToStream(outputStream) {
var origWriteFunction = outputStream.write;
outputStream.write = function (value) {
// Only interfere with the write if it's definitely a string
if (typeof value === 'string') {
var argsClone = Array.prototype.slice.call(arguments, 0);
argsClone[0] = encodeNewlinesInString(value);
origWriteFunction.apply(this, argsClone);
}
else {
origWriteFunction.apply(this, arguments);
}
};
}
function encodeNewlinesInString(str) {
return str.replace(findInternalNewlinesRegex, encodedNewline);
}
/***/ },
/* 3 */
/***/ function(module, exports) {
module.exports = require("http");
/***/ },
/* 4 */
/***/ function(module, exports) {
module.exports = require("path");
/***/ },
/* 5 */
/***/ function(module, exports) {
"use strict";
function parseArgs(args) {
// Very simplistic parsing which is sufficient for the cases needed. We don't want to bring in any external
// dependencies (such as an args-parsing library) to this file.
var result = {};
var currentKey = null;
args.forEach(function (arg) {
if (arg.indexOf('--') === 0) {
var argName = arg.substring(2);
result[argName] = undefined;
currentKey = argName;
}
else if (currentKey) {
result[currentKey] = arg;
currentKey = null;
}
});
return result;
}
exports.parseArgs = parseArgs;
/***/ },
/* 6 */
/***/ function(module, exports) {
/*
In general, we want the Node child processes to be terminated as soon as the parent .NET processes exit,
because we have no further use for them. If the .NET process shuts down gracefully, it will run its
finalizers, one of which (in OutOfProcessNodeInstance.cs) will kill its associated Node process immediately.
But if the .NET process is terminated forcefully (e.g., on Linux/OSX with 'kill -9'), then it won't have
any opportunity to shut down its child processes, and by default they will keep running. In this case, it's
up to the child process to detect this has happened and terminate itself.
There are many possible approaches to detecting when a parent process has exited, most of which behave
differently between Windows and Linux/OS X:
- On Windows, the parent process can mark its child as being a 'job' that should auto-terminate when
the parent does (http://stackoverflow.com/a/4657392). Not cross-platform.
- The child Node process can get a callback when the parent disconnects (process.on('disconnect', ...)).
But despite http://stackoverflow.com/a/16487966, no callback fires in any case I've tested (Windows / OS X).
- The child Node process can get a callback when its stdin/stdout are disconnected, as described at
http://stackoverflow.com/a/15693934. This works well on OS X, but calling stdout.resume() on Windows
causes the process to terminate prematurely.
- I don't know why, but on Windows, it's enough to invoke process.stdin.resume(). For some reason this causes
the child Node process to exit as soon as the parent one does, but I don't see this documented anywhere.
- You can poll to see if the parent process, or your stdin/stdout connection to it, is gone
- You can directly pass a parent process PID to the child, and then have the child poll to see if it's
still running (e.g., using process.kill(pid, 0), which doesn't kill it but just tests whether it exists,
as per https://nodejs.org/api/process.html#process_process_kill_pid_signal)
- Or, on each poll, you can try writing to process.stdout. If the parent has died, then this will throw.
However I don't see this documented anywhere. It would be nice if you could just poll for whether or not
process.stdout is still connected (without actually writing to it) but I haven't found any property whose
value changes until you actually try to write to it.
Of these, the only cross-platform approach that is actually documented as a valid strategy is simply polling
to check whether the parent PID is still running. So that's what we do here.
*/
"use strict";
var pollIntervalMs = 1000;
function exitWhenParentExits(parentPid, ignoreSigint) {
setInterval(function () {
if (!processExists(parentPid)) {
// Can't log anything at this point, because our stdout was connected to the parent,
// but the parent is gone.
process.exit();
}
}, pollIntervalMs);
if (ignoreSigint) {
// Pressing ctrl+c in the terminal sends a SIGINT to all processes in the foreground process tree.
// By default, the Node process would then exit before the .NET process, because ASP.NET implements
// a delayed shutdown to allow ongoing requests to complete.
//
// This is problematic, because if Node exits first, the CopyToAsync code in ConditionalProxyMiddleware
// will experience a read fault, and logs a huge load of errors. Fortunately, since the Node process is
// already set up to shut itself down if it detects the .NET process is terminated, all we have to do is
// ignore the SIGINT. The Node process will then terminate automatically after the .NET process does.
//
// A better solution would be to have WebpackDevMiddleware listen for SIGINT and gracefully close any
// ongoing EventSource connections before letting the Node process exit, independently of the .NET
// process exiting. However, doing this well in general is very nontrivial (see all the discussion at
// https://github.com/nodejs/node/issues/2642).
process.on('SIGINT', function () {
console.log('Received SIGINT. Waiting for .NET process to exit...');
});
}
}
exports.exitWhenParentExits = exitWhenParentExits;
function processExists(pid) {
try {
// Sending signal 0 - on all platforms - tests whether the process exists. As long as it doesn't
// throw, that means it does exist.
process.kill(pid, 0);
return true;
}
catch (ex) {
// If the reason for the error is that we don't have permission to ask about this process,
// report that as a separate problem.
if (ex.code === 'EPERM') {
throw new Error("Attempted to check whether process " + pid + " was running, but got a permissions error.");
}
return false;
}
}
/***/ }
/******/ ])));

View File

@@ -1,140 +0,0 @@
using System;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// A specialisation of the OutOfProcessNodeInstance base class that uses HTTP to perform RPC invocations.
///
/// The Node child process starts an HTTP listener on an arbitrary available port (except where a nonzero
/// port number is specified as a constructor parameter), and signals which port was selected using the same
/// input/output-based mechanism that the base class uses to determine when the child process is ready to
/// accept RPC invocations.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.OutOfProcessNodeInstance" />
internal class HttpNodeInstance : OutOfProcessNodeInstance
{
private static readonly Regex PortMessageRegex =
new Regex(@"^\[Microsoft.AspNetCore.NodeServices.HttpNodeHost:Listening on port (\d+)\]$");
private static readonly JsonSerializerSettings jsonSerializerSettings = new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver(),
TypeNameHandling = TypeNameHandling.None
};
private readonly HttpClient _client;
private bool _disposed;
private int _portNumber;
public HttpNodeInstance(NodeServicesOptions options, int port = 0)
: base(
EmbeddedResourceReader.Read(
typeof(HttpNodeInstance),
"/Content/Node/entrypoint-http.js"),
options.ProjectPath,
options.WatchFileExtensions,
MakeCommandLineOptions(port),
options.ApplicationStoppingToken,
options.NodeInstanceOutputLogger,
options.EnvironmentVariables,
options.InvocationTimeoutMilliseconds,
options.LaunchWithDebugging,
options.DebuggingPort)
{
_client = new HttpClient();
}
private static string MakeCommandLineOptions(int port)
{
return $"--port {port}";
}
protected override async Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
{
var payloadJson = JsonConvert.SerializeObject(invocationInfo, jsonSerializerSettings);
var payload = new StringContent(payloadJson, Encoding.UTF8, "application/json");
var response = await _client.PostAsync("http://localhost:" + _portNumber, payload, cancellationToken);
if (!response.IsSuccessStatusCode)
{
// Unfortunately there's no true way to cancel ReadAsStringAsync calls, hence OrThrowOnCancellation
var responseErrorString = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
throw new Exception("Call to Node module failed with error: " + responseErrorString);
}
var responseContentType = response.Content.Headers.ContentType;
switch (responseContentType.MediaType)
{
case "text/plain":
// String responses can skip JSON encoding/decoding
if (typeof(T) != typeof(string))
{
throw new ArgumentException(
"Node module responded with non-JSON string. This cannot be converted to the requested generic type: " +
typeof(T).FullName);
}
var responseString = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return (T)(object)responseString;
case "application/json":
var responseJson = await response.Content.ReadAsStringAsync().OrThrowOnCancellation(cancellationToken);
return JsonConvert.DeserializeObject<T>(responseJson, jsonSerializerSettings);
case "application/octet-stream":
// Streamed responses have to be received as System.IO.Stream instances
if (typeof(T) != typeof(Stream) && typeof(T) != typeof(object))
{
throw new ArgumentException(
"Node module responded with binary stream. This cannot be converted to the requested generic type: " +
typeof(T).FullName + ". Instead you must use the generic type System.IO.Stream.");
}
return (T)(object)(await response.Content.ReadAsStreamAsync().OrThrowOnCancellation(cancellationToken));
default:
throw new InvalidOperationException("Unexpected response content type: " + responseContentType.MediaType);
}
}
protected override void OnOutputDataReceived(string outputData)
{
// Watch for "port selected" messages, and when observed, store the port number
// so we can use it when making HTTP requests. The child process will always send
// one of these messages before it sends a "ready for connections" message.
var match = _portNumber != 0 ? null : PortMessageRegex.Match(outputData);
if (match != null && match.Success)
{
_portNumber = int.Parse(match.Groups[1].Captures[0].Value);
}
else
{
base.OnOutputDataReceived(outputData);
}
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
if (!_disposed)
{
if (disposing)
{
_client.Dispose();
}
_disposed = true;
}
}
}
}

View File

@@ -1,23 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an instance of Node.js to which Remote Procedure Calls (RPC) may be sent.
/// </summary>
public interface INodeInstance : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args);
}
}


@@ -1,55 +0,0 @@
using System;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Represents an exception caused by invoking Node.js code.
/// </summary>
public class NodeInvocationException : Exception
{
/// <summary>
/// If true, indicates that the invocation failed because the Node.js instance could not be reached. For example,
/// it might have already shut down or previously crashed.
/// </summary>
public bool NodeInstanceUnavailable { get; private set; }
/// <summary>
/// If true, indicates that even though the invocation failed because the Node.js instance could not be reached
/// or needs to be restarted, that Node.js instance may remain alive for a period in order to complete any
/// outstanding requests.
/// </summary>
public bool AllowConnectionDraining { get; private set; }
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
public NodeInvocationException(string message, string details)
: base(message + Environment.NewLine + details)
{
}
/// <summary>
/// Creates a new instance of <see cref="NodeInvocationException"/>.
/// </summary>
/// <param name="message">A description of the exception.</param>
/// <param name="details">Additional information, such as a Node.js stack trace, representing the exception.</param>
/// <param name="nodeInstanceUnavailable">Specifies a value for the <see cref="NodeInstanceUnavailable"/> flag.</param>
/// <param name="allowConnectionDraining">Specifies a value for the <see cref="AllowConnectionDraining"/> flag.</param>
public NodeInvocationException(string message, string details, bool nodeInstanceUnavailable, bool allowConnectionDraining)
: this(message, details)
{
// Reject a meaningless combination of flags
if (allowConnectionDraining && !nodeInstanceUnavailable)
{
throw new ArgumentException(
$"The '${ nameof(allowConnectionDraining) }' parameter cannot be true " +
$"unless the '${ nameof(nodeInstanceUnavailable) }' parameter is also true.");
}
NodeInstanceUnavailable = nodeInstanceUnavailable;
AllowConnectionDraining = allowConnectionDraining;
}
}
}
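As an illustrative sketch (not part of the package) of how these two flags are meant to combine, the snippet below mirrors the messages used by OutOfProcessNodeInstance later in this diff; the class and method names are invented for the example:

using Microsoft.AspNetCore.NodeServices.HostingModels;

static class NodeInvocationExceptionFlagsSketch
{
    static void Demonstrate()
    {
        // Valid: the Node instance is unreachable, but in-flight calls may still drain.
        var restartable = new NodeInvocationException(
            "The Node process needs to restart", details: null,
            nodeInstanceUnavailable: true, allowConnectionDraining: true);

        // Valid: the instance is unreachable and should be torn down immediately
        // (e.g., while it holds a debugging port that its replacement will need).
        var immediate = new NodeInvocationException(
            "The Node process has exited", details: null,
            nodeInstanceUnavailable: true, allowConnectionDraining: false);

        // Invalid: connection draining without unavailability is rejected by the
        // constructor above with an ArgumentException.
        // new NodeInvocationException("...", null,
        //     nodeInstanceUnavailable: false, allowConnectionDraining: true);
    }
}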


@@ -1,24 +0,0 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Describes an RPC call sent from .NET code to Node.js code.
/// </summary>
public class NodeInvocationInfo
{
/// <summary>
/// Specifies the path to the Node.js module (i.e., .js file) relative to the project root.
/// </summary>
public string ModuleName { get; set; }
/// <summary>
/// If set, specifies the name of the CommonJS function export to be invoked.
/// If not set, the Node.js module's default export must itself be a function to be invoked.
/// </summary>
public string ExportedFunctionName { get; set; }
/// <summary>
/// A sequence of JSON-serializable arguments to be passed to the Node.js function being invoked.
/// </summary>
public object[] Args { get; set; }
}
}
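For orientation, this is roughly the JSON body that HttpNodeInstance above posts for each invocation. The exact property casing depends on the serializer settings configured elsewhere in the package; camelCase is assumed here, and the module and export names are placeholders:

using System;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using Microsoft.AspNetCore.NodeServices.HostingModels;

class NodeInvocationPayloadSketch
{
    static void Main()
    {
        var invocation = new NodeInvocationInfo
        {
            ModuleName = "./hello",          // resolved relative to the project root
            ExportedFunctionName = "greet",  // null targets the module's default export
            Args = new object[] { "world", 42 }
        };

        // Assumed settings: camelCase property names.
        var settings = new JsonSerializerSettings
        {
            ContractResolver = new CamelCasePropertyNamesContractResolver()
        };

        // Prints approximately:
        // {"moduleName":"./hello","exportedFunctionName":"greet","args":["world",42]}
        Console.WriteLine(JsonConvert.SerializeObject(invocation, settings));
    }
}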


@@ -1,17 +0,0 @@
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Extension methods that help with populating a <see cref="NodeServicesOptions"/> object.
/// </summary>
public static class NodeServicesOptionsExtensions
{
/// <summary>
/// Configures the <see cref="INodeServices"/> service so that it will use out-of-process
/// Node.js instances and perform RPC calls over HTTP.
/// </summary>
public static void UseHttpHosting(this NodeServicesOptions options)
{
options.NodeInstanceFactory = () => new HttpNodeInstance(options);
}
}
}
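A minimal wiring sketch, assuming the package's AddNodeServices registration helper; it shows where UseHttpHosting plugs in, with everything else about the application omitted:

using Microsoft.AspNetCore.NodeServices.HostingModels;
using Microsoft.Extensions.DependencyInjection;

public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        // Register INodeServices and opt in to the HTTP hosting model, so RPC calls
        // to the out-of-process Node.js instance are made over HTTP.
        services.AddNodeServices(options =>
        {
            options.UseHttpHosting();
        });
    }
}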


@@ -1,475 +0,0 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Microsoft.AspNetCore.NodeServices.HostingModels
{
/// <summary>
/// Class responsible for launching a Node child process on the local machine, determining when it is ready to
/// accept invocations, detecting if it dies on its own, and finally terminating it on disposal.
///
/// This abstract base class uses the input/output streams of the child process to perform a simple handshake
/// to determine when the child process is ready to accept invocations. This is agnostic to the mechanism that
/// derived classes use to actually perform the invocations (e.g., they could use HTTP-RPC, or a binary TCP
/// protocol, or any other RPC-type mechanism).
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.HostingModels.INodeInstance" />
public abstract class OutOfProcessNodeInstance : INodeInstance
{
/// <summary>
/// The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr is being redirected.
/// </summary>
protected readonly ILogger OutputLogger;
private const string ConnectionEstablishedMessage = "[Microsoft.AspNetCore.NodeServices:Listening]";
private readonly TaskCompletionSource<object> _connectionIsReadySource = new TaskCompletionSource<object>();
private bool _disposed;
private readonly StringAsTempFile _entryPointScript;
private FileSystemWatcher _fileSystemWatcher;
private int _invocationTimeoutMilliseconds;
private bool _launchWithDebugging;
private readonly Process _nodeProcess;
private int? _nodeDebuggingPort;
private bool _nodeProcessNeedsRestart;
private readonly string[] _watchFileExtensions;
/// <summary>
/// Creates a new instance of <see cref="OutOfProcessNodeInstance"/>.
/// </summary>
/// <param name="entryPointScript">The path to the entry point script that the Node instance should load and execute.</param>
/// <param name="projectPath">The root path of the current project. This is used when resolving Node.js module paths relative to the project root.</param>
/// <param name="watchFileExtensions">The filename extensions that should be watched within the project root. The Node instance will automatically shut itself down if any matching file changes.</param>
/// <param name="commandLineArguments">Additional command-line arguments to be passed to the Node.js instance.</param>
/// <param name="applicationStoppingToken">A token that indicates when the host application is stopping.</param>
/// <param name="nodeOutputLogger">The <see cref="ILogger"/> to which the Node.js instance's stdout/stderr (and other log information) should be written.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="invocationTimeoutMilliseconds">The maximum duration, in milliseconds, to wait for RPC calls to complete.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 debugger connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 debugger connections on this port.</param>
public OutOfProcessNodeInstance(
string entryPointScript,
string projectPath,
string[] watchFileExtensions,
string commandLineArguments,
CancellationToken applicationStoppingToken,
ILogger nodeOutputLogger,
IDictionary<string, string> environmentVars,
int invocationTimeoutMilliseconds,
bool launchWithDebugging,
int debuggingPort)
{
if (nodeOutputLogger == null)
{
throw new ArgumentNullException(nameof(nodeOutputLogger));
}
OutputLogger = nodeOutputLogger;
_entryPointScript = new StringAsTempFile(entryPointScript, applicationStoppingToken);
_invocationTimeoutMilliseconds = invocationTimeoutMilliseconds;
_launchWithDebugging = launchWithDebugging;
var startInfo = PrepareNodeProcessStartInfo(_entryPointScript.FileName, projectPath, commandLineArguments,
environmentVars, _launchWithDebugging, debuggingPort);
_nodeProcess = LaunchNodeProcess(startInfo);
_watchFileExtensions = watchFileExtensions;
_fileSystemWatcher = BeginFileWatcher(projectPath);
ConnectToInputOutputStreams();
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportNameOrNull">If set, specifies the CommonJS export to be invoked. If not set, the module's default CommonJS export itself must be a function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
public async Task<T> InvokeExportAsync<T>(
CancellationToken cancellationToken, string moduleName, string exportNameOrNull, params object[] args)
{
if (_nodeProcess.HasExited || _nodeProcessNeedsRestart)
{
// This special kind of exception triggers a transparent retry - NodeServicesImpl will launch
// a new Node instance and pass the invocation to that one instead.
// Note that if the Node process is listening for debugger connections, then we need it to shut
// down immediately and not stay open for connection draining (because if it did, the new Node
// instance wouldn't be able to start, because the old one would still hold the debugging port).
var message = _nodeProcess.HasExited
? "The Node process has exited"
: "The Node process needs to restart";
throw new NodeInvocationException(
message,
details: null,
nodeInstanceUnavailable: true,
allowConnectionDraining: !_launchWithDebugging);
}
// Construct a new cancellation token that combines the supplied token with the configured invocation
// timeout. Technically we could avoid wrapping the cancellationToken if no timeout is configured,
// but that's not really a major use case, since timeouts are enabled by default.
using (var timeoutSource = new CancellationTokenSource())
using (var combinedCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutSource.Token))
{
if (_invocationTimeoutMilliseconds > 0)
{
timeoutSource.CancelAfter(_invocationTimeoutMilliseconds);
}
// By overwriting the supplied cancellation token, we ensure that it isn't accidentally used
// below. We only want to pass through the token that respects timeouts.
cancellationToken = combinedCancellationTokenSource.Token;
var connectionDidSucceed = false;
try
{
// Wait until the connection is established. This will throw if the connection fails to initialize,
// or if cancellation is requested first. Note that we can't really cancel the "establishing connection"
// task because that's shared with all callers, but we can stop waiting for it if this call is cancelled.
await _connectionIsReadySource.Task.OrThrowOnCancellation(cancellationToken);
connectionDidSucceed = true;
return await InvokeExportAsync<T>(new NodeInvocationInfo
{
ModuleName = moduleName,
ExportedFunctionName = exportNameOrNull,
Args = args
}, cancellationToken);
}
catch (TaskCanceledException)
{
if (timeoutSource.IsCancellationRequested)
{
// It was very common for developers to report 'TaskCanceledException' when encountering almost any
// problem while using NodeServices. Now that there is a default invocation timeout, we attempt to give
// a more descriptive exception message when a timeout occurs.
if (!connectionDidSucceed)
{
// This is very unlikely, but for debugging, it's still useful to differentiate it from the
// case below.
throw new NodeInvocationException(
$"Attempt to connect to Node timed out after {_invocationTimeoutMilliseconds}ms.",
string.Empty);
}
else
{
// Developers encounter this fairly often (if their Node code fails without invoking the callback,
// all that the .NET side knows is that the invocation eventually times out). Previously, this surfaced
// as a TaskCanceledException, but this led to a lot of issue reports. Now we throw the following
// descriptive error.
throw new NodeInvocationException(
$"The Node invocation timed out after {_invocationTimeoutMilliseconds}ms.",
$"You can change the timeout duration by setting the {NodeServicesOptions.TimeoutConfigPropertyName} "
+ $"property on {nameof(NodeServicesOptions)}.\n\n"
+ "The first debugging step is to ensure that your Node.js function always invokes the supplied "
+ "callback (or throws an exception synchronously), even if it encounters an error. Otherwise, "
+ "the .NET code has no way to know that it is finished or has failed."
);
}
}
else
{
throw;
}
}
}
}
/// <summary>
/// Disposes this instance.
/// </summary>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="invocationInfo">Specifies the Node.js function to be invoked and arguments to be passed to it.</param>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
protected abstract Task<T> InvokeExportAsync<T>(
NodeInvocationInfo invocationInfo,
CancellationToken cancellationToken);
/// <summary>
/// Configures a <see cref="ProcessStartInfo"/> instance describing how to launch the Node.js process.
/// </summary>
/// <param name="entryPointFilename">The entrypoint JavaScript file that the Node.js process should execute.</param>
/// <param name="projectPath">The root path of the project. This is used when locating Node.js modules relative to the project root.</param>
/// <param name="commandLineArguments">Command-line arguments to be passed to the Node.js process.</param>
/// <param name="environmentVars">Environment variables to be set on the Node.js process.</param>
/// <param name="launchWithDebugging">If true, passes a flag to the Node.js process telling it to accept V8 Inspector connections.</param>
/// <param name="debuggingPort">If debugging is enabled, the Node.js process should listen for V8 Inspector connections on this port.</param>
/// <returns></returns>
protected virtual ProcessStartInfo PrepareNodeProcessStartInfo(
string entryPointFilename, string projectPath, string commandLineArguments,
IDictionary<string, string> environmentVars, bool launchWithDebugging, int debuggingPort)
{
// This method is virtual, as it provides a way to override the NODE_PATH or the path to node.exe
string debuggingArgs;
if (launchWithDebugging)
{
debuggingArgs = debuggingPort != default(int) ? $"--inspect={debuggingPort} " : "--inspect ";
_nodeDebuggingPort = debuggingPort;
}
else
{
debuggingArgs = string.Empty;
}
var thisProcessPid = Process.GetCurrentProcess().Id;
var startInfo = new ProcessStartInfo("node")
{
Arguments = $"{debuggingArgs}\"{entryPointFilename}\" --parentPid {thisProcessPid} {commandLineArguments ?? string.Empty}",
UseShellExecute = false,
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
WorkingDirectory = projectPath
};
// Append environment vars
if (environmentVars != null)
{
foreach (var envVarKey in environmentVars.Keys)
{
var envVarValue = environmentVars[envVarKey];
if (envVarValue != null)
{
SetEnvironmentVariable(startInfo, envVarKey, envVarValue);
}
}
}
// Append projectPath to NODE_PATH so it can locate node_modules
var existingNodePath = Environment.GetEnvironmentVariable("NODE_PATH") ?? string.Empty;
if (existingNodePath != string.Empty)
{
existingNodePath += Path.PathSeparator;
}
var nodePathValue = existingNodePath + Path.Combine(projectPath, "node_modules");
SetEnvironmentVariable(startInfo, "NODE_PATH", nodePathValue);
return startInfo;
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stdout.
/// </summary>
/// <param name="outputData">The line emitted to the Node.js process's stdout.</param>
protected virtual void OnOutputDataReceived(string outputData)
{
OutputLogger.LogInformation(outputData);
}
/// <summary>
/// Virtual method invoked whenever the Node.js process emits a line to its stderr.
/// </summary>
/// <param name="errorData">The line emitted to the Node.js process's stderr.</param>
protected virtual void OnErrorDataReceived(string errorData)
{
OutputLogger.LogError(errorData);
}
/// <summary>
/// Disposes the instance.
/// </summary>
/// <param name="disposing">True if the object is disposing or false if it is finalizing.</param>
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_entryPointScript.Dispose();
EnsureFileSystemWatcherIsDisposed();
}
// Make sure the Node process is finished
// TODO: Is there a more graceful way to end it? Or does this still let it perform any cleanup?
if (!_nodeProcess.HasExited)
{
_nodeProcess.Kill();
}
_disposed = true;
}
}
private void EnsureFileSystemWatcherIsDisposed()
{
if (_fileSystemWatcher != null)
{
_fileSystemWatcher.Dispose();
_fileSystemWatcher = null;
}
}
private static void SetEnvironmentVariable(ProcessStartInfo startInfo, string name, string value)
{
startInfo.Environment[name] = value;
}
private static Process LaunchNodeProcess(ProcessStartInfo startInfo)
{
try
{
var process = Process.Start(startInfo);
// On Mac at least, a killed child process is left open as a zombie until the parent
// captures its exit code. We don't need the exit code for this process, and don't want
// to use process.WaitForExit() explicitly (we'd have to block the thread until it really
// has exited), but we don't want to leave zombies lying around either. It's sufficient
// to use process.EnableRaisingEvents so that .NET will grab the exit code and let the
// zombie be cleaned away without having to block our thread.
process.EnableRaisingEvents = true;
return process;
}
catch (Exception ex)
{
var message = "Failed to start Node process. To resolve this:.\n\n"
+ "[1] Ensure that Node.js is installed and can be found in one of the PATH directories.\n"
+ $" Current PATH enviroment variable is: { Environment.GetEnvironmentVariable("PATH") }\n"
+ " Make sure the Node executable is in one of those directories, or update your PATH.\n\n"
+ "[2] See the InnerException for further details of the cause.";
throw new InvalidOperationException(message, ex);
}
}
private static string UnencodeNewlines(string str)
{
if (str != null)
{
// The token here needs to match the const in OverrideStdOutputs.ts.
// See the comment there for why we're doing this.
str = str.Replace("__ns_newline__", Environment.NewLine);
}
return str;
}
private void ConnectToInputOutputStreams()
{
var initializationIsCompleted = false;
_nodeProcess.OutputDataReceived += (sender, evt) =>
{
if (evt.Data == ConnectionEstablishedMessage && !initializationIsCompleted)
{
_connectionIsReadySource.SetResult(null);
initializationIsCompleted = true;
}
else if (evt.Data != null)
{
OnOutputDataReceived(UnencodeNewlines(evt.Data));
}
};
_nodeProcess.ErrorDataReceived += (sender, evt) =>
{
if (evt.Data != null)
{
if (_launchWithDebugging && IsDebuggerMessage(evt.Data))
{
OutputLogger.LogWarning(evt.Data);
}
else
{
OnErrorDataReceived(UnencodeNewlines(evt.Data));
}
}
};
_nodeProcess.BeginOutputReadLine();
_nodeProcess.BeginErrorReadLine();
}
private static bool IsDebuggerMessage(string message)
{
return message.StartsWith("Debugger attached", StringComparison.Ordinal) ||
message.StartsWith("Debugger listening ", StringComparison.Ordinal) ||
message.StartsWith("To start debugging", StringComparison.Ordinal) ||
message.Equals("Warning: This is an experimental feature and could change at any time.", StringComparison.Ordinal) ||
message.Equals("For help see https://nodejs.org/en/docs/inspector", StringComparison.Ordinal) ||
message.Contains("chrome-devtools:");
}
private FileSystemWatcher BeginFileWatcher(string rootDir)
{
if (_watchFileExtensions == null || _watchFileExtensions.Length == 0)
{
// Nothing to watch
return null;
}
var watcher = new FileSystemWatcher(rootDir)
{
IncludeSubdirectories = true,
NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName
};
watcher.Changed += OnFileChanged;
watcher.Created += OnFileChanged;
watcher.Deleted += OnFileChanged;
watcher.Renamed += OnFileRenamed;
watcher.EnableRaisingEvents = true;
return watcher;
}
private void OnFileChanged(object source, FileSystemEventArgs e)
{
if (IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.FullPath);
}
}
private void OnFileRenamed(object source, RenamedEventArgs e)
{
if (IsFilenameBeingWatched(e.OldFullPath) || IsFilenameBeingWatched(e.FullPath))
{
RestartDueToFileChange(e.OldFullPath);
}
}
private bool IsFilenameBeingWatched(string fullPath)
{
if (string.IsNullOrEmpty(fullPath))
{
return false;
}
else
{
var actualExtension = Path.GetExtension(fullPath) ?? string.Empty;
return _watchFileExtensions.Any(actualExtension.Equals);
}
}
private void RestartDueToFileChange(string fullPath)
{
OutputLogger.LogInformation($"Node will restart because file changed: {fullPath}");
_nodeProcessNeedsRestart = true;
// There's no need to watch for any more changes, since we're already restarting, and if the
// restart takes some time (e.g., due to connection draining), we could end up getting duplicate
// notifications.
EnsureFileSystemWatcherIsDisposed();
}
/// <summary>
/// Implements the finalization part of the IDisposable pattern by calling Dispose(false).
/// </summary>
~OutOfProcessNodeInstance()
{
Dispose(false);
}
}
}
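To make the extension point concrete, here is a hedged skeleton of an alternative hosting model. All names are invented for this sketch, and the options properties forwarded to the base constructor are assumed to match those that HttpNodeInstance forwards earlier in this diff:

using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices;
using Microsoft.AspNetCore.NodeServices.HostingModels;

// Hypothetical hosting model: only the abstract InvokeExportAsync overload is required.
// OnOutputDataReceived can optionally be overridden to parse handshake details from
// stdout, as HttpNodeInstance does with its port message.
internal class PipeNodeInstance : OutOfProcessNodeInstance
{
    public PipeNodeInstance(NodeServicesOptions options, string entryPointScript)
        : base(
            entryPointScript,
            options.ProjectPath,
            options.WatchFileExtensions,
            /* commandLineArguments */ null,
            options.ApplicationStoppingToken,
            options.NodeInstanceOutputLogger,
            options.EnvironmentVariables,
            options.InvocationTimeoutMilliseconds,
            options.LaunchWithDebugging,
            options.DebuggingPort)
    {
    }

    protected override Task<T> InvokeExportAsync<T>(
        NodeInvocationInfo invocationInfo, CancellationToken cancellationToken)
    {
        // Perform the RPC over whatever transport this hosting model uses
        // (named pipes, TCP, etc.) and deserialize the result as T.
        throw new System.NotImplementedException();
    }
}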


@@ -1,54 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Represents the ability to invoke code in a Node.js environment. Although the underlying Node.js instance
/// might change over time (e.g., the process might be restarted), the <see cref="INodeServices"/> instance
/// will remain constant.
/// </summary>
public interface INodeServices : IDisposable
{
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root whose default CommonJS export is the function to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args);
/// <summary>
/// Asynchronously invokes code in the Node.js instance.
/// </summary>
/// <typeparam name="T">The JSON-serializable data type that the Node.js code will asynchronously return.</typeparam>
/// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel the invocation.</param>
/// <param name="moduleName">The path to the Node.js module (i.e., JavaScript file) relative to your project root that contains the code to be invoked.</param>
/// <param name="exportedFunctionName">Specifies the CommonJS export to be invoked.</param>
/// <param name="args">Any sequence of JSON-serializable arguments to be passed to the Node.js function.</param>
/// <returns>A <see cref="Task{TResult}"/> representing the completion of the RPC call.</returns>
Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args);
}
}
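As a usage sketch of the interface above (module path, export name, and the surrounding service are placeholders), assuming a CommonJS module whose exports follow the package's callback convention:

using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices;

public class GreetingService
{
    private readonly INodeServices _nodeServices;

    public GreetingService(INodeServices nodeServices)
    {
        _nodeServices = nodeServices;
    }

    public async Task<string> GreetAsync(string name, CancellationToken cancellationToken)
    {
        // Invokes the default export of ./Node/greet (a hypothetical module whose default
        // export is a function such as (callback, name) => callback(null, 'Hello ' + name)).
        var greeting = await _nodeServices.InvokeAsync<string>("./Node/greet", name);

        // Invokes a named export of the same module, passing the caller's cancellation token
        // so the RPC call is abandoned if the request is cancelled.
        var shouted = await _nodeServices.InvokeExportAsync<string>(
            cancellationToken, "./Node/greet", "shout", name);

        return greeting + " / " + shouted;
    }
}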


@@ -1,28 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<Import Project="..\..\build\common.props" />
<PropertyGroup>
<Description>Invoke Node.js modules at runtime in ASP.NET Core applications.</Description>
<TargetFramework>netstandard2.0</TargetFramework>
<PackageTags>aspnetcore;aspnetcoremvc;nodeservices</PackageTags>
<TypeScriptCompileBlocked>true</TypeScriptCompileBlocked>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup>
<EmbeddedResource Include="Content\**\*" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.Hosting.Abstractions" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="$(AspNetCoreVersion)" />
<PackageReference Include="Newtonsoft.Json" Version="$(JsonNetVersion)" />
</ItemGroup>
<Target Name="PrepublishScript" BeforeTargets="PrepareForPublish" Condition=" '$(IsCrossTargetingBuild)' != 'true' ">
<Exec Command="npm install" />
<Exec Command="node node_modules/webpack/bin/webpack.js" />
</Target>
</Project>


@@ -1,165 +0,0 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.NodeServices.HostingModels;
namespace Microsoft.AspNetCore.NodeServices
{
/// <summary>
/// Default implementation of INodeServices. This is the primary API surface through which developers
/// make use of this package. It provides simple "InvokeAsync" methods that dispatch calls to the
/// correct Node instance, creating and destroying those instances as needed.
///
/// If a Node instance dies (or none was yet created), this class takes care of creating a new one.
/// If a Node instance signals that it needs to be restarted (e.g., because a file changed), then this
/// class will create a new instance and dispatch future calls to it, while keeping the old instance
/// alive for a defined period so that any in-flight RPC calls can complete. This latter feature is
/// analogous to the "connection draining" feature implemented by HTTP load balancers.
/// </summary>
/// <seealso cref="Microsoft.AspNetCore.NodeServices.INodeServices" />
internal class NodeServicesImpl : INodeServices
{
private static TimeSpan ConnectionDrainingTimespan = TimeSpan.FromSeconds(15);
private Func<INodeInstance> _nodeInstanceFactory;
private INodeInstance _currentNodeInstance;
private object _currentNodeInstanceAccessLock = new object();
private Exception _instanceDelayedDisposalException;
internal NodeServicesImpl(Func<INodeInstance> nodeInstanceFactory)
{
_nodeInstanceFactory = nodeInstanceFactory;
}
public Task<T> InvokeAsync<T>(string moduleName, params object[] args)
{
return InvokeExportAsync<T>(moduleName, null, args);
}
public Task<T> InvokeAsync<T>(CancellationToken cancellationToken, string moduleName, params object[] args)
{
return InvokeExportAsync<T>(cancellationToken, moduleName, null, args);
}
public Task<T> InvokeExportAsync<T>(string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, CancellationToken.None);
}
public Task<T> InvokeExportAsync<T>(CancellationToken cancellationToken, string moduleName, string exportedFunctionName, params object[] args)
{
return InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ true, cancellationToken);
}
private async Task<T> InvokeExportWithPossibleRetryAsync<T>(string moduleName, string exportedFunctionName, object[] args, bool allowRetry, CancellationToken cancellationToken)
{
ThrowAnyOutstandingDelayedDisposalException();
var nodeInstance = GetOrCreateCurrentNodeInstance();
try
{
return await nodeInstance.InvokeExportAsync<T>(cancellationToken, moduleName, exportedFunctionName, args);
}
catch (NodeInvocationException ex)
{
// If the Node instance can't complete the invocation because it needs to restart (e.g., because the underlying
// Node process has exited, or a file it depends on has changed), then we make one attempt to restart transparently.
if (allowRetry && ex.NodeInstanceUnavailable)
{
// Perform the retry after clearing away the old instance
// Since disposal is delayed even though the node instance is replaced immediately, this produces the
// "connection draining" feature whereby in-flight RPC calls are given a certain period to complete.
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance == nodeInstance)
{
var disposalDelay = ex.AllowConnectionDraining ? ConnectionDrainingTimespan : TimeSpan.Zero;
DisposeNodeInstance(_currentNodeInstance, disposalDelay);
_currentNodeInstance = null;
}
}
// On the next call, don't allow retries, because we could get into an infinite retry loop, or a long retry
// loop that masks an underlying problem. A newly-created Node instance should be able to accept invocations,
// or something more serious must be wrong.
return await InvokeExportWithPossibleRetryAsync<T>(moduleName, exportedFunctionName, args, /* allowRetry */ false, cancellationToken);
}
else
{
throw;
}
}
}
public void Dispose()
{
lock (_currentNodeInstanceAccessLock)
{
if (_currentNodeInstance != null)
{
DisposeNodeInstance(_currentNodeInstance, delay: TimeSpan.Zero);
_currentNodeInstance = null;
}
}
}
private void DisposeNodeInstance(INodeInstance nodeInstance, TimeSpan delay)
{
if (delay == TimeSpan.Zero)
{
nodeInstance.Dispose();
}
else
{
Task.Run(async () => {
try
{
await Task.Delay(delay);
nodeInstance.Dispose();
}
catch(Exception ex)
{
// Nothing's waiting for the delayed disposal task, so any exceptions in it would
// by default just get ignored. To make these discoverable, capture them here so
// they can be rethrown to the next caller to InvokeExportAsync.
_instanceDelayedDisposalException = ex;
}
});
}
}
private void ThrowAnyOutstandingDelayedDisposalException()
{
if (_instanceDelayedDisposalException != null)
{
var ex = _instanceDelayedDisposalException;
_instanceDelayedDisposalException = null;
throw new AggregateException(
"A previous attempt to dispose a Node instance failed. See InnerException for details.",
ex);
}
}
private INodeInstance GetOrCreateCurrentNodeInstance()
{
var instance = _currentNodeInstance;
if (instance == null)
{
lock (_currentNodeInstanceAccessLock)
{
instance = _currentNodeInstance;
if (instance == null)
{
instance = _currentNodeInstance = CreateNewNodeInstance();
}
}
}
return instance;
}
private INodeInstance CreateNewNodeInstance()
{
return _nodeInstanceFactory();
}
}
}

Some files were not shown because too many files have changed in this diff.