chore: fix docker emulator

This commit is contained in:
David Germain
2024-05-25 22:00:36 +02:00
committed by benfurber
parent be5728b6e3
commit a39793ba6f
78 changed files with 735 additions and 1665 deletions

View File

@@ -24,23 +24,62 @@ This project and everyone participating in it is governed by the [Code of Conduc
 ## 📟   Getting started
-**Prerequisites**
+### Prerequisites
 - [Node.js v20](https://nodejs.org/en/download/)
 - [Yarn v3](https://yarnpkg.com/getting-started/install)
-With the above tools available, you are ready:
+### One time setup
 1. Fork the repository.
-2. Clone the project from the fork you have created previously at first step :
-`git clone https://github.com/`**your-github-user**`/community-platform.git`
+2. Clone the project from the fork you created in the first step:
+```
+git clone https://github.com/<your-github-username>/community-platform.git
+```
 3. Install dependencies
-`yarn`
+```
+yarn install
+```
-4. Run the dev server
-`yarn start`
+### Running the web app
+There are two options.
+#### Option 1
+This option is simple but only starts the frontend. The backend services are hosted on the internet (https://precious-plastics-v4-dev.firebaseapp.com) and may be accessed by many developers.
+This setup is:
+- Good for getting started
+- Good for frontend development
+- Bad for backend development
+Simply run:
+```
+yarn run start
+```
+In this case:
+- frontend: http://localhost:3000
+#### Option 2
+This option is slightly more involved but allows you to run both the frontend and the backend locally (everything except sending emails).
+This setup is:
+- Good for frontend development
+- Good for backend development
+See the details in the [Firebase Emulator docs](https://docs.platform.onearmy.earth/Backend%20Development/firebase-emulator/).
+### Learn more
 More information is available in the [developer documentation](https://docs.platform.onearmy.earth/).

docker-compose.yml — new file (19 lines)
View File

@@ -0,0 +1,19 @@
# TODO: update to a newer version
version: "2.1"
services:
emulator:
build:
context: ./
dockerfile: ./functions/Dockerfile.emulator
ports:
- 4001-4008:4001-4008
volumes:
- ./functions:/app/functions
- ./functions/data/emulator/:/app/seed
# TODO: make logs easily viewable on the host machine.
# - ./logs/ui-debug.log:/app/ui-debug.log
# - ./logs/firestore-debug.log:/app/firestore-debug.log
# - ./logs/database-debug.log:/app/database-debug.log
# - ./logs/pubsub-debug.log:/app/pubsub-debug.log
# - ./logs/firebase-debug.log:/app/firebase-debug.log
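
For reference, the `emulator` service above can also be managed directly with Compose, independent of the Yarn scripts added elsewhere in this commit. A minimal sketch using standard docker-compose commands:

```sh
# Build the image and start the emulator service in the foreground
docker-compose up --force-recreate --build emulator

# Stop and remove the service container when finished
docker-compose down
```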

View File

@@ -100,28 +100,36 @@
"emulators": { "emulators": {
"ui": { "ui": {
"enabled": true, "enabled": true,
"port": 4001 "port": 4001,
"host": "0.0.0.0"
}, },
"functions": { "functions": {
"port": 4002 "port": 4002,
"host": "0.0.0.0"
}, },
"firestore": { "firestore": {
"port": 4003 "port": 4003,
"host": "0.0.0.0"
}, },
"hosting": { "hosting": {
"port": 4004 "port": 4004,
"host": "0.0.0.0"
}, },
"auth": { "auth": {
"port": 4005 "port": 4005,
"host": "0.0.0.0"
}, },
"database": { "database": {
"port": 4006 "port": 4006,
"host": "0.0.0.0"
}, },
"storage": { "storage": {
"port": 4007 "port": 4007,
"host": "0.0.0.0"
}, },
"pubsub": { "pubsub": {
"port": 4008 "port": 4008,
"host": "0.0.0.0"
} }
}, },
"storage": { "storage": {

View File

@@ -17,7 +17,6 @@ lib/
 dist
 # Emulator data
-data/emulated
 data/exported
 data/seed/*
 !data/seed/.gitkeep

View File

@@ -0,0 +1,70 @@
####################################################################
#
# Due to complexities with Yarn workspaces, this Dockerfile
# does not create the `dist` folder. It should be mounted from
# the host machine to the container, using the `-v` flag.
#
# Optionally, initial data can be set up by mounting `/app/seed`.
#
# COMMANDS (used from the root directory of the project):
# docker build -f ./functions/Dockerfile.emulator -t emulator .
# docker run -v ./functions:/app/functions -p 4001-4008:4001-4008 -it emulator
#
# HOW TO DEBUG THE CONTAINER WHILE IT IS RUNNING:
# 1) Open a new terminal.
# 2) Run `docker ps` command.
# 3) Find the name for the container.
# 4) Run `docker exec -it <name> bash` command.
#
# TECHNICAL NOTES:
# When running inside Docker, the Firebase emulators need to bind to 0.0.0.0
# https://stackoverflow.com/a/52518929
#
####################################################################
FROM node:20.9.0-bullseye-slim
ENV SHOULD_REGULARLY_EXPORT_DATA='false'
WORKDIR /app
RUN \
apt-get update && \
# For Firebase
# https://firebase.google.com/docs/emulator-suite/install_and_configure
apt-get -y install openjdk-11-jre-headless && \
# For debugging
apt-get -y install nano && \
apt-get clean
# TODO: someone else can figure out a good way to install the Firebase CLI.
RUN npm install -g firebase-tools
# Doing setup saves time when running the container.
# There are no setup commands for functions, hosting, or auth.
RUN \
firebase setup:emulators:ui && \
firebase setup:emulators:firestore && \
firebase setup:emulators:database && \
firebase setup:emulators:storage && \
firebase setup:emulators:pubsub
COPY ./../firebase.json /app/firebase.json
COPY ./../firebase.storage.rules /app/firebase.storage.rules
COPY ./../firestore.indexes.json /app/firestore.indexes.json
COPY ./../firestore.rules /app/firestore.rules
# Create script for easy exporting
RUN \
echo "#!/bin/bash\n" >> /app/export.sh && \
echo "firebase emulators:export --project demo-community-platform-emulated --force /app/dump\n" >> /app/export.sh && \
chmod +x /app/export.sh
# These should be the ports specified in firebase.json
EXPOSE 4001 4002 4003 4004 4005 4006 4007 4008
CMD \
firebase emulators:start \
--project demo-community-platform-emulated \
--only auth,functions,firestore,pubsub,storage,hosting,database \
--import=/app/seed

View File

@@ -4,7 +4,7 @@
 This documentation likely requires updating, and the information provided may no longer be fully valid. Please feel free to create a new issue for any specific items identified as conflicting/confusing or possibly no longer valid.
-Some additional, newer information can also be found in [Firebase Emulators Docs](../packages/documentation/docs/Backend%20Development/firebase-emulators.md)
+Some additional, newer information can also be found in [Firebase Emulators Docs](../packages/documentation/docs/Backend%20Development/firebase-emulator.md)
 ---

View File

@@ -0,0 +1,72 @@
{
"kind": "identitytoolkit#DownloadAccountResponse",
"users": [
{
"localId": "2djSop7XvY0NEg7QY5rZrm0Wc2N1",
"createdAt": "1716744480141",
"lastLoginAt": "1716746234545",
"displayName": "admin",
"passwordHash": "fakeHash:salt=fakeSaltRpTXy52dXY1DgyPT6odF:password=wow_backend_development_is_fun",
"salt": "fakeSaltRpTXy52dXY1DgyPT6odF",
"passwordUpdatedAt": 1716744766896,
"providerUserInfo": [
{
"providerId": "password",
"email": "admin@example.com",
"federatedId": "admin@example.com",
"rawId": "admin@example.com",
"displayName": "admin"
}
],
"validSince": "1716744766",
"email": "admin@example.com",
"emailVerified": false,
"disabled": false,
"lastRefreshAt": "2024-05-26T17:57:14.545Z"
},
{
"localId": "WrCrfu7FS3vJcFswZr1yufwlPtCl",
"lastLoginAt": "1716745935132",
"emailVerified": false,
"email": "precious-plastic@example.com",
"salt": "fakeSalt5z1E4omMRGeny2KD9xUb",
"passwordHash": "fakeHash:salt=fakeSalt5z1E4omMRGeny2KD9xUb:password=precious-plastic",
"passwordUpdatedAt": 1716745327556,
"validSince": "1716745327",
"createdAt": "1716745327556",
"providerUserInfo": [
{
"providerId": "password",
"email": "precious-plastic@example.com",
"federatedId": "precious-plastic@example.com",
"rawId": "precious-plastic@example.com",
"displayName": "precious-plastic"
}
],
"lastRefreshAt": "2024-05-26T17:52:15.132Z",
"displayName": "precious-plastic"
},
{
"localId": "uf0mBGfLKGQwoJ1FWiqH0xQSatKM",
"lastLoginAt": "1716746297841",
"emailVerified": false,
"email": "normal_jim@example.com",
"salt": "fakeSalt88tfIY4E8ASJgTGvE3th",
"passwordHash": "fakeHash:salt=fakeSalt88tfIY4E8ASJgTGvE3th:password=thanks_emulator_man",
"passwordUpdatedAt": 1716746026812,
"validSince": "1716746026",
"createdAt": "1716746026812",
"providerUserInfo": [
{
"providerId": "password",
"email": "normal_jim@example.com",
"federatedId": "normal_jim@example.com",
"rawId": "normal_jim@example.com",
"displayName": "normal_jim"
}
],
"lastRefreshAt": "2024-05-26T17:58:17.841Z",
"displayName": "normal_jim"
}
]
}

View File

@@ -0,0 +1,4 @@
{
"signIn": { "allowDuplicateEmails": false },
"emailPrivacyConfig": { "enableImprovedEmailPrivacy": false }
}

View File

@@ -0,0 +1,20 @@
{
"version": "13.10.1",
"firestore": {
"version": "1.19.6",
"path": "firestore_export",
"metadata_file": "firestore_export/firestore_export.overall_export_metadata"
},
"database": {
"version": "4.11.2",
"path": "database_export"
},
"auth": {
"version": "13.10.1",
"path": "auth_export"
},
"storage": {
"version": "13.10.1",
"path": "storage_export"
}
}

Binary files not shown — 15 image files added (documentation screenshots and storage seed uploads, 53 KiB to 483 KiB each).

View File

@@ -0,0 +1,10 @@
{
"buckets": [
{
"id": "demo-community-platform-emulated.appspot.com"
},
{
"id": "default-bucket"
}
]
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/shredder-5-18fb5fb1d5f.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089801,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["10d20f04-0b90-4c8b-9b7e-c50c87bc0714"],
"etag": "Nqnyu2+k4zpFsVWiaL5TPKwrBug",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.801Z",
"updated": "2024-05-26T17:38:09.801Z",
"size": 60814,
"md5Hash": "VmzZq8sK2od3snVOMkN8fw==",
"crc32c": "3186773085"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/users/precious-plastic/5e0701c9be0f82741b72d8b2_Version-4-team-18ee73f92fa-18fb600cf66.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745458997,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["f2bb53ba-4797-4b87-a77f-f9b8c63568ab"],
"etag": "cr6McE2FKWCJWvEYkPz1OIQ6KyE",
"customMetadata": {},
"timeCreated": "2024-05-26T17:44:18.997Z",
"updated": "2024-05-26T17:44:18.997Z",
"size": 494337,
"md5Hash": "BH2ssfCv5sFxvwv68s9qKQ==",
"crc32c": "3221583768"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/shredder-2-18391b3997d-18fb5fa4b14.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089742,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["0180dc00-c762-498f-8353-d379d591554f"],
"etag": "KjajQC8dFUsg0dzLFUPkJ/wJGI0",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.742Z",
"updated": "2024-05-26T17:38:09.742Z",
"size": 54654,
"md5Hash": "VVJqkJWfPH+7DH6rUj3fBA==",
"crc32c": "1100890625"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/research/pSkYAfYlfuwupSDl80o2/1.0-18fb607b2e6.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745912112,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["8acd16f0-8232-4287-b251-44d4cd4e7805"],
"etag": "NzAxyfe2mKwDHY+U08dv77t/Dv4",
"customMetadata": {},
"timeCreated": "2024-05-26T17:51:52.112Z",
"updated": "2024-05-26T17:51:52.112Z",
"size": 141680,
"md5Hash": "PkNIqbc99Cbfuowl+LEtpA==",
"crc32c": "395316221"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/0-18fb6035d59.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745625971,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["269b9fea-191d-4df2-be62-05c6293d59e8"],
"etag": "U/EZ0QlpjgqNMIiGDCRRh4O0M4Q",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:05.971Z",
"updated": "2024-05-26T17:47:05.971Z",
"size": 101464,
"md5Hash": "tlu4Ai3wQVBPI1Bk6NB7eQ==",
"crc32c": "3209819119"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/users/normal_jim/alien-18fb60a8d46.png",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716746166570,
"contentType": "image/png",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["b680dc51-f3f7-42fd-8692-786ea01ac3d9"],
"etag": "Xo1hlQI3aYu0uvZXuCtywUj+Cj8",
"customMetadata": {},
"timeCreated": "2024-05-26T17:56:06.570Z",
"updated": "2024-05-26T17:56:06.570Z",
"size": 314851,
"md5Hash": "ZAINcN7FvvpmzTrSLZnl9A==",
"crc32c": "1701022809"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/2-18fb602cae9.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745626023,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["fd42a6b7-8ef8-4bd6-b7f8-1952a9c45022"],
"etag": "GRk5mQL6P/yBe9b9tl+EbvmkxK8",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:06.024Z",
"updated": "2024-05-26T17:47:06.024Z",
"size": 65482,
"md5Hash": "3F99N7j/5D/SQ/RErV9sFA==",
"crc32c": "1128590318"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/4-18fb60328e3.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745626056,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["62c680ae-6717-485e-8bef-43726d73d00a"],
"etag": "pRDx9CLvKn62C9kzx4AFwUhivkE",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:06.056Z",
"updated": "2024-05-26T17:47:06.056Z",
"size": 60814,
"md5Hash": "VmzZq8sK2od3snVOMkN8fw==",
"crc32c": "3186773085"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/shredder-6-18fb5fb12d6.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089800,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["1ce9164b-c3ff-459b-ac9d-a52617eee796"],
"etag": "OAgke8b8Y+0rxxPk4DaFGRGTENU",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.800Z",
"updated": "2024-05-26T17:38:09.800Z",
"size": 69109,
"md5Hash": "gt/ZoORMy9hBCYF2faCvIg==",
"crc32c": "1141308651"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/Shredder 2.1.zip",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745626127,
"contentType": "application/zip",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["7fef5284-b868-4db5-8302-640945ff5b8c"],
"etag": "vVtQknzoGw9NWHFqq2GPaUlB8gE",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:06.127Z",
"updated": "2024-05-26T17:47:06.127Z",
"size": 1171353,
"md5Hash": "RVrOhVIv05WSlrtqesl2UA==",
"crc32c": "3718242995"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/Shredder 2.1.zip",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089872,
"contentType": "application/zip",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["1c6dc383-8bd2-468d-88a1-c5a23855599a"],
"etag": "aHjLBKHYwiSoztZfa1M5u7HLfEk",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.872Z",
"updated": "2024-05-26T17:38:09.872Z",
"size": 1171353,
"md5Hash": "RVrOhVIv05WSlrtqesl2UA==",
"crc32c": "3718242995"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/users/admin/cool-18fb5f2a735.png",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716744578807,
"contentType": "image/png",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["64f1965c-e31c-4699-8f84-dc923a5dd111"],
"etag": "oJ1dy9zR/YCeRLkSZgpI9aR/mIg",
"customMetadata": {},
"timeCreated": "2024-05-26T17:29:38.807Z",
"updated": "2024-05-26T17:29:38.807Z",
"size": 277516,
"md5Hash": "csuSVP67y0MLu48BnWsXuw==",
"crc32c": "662078174"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/shredder-1-18391b41642-18fb5f8ed91.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089653,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["7348fecc-0938-4e34-bf77-8c3c3c524a9f"],
"etag": "TH/M2doWMQjL4QC6rG4lp+y6W1U",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.654Z",
"updated": "2024-05-26T17:38:09.654Z",
"size": 101464,
"md5Hash": "tlu4Ai3wQVBPI1Bk6NB7eQ==",
"crc32c": "3209819119"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/users/normal_jim/alien-18fb609ca55.png",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716746079955,
"contentType": "image/png",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["1f10f645-0c4a-49f5-a403-788c79ca1a11"],
"etag": "QuHyL/3IorFXBB9KNp1lt/CLKPA",
"customMetadata": {},
"timeCreated": "2024-05-26T17:54:39.955Z",
"updated": "2024-05-26T17:54:39.955Z",
"size": 314851,
"md5Hash": "ZAINcN7FvvpmzTrSLZnl9A==",
"crc32c": "1701022809"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/sSJu2x5kxs6piq2L0U6y/shredder-3-18fb5fa86db.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745089767,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["f6e5502c-8910-4588-947b-8d9d6490a445"],
"etag": "3/MxuobStVXqJjhVsNj7wOMNMdk",
"customMetadata": {},
"timeCreated": "2024-05-26T17:38:09.767Z",
"updated": "2024-05-26T17:38:09.767Z",
"size": 65482,
"md5Hash": "3F99N7j/5D/SQ/RErV9sFA==",
"crc32c": "1128590318"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/1-18fb60297d1.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745625997,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["372f62ba-6207-45a8-b576-da3f76e39368"],
"etag": "XEtBfdK5yshev0OvvoHUB3ug/ds",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:05.997Z",
"updated": "2024-05-26T17:47:05.997Z",
"size": 54654,
"md5Hash": "VVJqkJWfPH+7DH6rUj3fBA==",
"crc32c": "1100890625"
}

View File

@@ -0,0 +1,17 @@
{
"name": "uploads/howtos/UDaRgClKlDljxxFHyKsX/3-18fb603025d.jpg",
"bucket": "default-bucket",
"metageneration": 1,
"generation": 1716745626054,
"contentType": "image/jpeg",
"storageClass": "STANDARD",
"contentDisposition": "inline",
"downloadTokens": ["b8b86016-1a38-440f-b76b-969c28b52449"],
"etag": "c4zNC9R8rWotfYyI/cu4xunaQwk",
"customMetadata": {},
"timeCreated": "2024-05-26T17:47:06.054Z",
"updated": "2024-05-26T17:47:06.054Z",
"size": 69109,
"md5Hash": "gt/ZoORMy9hBCYF2faCvIg==",
"crc32c": "1141308651"
}

View File

@@ -6,12 +6,9 @@
"hoistingLimits": "workspaces" "hoistingLimits": "workspaces"
}, },
"scripts": { "scripts": {
"start": "ts-node scripts/start.ts", "watch": "./node_modules/.bin/webpack --watch",
"serve:live": "yarn copyDevConfig && yarn serve",
"lint": "tslint --project tsconfig.json", "lint": "tslint --project tsconfig.json",
"build": "./node_modules/.bin/webpack", "build": "./node_modules/.bin/webpack",
"watch": "./node_modules/.bin/webpack --watch",
"copyDevConfig": "firebase functions:config:get > .runtimeconfig.json",
"shell": "yarn build && firebase functions:shell", "shell": "yarn build && firebase functions:shell",
"test": "firebase emulators:exec --only functions,firestore,hosting,auth,database,pubsub,storage --project demo-community-platform-emulated 'jest . --forceExit --detectOpenHandles --coverage --reporters=default --reporters=jest-junit'", "test": "firebase emulators:exec --only functions,firestore,hosting,auth,database,pubsub,storage --project demo-community-platform-emulated 'jest . --forceExit --detectOpenHandles --coverage --reporters=default --reporters=jest-junit'",
"test-ci": "./node_modules/.bin/firebase emulators:exec --only functions,firestore,hosting,auth,database,pubsub,storage --project demo-community-platform-emulated 'yarn jest . --forceExit --detectOpenHandles --coverage --reporters=default --reporters=jest-junit'", "test-ci": "./node_modules/.bin/firebase emulators:exec --only functions,firestore,hosting,auth,database,pubsub,storage --project demo-community-platform-emulated 'yarn jest . --forceExit --detectOpenHandles --coverage --reporters=default --reporters=jest-junit'",

View File

@@ -1,30 +0,0 @@
import * as path from 'path'
export const FUNCTIONS_DIR = path.resolve(__dirname, '../')
// When passing to CLI only relative folder names used
export const EMULATOR_SEED_FOLDER = 'data/seed'
export const EMULATOR_IMPORT_FOLDER = 'data/emulated'
export const EMULATOR_EXPORT_FOLDER = 'data/exported'
// For custom scripts full paths user
export const EMULATOR_SEED_PATH = path.resolve(
FUNCTIONS_DIR,
EMULATOR_SEED_FOLDER,
)
export const EMULATOR_IMPORT_PATH = path.resolve(
FUNCTIONS_DIR,
EMULATOR_IMPORT_FOLDER,
)
export const EMULATOR_EXPORT_PATH = path.resolve(
FUNCTIONS_DIR,
EMULATOR_EXPORT_FOLDER,
)
// For compiling src folder
export const PLATFORM_ROOT_PATH = path.resolve(FUNCTIONS_DIR, '..')
export const PLATFORM_LIB_PATH = path.resolve(PLATFORM_ROOT_PATH, 'lib')
export const PLATFORM_TSCONFIG_TYPES_PATH = path.resolve(
PLATFORM_ROOT_PATH,
'tsconfig.src-types.json',
)

View File

@@ -1,24 +0,0 @@
import type { configVars } from '../../src/config/config'
/** Variables populates in the same way firebase functions:config:set does for use in testing */
export const runtimeConfigTest: configVars = {
analytics: {
tracking_code: 'fake_tracking_code',
view_id: 'fake_view_id',
},
integrations: {
discord_webhook: 'https://fake_discord_webhook.local',
discord_alert_channel_webhook:
'https://fake_discord_alert_channel_webhook.local',
slack_webhook: 'https://fake_slack_webhook.local',
patreon_client_id: 'fake_patreon_client_id',
patreon_client_secret: 'fake_patreon_client_secret',
},
service: null as any,
deployment: {
site_url: 'http://localhost:4000',
},
prerender: {
api_key: 'fake_prerender_key',
},
}

View File

@@ -1,7 +0,0 @@
import { writeFileSync } from 'fs'
import { resolve } from 'path'
import { FUNCTIONS_DIR } from '../paths'
import { runtimeConfigTest } from './model'
const runtimeConfigPath = resolve(FUNCTIONS_DIR, '.runtimeconfig.json')
writeFileSync(runtimeConfigPath, JSON.stringify(runtimeConfigTest))

View File

@@ -1,5 +0,0 @@
process.env.FUNCTIONS_EMULATOR = 'true'
process.env.FIREBASE_AUTH_EMULATOR_HOST = 'localhost:4005'
// https://github.com/firebase/firebase-admin-node/issues/116
process.env.FIREBASE_DATABASE_EMULATOR_HOST = 'http://127.0.0.1:4006'
process.env.FIRESTORE_EMULATOR_HOST = 'localhost:4003'

View File

@@ -1,139 +0,0 @@
import { spawn } from 'child_process'
import * as path from 'path'
import webpack from 'webpack'
import { Watching } from 'webpack'
import * as os from 'os'
import * as fs from 'fs-extra'
import webpackConfig from '../webpack.config'
import { EMULATOR_EXPORT_FOLDER, EMULATOR_IMPORT_FOLDER } from './paths'
/**
* Start the functions emulator and functions source code in parallel
* TODO - merge/replace with docker methods
*
* NOTE - whilst similar functionality can be achieved with packages like 'concurrently',
* SIGTERM signals don't seem to always be handled correctly and the emulator doesn't complete
* export operations. Similarly webpack watch cli respawns even after SIGINT so better to run programmatically
*/
function main() {
// CLI: concurrently --kill-others-on-fail --names \"emulator,functions\" -c \"blue,magenta\" \"yarn serve:emulated\" \"yarn watch\"
compileAndWatchFunctions()
.then((webpackWatcher) => {
if (webpackWatcher) {
// start emulator only after compiler running (to pass close callback)
startEmulator(webpackWatcher)
}
})
.catch((err) => {
console.error(err)
process.exit(1)
})
}
main()
/** Programmatically run webpack in watch mode */
async function compileAndWatchFunctions(): Promise<Watching> {
// CLI: webpack --watch
const compiler = webpack(webpackConfig)
// Start a build in watch mode
const watcher = compiler.watch(
{
aggregateTimeout: 300,
poll: undefined,
},
(err, stats) => {
if (stats === undefined) {
console.log('[Compile Error] stats undefined')
process.exit(1)
}
if (stats.hasErrors()) {
const info = stats.toJson()
console.log('[Compile Error]', info.errors)
process.exit(1)
}
if (err) {
console.log('[Compiler Error]', err)
}
},
)
// Wait for the first build to be completed before resolving (to ensure dist folder populated)
return new Promise((resolve) => {
compiler.hooks.afterCompile.tap('build complete', () => {
resolve(watcher)
})
})
}
/**
* Spawn a shell to run the firebase emulators from
* Includes a custom environment configuration to enable full access to api methods which are otherwise limited
* to non-authenticated users. It achieves this by having 2 sets of credentials:
*
* 1) A genuine (read-only) service account that authenticates with google servers
* 2) A fake project specified to run the emulator against
*
* The reason we need both is because google expects authenticated users to access various 3rd party apis before
* code execution, e.g. https://github.com/firebase/firebase-tools/issues/1683 and https://github.com/firebase/firebase-tools/issues/1708
*/
function startEmulator(functionsCompiler: Watching) {
// call firebase bin directly in case not installed globally
const FIREBASE_BIN = path.resolve(__dirname, '../node_modules/.bin/firebase')
// the name of the project that generated service account credentials has access to
const REAL_PROJECT_ID = 'precious-plastics-v4-dev'
// any project id can be specified (doesn't have to be real) - functions will be available on the endpoint
const EMULATOR_PROJECT_ID = 'demo-community-platform-emulated'
let cmd = `${FIREBASE_BIN} use ${REAL_PROJECT_ID} && ${FIREBASE_BIN} --project=${EMULATOR_PROJECT_ID} emulators:start`
cmd = `${cmd} --import=${EMULATOR_IMPORT_FOLDER}`
// change this value if also wanting to export data
if (false) {
cmd = `${cmd} --export-on-exit=${EMULATOR_EXPORT_FOLDER}`
}
const env = {
GCLOUD_PROJECT: EMULATOR_PROJECT_ID,
GOOGLE_APPLICATION_CREDENTIALS: prepareGoogleApplicationCredentials(),
}
const child = spawn(cmd, {
shell: true,
stdio: ['inherit', 'inherit', 'inherit'],
env,
} as any)
// listen for close and kill functions compiler if error thrown
child.on('close', (code) => {
if (code === 1) {
console.error('[Emulator Error]')
functionsCompiler.close(() =>
console.log('Functions compiler terminated'),
)
}
})
}
/**
* Generate a custom service-account file for use with GOOGLE_APPLICATION_CREDENTIALS application login.
* @returns path to generated json file
*
* Note - whilst it is insecure to publish service account details in an open-source repo,
* the limited priviledges available to the demo project service account encrypted below are
* considered safe enough for sharing
*/
function prepareGoogleApplicationCredentials() {
const serviceAccountPath = path.resolve(
os.tmpdir(),
'firebase-functions-emulator.json',
)
const READ_ONLY_SERVICE_ACCOUNT_B64 = `ewogICJ0eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAicHJlY2lvdXMtcGxhc3RpY3MtdjQtZGV2IiwKICAicHJpdmF0ZV9rZXlfaWQiOiAiOTY5N2MyOWJjNmE3NWM2MmUzOGYzMzJiNTA3YTIwMDJjZTkxODk4ZCIsCiAgInByaXZhdGVfa2V5IjogIi0tLS0tQkVHSU4gUFJJVkFURSBLRVktLS0tLVxuTUlJRXZnSUJBREFOQmdrcWhraUc5dzBCQVFFRkFBU0NCS2d3Z2dTa0FnRUFBb0lCQVFDemZuOHlOWFJYZUcwM1xueXRnYzJsQzZtZ3o5WWhUZDNVNytnU3ZEc2t3aUl0YVZ2OU1oRXVpSFRLWHlZditCMGVCWTRkV0pSZjNPUW9wSVxuS2V3ZGdlVUw1YlhkVm5NZDkzTVlpVGVrY1RzMk5xTU5CeW5VZlpvemdXMVU1Ym1tS0lhT2dvbkNBUW1Nd01TZVxuNHZPQS9FaXFxdGppRG83TzNKT2VOOWFtS2hadUhwWVd2bHdmNU1MVmw3dTkzR2ZCdFpmZ0RlVmFpR2RkTU1PbVxuRit5SWJCNlFSbC9sNjhJaWt5UmtNSmcwRmtQOWhBb1NMK240aHZSYlMzSkFmMlpMcFJKZUFPaW9LbnJ6R3dLVFxuQ0NkZUZhcDBFNFpkNVppbmNDMXkyVFF6M3J4ZXFudGxvOXlFUWRvTE9Kbm5lN05DS3draS9xYnV4NWZaUFh3ZFxucXQ3T3BRYUpBZ01CQUFFQ2dnRUFCdEcvSDU5V0I1WTIwUXd5OUhxclgwR0h6WThldzVTYlFoSlNnVFY2akwvMVxuMXR2aU43TVdGRURhVzZoODlGZk96aFdyWlJNY2lzdnVvTUg5KzF1Q1loYTB0Mzluc1h0am15cW9hMllWWmJDQ1xuOXBWZmRwZ2NoZ2tzYUJHdndWTXdGSVU3V2x4cmVsWmZDZm5ObmtpSGNydDVzTEgwcFVHT1ZyQWdwckM1NkM1UFxuSlE2VzhLZHMvcVhQeXRmMktvWHhhR2ExaGFWNGd3ZHo1bStXVWcrNTFZdFVFbEg0V0ROSkQvV3RiVUJodUNpRVxuWlhzZGNrVWVuT0xjTUJ5V0RldEVYVi9OaHRzK2t2RCtvNFMvd3N2UHZySWZpRGtGQTVzNUQzMVh0NDczeG9GQVxuUkxmZ0hIVWtTMDFDWjlSdFJpS1I2SThjTlFidVRJYjcyWVlqbXN5dnNRS0JnUUR5Rmc5cGg0Z24zbkUxL3Jib1xuN1Z0K2RnYlJzdDErWEtWeUdRQjJ0ejRibFYrYnZuY3daTXBIZGhNc3hDSnlkVllibDlNeGZXbkhqRTI0U0I4YVxuUUxUcU1QTXhqVTNHWURwOVVHL2ZZR296TmVxalZaNjJwNVFoTDhOZ2o4bktpUzBBalpqeGJIYWN2dGtCMU42T1xuWjIraEhhb0ZyM2tBOTZrbU0xamhkaEJZbVFLQmdRQzl6M3pDdkJxdlNqSFRXQisvNDExaU5oc3ZiaVNBQURuL1xuZ0pNMlF1UFFpK3VNVW9mNHlNK3BFcGNRSzFyaHo3Y2ZINGtiTWt6aGk5NGRtaXNMK1lkeW81di9aT25ZNmIvMVxuUTYrYXlzRmYwMXdoVjVHWDE5QmNiOThyNFdTTjgxc0lzZlhJQ1hBWHI2bXd3K0wxWDJHYXhlMzBBMk92UkF4cFxuZ0VRc1hYa2pjUUtCZ1FEaUdreUd5YmtYVTZEMVIxTmF0ZVhRZFRmbFAyTzBFNS9Lc3lORnZkdmFNMmM2dFdmb1xuNFJvMEtFbThjK3VnYjRyZTlxeWYrbnlEamIxQk1zc3AzK21aR2VMcUV3bmpFQmxRMVlISFpldUtyUDdiVXFxTFxuK25SVmtxQ3VYVjJoTndHN0ZJVVdaN0ZZc0w5S0FLRms2NkxOSGtHZ1VjVjRhOWVtQUNzeFdPM25jUUtCZ1FDclxucUNxM1hqQnYySlNwQXFoci9HNW10SEh2ZWhldVh3WVVxSzM1dzVLTjl3eEY4aG1nQjlPdG51OVpJeXhrelZwWlxuM2tZN2Ywa0NMV0RwdXBRMWx5eEVvK3dmazU3Y21jRU5TWEpWZGdwZDVDTU0wRW9PWFpIRkZ6Tm9Wc1YranRnRVxuVEJUd0hJRHdHdUJHeVZESEFjU2VtV1B5YXVKTERpcC9ldzJzWmJoNU1RS0JnQ1ZROU9qaTJNb2xtQ0M2bTYzeFxuYXMxMnIxdjJhV3FzbHVlTHRvNHV4NEh4ZEkxN0JQU3RsWjdIKy95Z0hXdmxDUWNJTU5TWkRpSFpBVWh0Mzg1aVxubmp0WFYxVkxZR05sNEIyRXJabU82VUhMTzAySndOMUw0M1d4bm5yY3ZlMFp4ZnJ5bEpkUVpTTElUaWFraGVpRlxuN0piK2FxNCtkTVdDYk1yVnp4WGozb29KXG4tLS0tLUVORCBQUklWQVRFIEtFWS0tLS0tXG4iLAogICJjbGllbnRfZW1haWwiOiAiYmFja2VuZC1mdW5jdGlvbnMtZGV2QHByZWNpb3VzLXBsYXN0aWNzLXY0LWRldi5pYW0uZ3NlcnZpY2VhY2NvdW50LmNvbSIsCiAgImNsaWVudF9pZCI6ICIxMTA1OTEyOTQyODk1MDE1NzI4NTEiLAogICJhdXRoX3VyaSI6ICJodHRwczovL2FjY291bnRzLmdvb2dsZS5jb20vby9vYXV0aDIvYXV0aCIsCiAgInRva2VuX3VyaSI6ICJodHRwczovL29hdXRoMi5nb29nbGVhcGlzLmNvbS90b2tlbiIsCiAgImF1dGhfcHJvdmlkZXJfeDUwOV9jZXJ0X3VybCI6ICJodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9vYXV0aDIvdjEvY2VydHMiLAogICJjbGllbnRfeDUwOV9jZXJ0X3VybCI6ICJodHRwczovL3d3dy5nb29nbGVhcGlzLmNvbS9yb2JvdC92MS9tZXRhZGF0YS94NTA5L2JhY2tlbmQtZnVuY3Rpb25zLWRldiU0MHByZWNpb3VzLXBsYXN0aWNzLXY0LWRldi5pYW0uZ3NlcnZpY2VhY2NvdW50LmNvbSIKfQ==`
const buffer = Buffer.from(READ_ONLY_SERVICE_ACCOUNT_B64, 'base64')
const serviceAccountTxt = buffer.toString('utf8')
fs.writeFileSync(serviceAccountPath, serviceAccountTxt)
// add script to delete generated file on process exit
process.on('SIGINT', () => {
fs.removeSync(serviceAccountPath)
process.exit(0)
})
return serviceAccountPath
}

View File

@@ -20,9 +20,9 @@
"start:components": "yarn workspace oa-components dev", "start:components": "yarn workspace oa-components dev",
"start:platform": "yarn build:shared && vite", "start:platform": "yarn build:shared && vite",
"start:platform-ci": "yarn build:shared && vite --port 3456", "start:platform-ci": "yarn build:shared && vite --port 3456",
"start:emulated": "concurrently --kill-others --names functions,themes,components,platform --prefix-colors yellow,cyan,blue,magenta \"yarn workspace functions start\" \"yarn start:themes\" \"yarn start:components\" \"cross-env PORT=4000 yarn start:platform\"", "frontend:watch:for-emulated-backend": "concurrently --kill-others --names themes,components,platform --prefix-colors yellow,cyan,blue,magenta \"yarn start:themes\" \"yarn start:components\" \"cross-env PORT=4000 yarn start:platform\"",
"start:emulated:docker": "concurrently --names functions,themes,components,platform,emulators --prefix-colors yellow,cyan,blue,magenta,green --kill-others \"yarn workspace functions watch\" \"yarn start:themes\" \"yarn start:components\" \"cross-env PORT=4000 yarn start:platform\" \"yarn workspace oa-emulators-docker start\"", "backend:emulator:watch": "concurrently --kill-others \"yarn workspace functions watch\" \"docker-compose up --force-recreate --build emulator\"",
"start:emulated:docker:local": "concurrently --names functions,themes,components,platform,emulators --prefix-colors yellow,cyan,blue,magenta,green --kill-others \"yarn workspace functions watch\" \"yarn start:themes\" \"yarn start:components\" \"cross-env PORT=4000 yarn start:platform\" \"yarn workspace oa-emulators-docker start --repo=\"", "backend:emulator:stop": "docker stop $(docker ps -a -q)",
"build:themes": "yarn workspace oa-themes build", "build:themes": "yarn workspace oa-themes build",
"build:components": "yarn workspace oa-components build", "build:components": "yarn workspace oa-components build",
"build:vite": "tsc && vite build", "build:vite": "tsc && vite build",

View File

@@ -0,0 +1,173 @@
---
id: firebase-emulator
title: Firebase Emulator
---
# Introduction
To run backend functions locally, Firebase provides a suite of emulators that mimic most of the functionality seen online (e.g. Firestore, Storage, Functions, triggers, etc.).
Although each service is an individual emulator and we run several of them, for simplicity we will refer to them collectively as the emulator.
# Getting started
We start the frontend and backend separately, so we have two different commands. Generally, for development, you would have both commands running at the same time in different terminals.
## Prerequisites
The emulator can be a bit tricky to set up and populate with seed data, so a Docker image has been created that contains all of the necessary setup.
You will need to be able to run `docker-compose` commands on your local machine.
The easiest way to do that is to install [Docker Desktop](https://docs.docker.com/desktop/).
You can ensure it is running with:
```sh
docker-compose -v
# Docker Compose version v2.20.3
```
## Commands
To make things easier, some Yarn commands were created.
### Starting the frontend
```
yarn run frontend:watch:for-emulated-backend
```
This is similar to `yarn run start` but configures the frontend to connect to the local backend.
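
For reference, the root `package.json` in this commit defines the script roughly as follows — the usual themes, components and platform watchers, with the platform served on port 4000 so it talks to the emulated backend:

```sh
concurrently --kill-others --names themes,components,platform \
  --prefix-colors yellow,cyan,blue,magenta \
  "yarn start:themes" "yarn start:components" \
  "cross-env PORT=4000 yarn start:platform"
```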
### Starting the backend
```
yarn run backend:emulator:watch
```
This starts the Firebase emulator, loads code into it, and watches for changes. There is initial data but any changes will be lost after the emulator is stopped.
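
Per the root `package.json` in this commit, the script runs the functions webpack watcher alongside the Docker Compose `emulator` service:

```sh
concurrently --kill-others \
  "yarn workspace functions watch" \
  "docker-compose up --force-recreate --build emulator"
```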
### Stopping the backend
Due to some technical limitations, the `CTRL+C` keyboard shortcut may not stop the emulator, so it may be necessary to run:
```
yarn run backend:emulator:stop
```
Note that this script runs `docker stop` against every container on the machine (see the root `package.json`), so it will also stop any unrelated Docker containers you have running.
## Note
It is assumed that all of these commands will be run from the root directory of the project. Running them from elsewhere may cause issues.
## Visiting the frontend
The frontend should start at [localhost:4000](http://localhost:4000). You should see a small banner at the bottom of the page indicating that the emulators are in use.
![](./images/emulator-docker-frontend.png)
## Visiting the emulator dashboard
The emulator should start at [localhost:4001](http://localhost:4001).
![Dashboard](./images/firebase-emulator-dashboard.png)
Clicking on tabs will take you to a page similar to the Firebase console, from where you can interact with individual services.
## Seed data
The emulator loads hardcoded data and any changes are only stored in temporary memory. No changes are kept between sessions.
If you see strange data issues, they are most likely caused by the browser's own caching. You can verify this by using another browser; to fix it in the original browser, its IndexedDB cache needs to be cleared manually.
### User logins
The seed data comes preloaded with some user accounts. When the emulator is running, you can see the complete list at [localhost:4001/auth](http://localhost:4001/auth).
Examples:
Admin user account:
```
email: admin@example.com
password: wow_backend_development_is_fun
```
Normal user account:
```
email: normal_jim@example.com
password: thanks_emulator_man
```
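
If you want to exercise a login without the frontend, the Auth emulator exposes the standard Identity Toolkit REST API on port 4005 and accepts any API key. A minimal sketch (the `fake-api-key` value is an arbitrary placeholder):

```sh
curl -s -X POST \
  "http://localhost:4005/identitytoolkit.googleapis.com/v1/accounts:signInWithPassword?key=fake-api-key" \
  -H "Content-Type: application/json" \
  -d '{"email":"admin@example.com","password":"wow_backend_development_is_fun","returnSecureToken":true}'
```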
### Improving it
You can improve the seed data by making changes via the application or the Firebase UI, exporting it, and making a pull request. This will help make development and testing easier for you and others in the future. The individual steps are below; a consolidated set of commands follows the list.
1. Get the container name using `docker ps`.
2. Run the export script:
```
docker exec -it <name> /app/export.sh
```
3. Transfer the data from the container to your machine:
```
docker cp <name>:/app/dump ./functions/data/
```
4. Delete the current `emulator` folder.
5. Rename the `dump` folder to `emulator`.
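
Assuming the default paths from `docker-compose.yml` (seed data mounted from `functions/data/emulator`), the whole flow could look like this:

```sh
# 1. Find the name of the running emulator container
docker ps

# 2. Export the emulator data inside the container
docker exec -it <name> /app/export.sh

# 3. Copy the export from the container to the host
docker cp <name>:/app/dump ./functions/data/

# 4. Replace the old seed data with the new export
rm -rf ./functions/data/emulator
mv ./functions/data/dump ./functions/data/emulator
```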
## Function development
### Writing functions code
The emulator binds to the `functions/dist` folder so that changes made there are reflected in the emulator. On Linux these changes should be picked up immediately. On Windows the changes are not always detected and may require stopping the emulator and starting it back up.
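
If the emulator container is already running and you only need the functions bundle rebuilt, the `watch` script from `functions/package.json` can also be run on its own:

```sh
yarn workspace functions watch
```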
### Invoking functions
Functions can be invoked in different ways depending on their trigger type.
For functions triggered by storage or database changes, making changes directly on the dashboard or from the frontend should trigger the corresponding function.
Similarly, callable functions should be triggered from frontend code.
For functions triggered by an HTTP request, you can call them directly from the command line, a web browser, or a REST client such as [Insomnia](https://insomnia.rest/).
E.g. calling the emulator `seed-users-create` function via a GET request:
```
http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-users-create
```
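
For example, from the command line (assuming the emulator is running with its default ports):

```sh
curl "http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-users-create"
```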
![](images/emulator-docker-http-req.png)
Read more about [connecting your app to the Cloud Functions Emulator](https://firebase.google.com/docs/emulator-suite/connect_functions).
### Accessing logs
If the emulator throws an error you may want to check the generated debug log files. These live in the container's root `/app` folder.
You can access the file system within the Docker container directly by using the
[Remote-Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension for VS Code and running its `Attach to Running Container` command.
![](images/emulator-docker-remote.png)
Once attached to the container you can open the `/app` folder to view the files.
![](images/emulator-docker-remote-files.png)
Alternatively you can have Docker execute commands directly in the container, e.g.
```
docker exec -it <name> ls
docker exec -it <name> cat /app/firestore-debug.log
```
The `name` can be retrieved by running `docker ps`.
## Troubleshooting
See the `Dockerfile.emulator` for some debugging tips.

View File

@@ -1,162 +0,0 @@
---
id: firebase-emulators-docker
title: Firestore Emulators
---
In order to test backend functions locally, firebase provides a suite of emulators to mimic most functionality seen online (e.g firestore, storage, functions, triggers etc.)
The emulators can be a bit tricky to setup and populate with seed data, and so instead an image has been created in docker that contains all the code to run the emulators and functions code
## Prerequisites
You will need to be able to run `docker` commands locally, the easiest way is to install [Docker Desktop](https://docs.docker.com/desktop/)
You can ensure it is running with the command `docker -v`
```sh
docker -v
# Docker version 20.10.14, build a224086
```
## Getting Started
Make sure to build the project
```
yarn build
```
Then you can start the emulators
```
yarn start:emulated:docker
```
This will start the following:
- **Docker emulator**
May take a few minutes to download the required image
- **Functions src watcher**
To recompile functions on update
- **Platform server**
On port 4000 to indicate that it should communicate with emulators instead of live site)
## Emulator Dashboard
The emulator should start at [http://localhost:4001](http://localhost:4001). Follow the link to see an overview of the available services
![Dashboard](./images/firebase-emulators-dashboard.png)
Clicking on individual tabs will take you to a page similar to the firebase console, from where you can interact with services.
Note - any data populated into the emulator will be deleted after the emulator has closed (restoring to original state). See the section below about persistent and seed data
## Resetting seed data
When the emulator is stopped the image is destroyed, so each time the emulators are restarted a clean set of data will be available.
If using the frontend data changes may still persist due to the browser's own caching mechanisms. In this case the browser indexeddb cache will need to be manually cleared
## Frontend
The frontend should start on http://localhost:4000
You should see a small banner at the bottom of the page that informs emulators are in use.
![](./images/emulators-docker-frontend.png)
The data that appears will have been exported at the time the image was made, and so may be slightly outdated when compared to the live site. You can see the time the data was last exported.
You can see the version of data used in the command line output, e.g. data exported from precious plastic
![](../images/emulators-docker-cli.png)
### User Login
By default the image comes preloaded with user auth accounts as found in [shared\mocks\authUsers.ts](https://github.com/ONEARMY/community-platform/tree/master/shared/mocks/authUsers.ts). This means you can login as any of these users, e.g.
```
email: 'demo_admin@example.com',
password: 'demo_admin',
```
## Function Development
### Writing functions code
The emulators bind to the `functions/dist` folder so that changes made will be reflected in the emulators. On linux these changes should be picked up immediately, and so live-reload can be added for functions development via `yarn workspace functions watch`
If running on windows the changes are not always detected, and may require spinning the emulators down and then starting back up
### Invoking functions
Functions can be invoked in different ways depending on their trigger type.
For functions triggered by storage or database changes, making changes directly on the dashboard or from the frontend should trigger the corresponding function.
Similarly callable functions should be called triggered from frontend code.
For functions triggered by http request you can call directly either from command line, a web browser or REST client such as [Insomnia](https://insomnia.rest/)
E.g. calling the emulator `seed-users-create` function via a GET request:
```
http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-users-create
```
![](images/emulators-docker-http-req.png)
Read more about [Connecting your app the Cloud Functions Emulator](https://firebase.google.com/docs/emulator-suite/connect_functions).
### Accessing Logs
If the emulator throws an error you may want to check generated debug.log files. These will exist in the container in the root `/app` folder.
You can access the file system within the docker container directly using the
[Remote-Containers](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) extension for vscode, and running the command to `attach to running container`
![](images/emulators-docker-remote.png)
Once running in the container you can open the `/app` folder to view files
![](images/emulator-docker-remote-files.png)
Alternatively you can request docker to execute commands directly on the container, e.g.
```
docker exec -it community-platform-emulator ls
docker exec -it community-platform-emulator cat /app/firestore-debug.log
```
## Extending the image
The code used to build the docker image can all be found in the [packages/emulators-docker](https://github.com/ONEARMY/community-platform/tree/master/packages/emulators-docker) workspace.
### Updating seed data
Admins can export data online via google cloud console. Once exported it should be formatted in a namespaced way (e.g. `pp-yyyy-mm-dd` for data exported from precious-plastic) and placed in the seed_data folder for the emulators-docker workspace.
Additionally any references to the previous data should be replaced with the updated (e.g. github action, gitignore and default config defined in workspace common.ts file)
### Custom Image
A custom image can be built and run by passing custom repo or tag args to the build script, e.g.
```
yarn workspace oa-emulators-docker build --repo=my_custom_repo --tag=my_custom_tag
```
If just intending to test locally a blank `--repo=` can be provided to avoid trying to pull an image from dockerhub and run locally
```
yarn workspace oa-emulators-docker build --repo=
```
```
yarn start:emulated:docker:local
```
## Troubleshooting
### Known Issues
See list of known issues in the workspace [README](https://github.com/ONEARMY/community-platform/tree/master/packages/emulators-docker)

View File

@@ -1,196 +0,0 @@
# Firebase Emulators (Legacy)
In order to test backend functions locally, firebase provides a suite of emulators to mimic most functionality seen online (e.g firestore, storage, functions, triggers etc.)
In order to use the emulators run the following start script
## Prerequisites
To run emulators locally you will need (as described in: https://firebase.google.com/docs/emulator-suite/install_and_configure)
- [Firebase CLI](https://firebase.google.com/docs/cli) version 8.14.0 or higher
- [Java](https://openjdk.java.net/install/) version 1.8 or higher
## Getting Started
```
yarn start:emulated
```
This will start the following:
- **Functions emulator**
May take a few minutes to download required binaries when running for the first time
- **Functions src watcher**
To recompile functions on update
- **Platform server**
On port 4000 to indicate that it should communicate with emulators instead of live site)
## Emulator Dashboard
The emulator should start at http://localhost:4001. Follow the link to see an overview of the available services
![Dashboard](./images/firebase-emulators-dashboard.png)
Clicking on individual tabs will take you to a page similar to the firebase console, from where you can interact with services.
Note - any data populated into the emulator will be deleted after the emulator has closed (restoring to original state). See the section below about persistent and seed data
# Seed data
By default the emulators load any data found in the [functions/data/emulated](https://github.com/ONEARMY/community-platform/tree/master/functions/data/emulated) folder, which can be previously exported from another firebase app or emulator instance.
By default this data is not committed to the repo and so initial data will be empty, however specific zip files have been generated from site backup files and can be loaded for testing
## Loading seed data
By default when the script first runs it will populate seed data from [functions/data/seed](https://github.com/ONEARMY/community-platform/tree/master/functions/data/seed). This can be repopulated either by deleting the [functions/data/emulated](https://github.com/ONEARMY/community-platform/tree/master/functions/data/emulated) folder, or by manually calling the seed data script:
```
yarn workspace functions run emulator:seed
```
This will load the default seed data from the zip file [functions/data/seed](https://github.com/ONEARMY/community-platform/tree/master/functions/data/seed/seed-default.zip).
The default data contains a snapshot of most howtos, mappins etc. from the export data of the file (so may not be fully up-to-date). It also includes 2 user profiles for login:
```
username: demo_user@example.com
password: demo_user
```
```
username: demo_admin@example.com
password: demo_admin
```
If you need newer or other data sources contact the repo admins who can hopefully help out.
The fully seeded database should look something like this:
![Seeded DB](./images/firebase-emulator-seeded.png)
## Updating seed data
When the emulators close they discard any changes made, so seed data documents that have been updated will revert to their original state next time the emulator is loaded.
Whilst this is useful to preserve a clean testing state, sometimes it might be desirable to persist changes (such as adding additional auth users, or specific example docs)
This can be achieved by passing the `--export-on-exit=./path/to/export/folder` flag to the script that starts the functions emulators. This can be run by modifying the functions start at [functions/scripts/start.ts](https://github.com/ONEARMY/community-platform/tree/master/functions/scripts/start.ts)
```js
// change this value if also wanting to export data
if (false) {
cmd = `${cmd} --export-on-exit=${EMULATOR_IMPORT_FOLDER}`
}
```
NOTE - due to filepath handling this is usually best done on a mac/linux device (windows export formattedly inconsistently for linux)
## Resetting seed data
As previously mentioned, all data will be reverted back to original/seed state after emulators have closed, so there is no need to reset. If manually exported data has been copied to overwrite the seed data, the default seed can be restored using the load script above.
# Calling Functions
## HTTP Functions
E.g. A development and testing API has been created at [functions/src/dev/index.ts](https://github.com/ONEARMY/community-platform/tree/master/functions/src/dev/index.ts). When running it can be called by making a GET request to:
```
http://localhost:4002/{projectId}/us-central1/dev
```
Where the projectId may be specified from configuration (default for emulators is `demo-community-platform-emulated`)
Using a REST client like [Insomnia](https://insomnia.rest/) or [Postman](https://www.getpostman.com/) can simplify the process of making api requests
_E.g. Insomnia Rest Client_
![Insomnia Rest Client](./images/firebase-emulator-rest-client.png)
# Authentication
By default emulators allow full read/write access to all resources, however firebase functions still expect an authenticated user in order to access various external APIs before completing operations.
The current workaround for this is authenticating using a custom service-account that has limited (read-only) access to resources on the testing project. This is done automatically during the start script [functions/scripts/start.ts](https://github.com/ONEARMY/community-platform/tree/master/functions/scripts/start.ts).
Alternatively developers can request access to join the firebase project for full access to the testing project, however it's hoped that this is not required.
# Troubleshooting
## Port in use
_Error Message_
`Error: Could not start Database Emulator, port taken.`
Should see exact issue in warning, e.g.
`! firestore: Port 4003 is not open on localhost, could not start Firestore Emulator.`
Or
`Something is already running on port 4000.`
Try to identify what is already running, and if required kill the process. Methods may differ depending on operating system, here are a couple examples:
_Windows: List processes on port 4003_
<!--- spell-checker: disable --->
```c
netstat -ano | findstr 4003
/* example output */
TCP 127.0.0.1:4003 0.0.0.0:0 LISTENING 8272
```
<!--- spell-checker: enable --->
_Windows: Kill process_
```c
taskkill /F /PID 8272
```
_Linux: List processes on port_
See a few examples at: https://stackoverflow.com/questions/11583562/how-to-kill-a-process-running-on-particular-port-in-linux/32592965
e.g.
```
sudo apt-get install lsof
```
```
npx cross-port-killer 4003
```
## Firestore Emulator fatal error
If one of the emulators throws a fatal error you might see a vague error message such as:
```
⚠ firestore: Fatal error occurred:
Firestore Emulator has exited with code: 1,
stopping all running emulators
```
Usually a more informative log can be found in a created log file, e.g. [firestore-debug.log](https://github.com/ONEARMY/community-platform/tree/master/functions/firestore-debug.log)
```
Exception in thread "main" com.google.cloud.datastore.core.exception.DatastoreException: /mnt/c/apps/oneArmy/community-platform/functions/data/emulated/firestore_export/all_namespaces\all_kinds\all_namespaces_all_kinds.export_metadata (No such file or directory)
at com.google.cloud.datastore.emulator.impl.ExportImportUtil.parseBackupFile(ExportImportUtil.java:316)
at com.google.cloud.datastore.emulator.impl.ExportImportUtil.fetchEntities(ExportImportUtil.java:62)
at com.google.cloud.datastore.emulator.firestore.CloudFirestore.main(CloudFirestore.java:90)
Caused by: java.io.FileNotFoundException: /mnt/c/apps/oneArmy/community-platform/functions/data/emulated/firestore_export/all_namespaces\all_kinds\all_namespaces_all_kinds.export_metadata (No such file or directory)
at java.base/java.io.FileInputStream.open0(Native Method)
at java.base/java.io.FileInputStream.open(FileInputStream.java:219)
at java.base/java.io.FileInputStream.<init>(FileInputStream.java:157)
at com.google.cloud.datastore.emulator.impl.ExportImportUtil.parseBackupFile(ExportImportUtil.java:312)
```
In this example it is trying to locate the seed data which does not exist, so to fix run the seed command
```
yarn workspace functions run emulator:seed
```
There might be similar issues logged in [firebase-debug.log](https://github.com/ONEARMY/community-platform/tree/master/functions/firebase-debug.log), however this file might be deleted on exit so will require opening before crash

View File

@@ -42,4 +42,4 @@ Any existing data may be replaced and lost during restore operations
 :::
 Alternatively the backup could be loaded into the emulator and data queried from there as required.
-See the [Firebase Emulators](./firebase-emulators-docker.md) for more information
+See the [Firebase Emulator](./firebase-emulator.md) for more information

View File

@@ -161,4 +161,4 @@ There may also be some cases where methods want to be tested against production
 There currently isn't a single automated way to do this, however you can see an example of the manual steps involved in the `test_functions` step of the [CircleCI pipeline](https://github.com/ONEARMY/community-platform/blob/feat/aggregation-tests/.circleci/config.yml#L244-L245)
-Alternatively developers can follow the steps in [Firebase Emulators Docker](./firebase-emulators-docker.md) to run the docker emulators locally and manually invoke functions
+Alternatively developers can follow the steps in [Firebase Emulators Docker](./firebase-emulator.md) to run the docker emulators locally and manually invoke functions

View File

@@ -41,7 +41,7 @@ module.exports = {
       label: 'Backend Development',
       items: [
         'Backend Development/BackendOverview',
-        'Backend Development/firebase-emulators-docker',
+        'Backend Development/firebase-emulator',
         'Backend Development/firestore-backup',
         'Backend Development/integrations',
         'Backend Development/tests',

View File

@@ -1,7 +0,0 @@
node_modules
app
seed_data/*
# retain latest tar file
!seed_data/pp-2023-11-09c.tar.gz
exports
build.args

View File

@@ -1,105 +0,0 @@
# Use bullseye instead of alpine due to v11.18 pubsub crash issue (could try revert post emulator 0.7.1)
# https://github.com/firebase/firebase-tools/issues/5256
# https://github.com/micrometer-metrics/micrometer/issues/2776)
# Use node 18.15 (instead of 18.x)
# https://github.com/firebase/firebase-tools/issues/5614#issuecomment-1508515106
# (note - may be fixed now that procps installed - to review)
FROM node:18.15.0-bullseye-slim AS community-platform-builder
ARG FIREBASE_TOOLS_VERSION
ARG BUILD_DATE
ARG VCS_REF
LABEL org.label-schema.schema-version="1.0" \
org.label-schema.name="" \
org.label-schema.version=${FIREBASE_TOOLS_VERSION} \
org.label-schema.build-date=${BUILD_DATE} \
org.label-schema.description="" \
org.label-schema.url="" \
org.label-schema.vcs-url="" \
org.label-schema.vcs-ref=${VCS_REF}
ENV HOME=/home/node
# Install Java and curl
# NOTE - this will not cache unless running with buildkit
RUN apt-get update && apt-get -y install openjdk-11-jre-headless dos2unix curl procps && apt-get clean
# Install firebase and check versions
RUN \
yarn global add firebase-tools@${FIREBASE_TOOLS_VERSION} && \
firebase -V && \
java -version && \
chown -R node:node $HOME
# First run to setup emulators
RUN firebase setup:emulators:database && \
firebase setup:emulators:firestore && \
firebase setup:emulators:pubsub && \
firebase setup:emulators:storage && \
firebase setup:emulators:ui
WORKDIR /app
# Copy dist package.json and install (step will be cached unless changed)
RUN mkdir -p /app/functions/dist
COPY ./app/functions/dist/package.json /app/functions/dist/package.json
RUN cd /app/functions/dist && yarn install && yarn cache clean
# Copy additional config files (done individually to not override dist package.json)
COPY ./app/.firebaserc /app/.firebaserc
COPY ./app/firebase.json /app/firebase.json
COPY ./app/firebase.storage.rules /app/firebase.storage.rules
COPY ./app/credentials.json /app/credentials.json
COPY ./app/functions/.runtimeconfig.json /app/functions/.runtimeconfig.json
COPY ./app/functions/dist/index.js /app/functions/dist/index.js
# Copy seed data
RUN mkdir -p /app/seed_data && mkdir -p /app/import
COPY ./seed_data/pp-2023-11-09c /app/seed_data
# Copy config files. Ensure executable and lf line format
RUN mkdir -p /app/config
COPY ./config /app/config
RUN dos2unix /app/config/bootstrap.sh && chmod +x /app/config/bootstrap.sh
# Prompt firebase to use json credentials for login by exporting variable
ENV GOOGLE_APPLICATION_CREDENTIALS=/app/credentials.json
# Ensure runtime config vars picked up (https://github.com/firebase/firebase-tools/issues/3983)
ENV CLOUD_RUNTIME_CONFIG=/app/functions/.runtimeconfig.json
# Fix: ensure database emulator can configure with url
# https://github.com/firebase/firebase-admin-node/issues/116
ENV FIREBASE_DATABASE_EMULATOR_HOST='http://127.0.0.1:4006'
# Troubleshooting - can just run to get cli access to exec below manually and check logs
# CMD [ "/bin/sh" ]
# Prepare seed data used in the app
RUN \
# Include a temporary env file to avoid timeouts (https://github.com/firebase/firebase-tools/issues/2837)
echo "FUNCTIONS_EMULATOR_TIMEOUT_SECONDS=540s" > /app/functions/dist/.env.local && \
# Make a first run of emulators to ensure configured correctly and allow any seed data to be processed
# via bootstrap script. Once processed seed data is then re-exported for use at runtime
firebase emulators:exec \
--project ${FIRESTORE_PROJECT_NAME:-demo-community-platform-emulated} \
--import=/app/seed_data --export-on-exit=/app/import \
"/bin/bash /app/config/bootstrap.sh" \
# Check that data exists and remove seed once complete
&& rm -R /app/seed_data \
&& rm -R /app/functions/dist/.env.local \
# Clear global firebase-tools (will run from local node_modules which is pinned to same version), and dangling emulator zips
# shaves around 200MB off final image
&& yarn global remove firebase-tools && yarn cache clean --all\
&& rm /home/node/.cache/firebase/emulators/*.zip
# TODO - find a way to run the functions test spec against emulator
# Exposed Ports - These should match firebase.json config
EXPOSE 4001 4002 4003 4004 4005 4006 4007 5000
CMD ./functions/dist/node_modules/.bin/firebase emulators:start \
--only auth,functions,firestore,pubsub,storage,hosting,database \
--project ${FIRESTORE_PROJECT_NAME:-demo-community-platform-emulated} \
--import=/app/import
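# NOTE (sketch) - once built, the image can also be run directly with the emulator ports published, e.g.
#   docker run --rm -p 4001-4008:4001-4008 onearmyplatform/community-platform-emulator:pp-2023-11-09c
# (the workspace start script normally creates the container with these port bindings and volume binds for you)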

View File

@@ -1,60 +0,0 @@
## Quick start
See [documentation](../../packages/documentation/docs/Backend%20Development/firebase-emulators-docker.md)
## About
This workspace aims to provide a fully configured and seeded suite of firebase emulators within a docker container for use with the oa-community-platform.
Whilst it is possible to download and execute the firebase emulators directly, doing so comes with a few problems, including:
- Required java download
- Configuring service account and/or additional auth
- Manual seed data required
- Handling port bindings
By providing a docker image we can address all of the issues above and open up better pathways for future integration with additional dockerised services.
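As a rough sketch, assuming Docker is available locally and using the `build` and `start` scripts defined in this workspace's package.json, the image can be built and started from the repository root with:
```
yarn workspace oa-emulators-docker build
yarn workspace oa-emulators-docker start
```
The build script runs the prepare step itself before triggering the Docker build, while start attaches to an existing emulator container or creates a new one.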
## TODO
[?] - Support live-reload for functions (linux)
[ ] - Optimise image size (reduce RUN commands, possible multi-stage build)
[ ] - Remove all legacy functions code
[ ] - Add tests to ensure data is exported as expected (e.g all collections exist)
[ ] - Consider binding functions src folder and not dist (will require configuring yarn workspaces to populate shared as required, known issue below)
[ ] - Find means to have functions-specific lock file and use as part of build process
[ ] - Automate seed data update (cron action to export and make pr)
[ ] - Add docker-compose image for easier customisation/volume mapping (?)
[ ] - Possible option to use without functions dist (currently requires functions build during start)
[ ] - Provide windows-based docker image (for live-reload on windows)
## Known Issues
- The DB can only accept write updates from the client SDK if project names match, so the Dockerfile may need to be updated when using a project name that differs from the one hardcoded into the frontend (currently `demo-community-platform-emulated`)
- Changes made within the workspace package.json will not be reflected in the container.
node_modules cannot be bound via volumes as they depend on the OS, so updating package.json will require a new build with updated modules. A workaround would be binding the full functions src with a platform-specific docker image (e.g. node-18-win/node-18-linux), or simply documenting the required build update (discussion about node-windows support: https://github.com/nodejs/docker-node/pull/362)
- Live-reload of function changes (changes don't seem to be detected through volumes on WSL)
https://forums.docker.com/t/file-system-watch-does-not-work-with-mounted-volumes/12038/20
https://github.com/merofeev/docker-windows-volume-watcher
This might require a manual watch on Windows and an exec on the image, as in L104 of https://github.dev/merofeev/docker-windows-volume-watcher/blob/master/docker_volume_watcher/container_notifier.py
- Ideally we would just copy the functions src code into the docker volume and execute directly from there (which would avoid the live-reload issue), however as the functions depend on other workspaces (namely shared and the main src workspace) binding these will have the same issue as node_modules.
It might be possible to manually build and create symlinks within the docker volume node_modules (in the same way yarn workspaces makes symlinks to the real workspaces), however this adds considerable overhead. It would be more viable once the shared src types are moved to the shared folder, so that only one shared workspace needs to be bound
- The Firebase realtime database emulator does not work. All other emulators support providing a `0.0.0.0` host binding so they can be reached from the docker host, however the realtime database emulator does not appear to work when this is set.
Requires further investigation, possibly linked to https://github.com/firebase/firebase-tools/issues/2633
- The [Boxen](https://www.npmjs.com/package/boxen), globby and log-update packages have been pinned to older versions as newer versions require ES module imports, which is not currently supported by dockerode (https://github.com/apocas/dockerode/issues/632)
## Links
https://hub.docker.com/r/goatlab/firebase-emulator
https://hub.docker.com/r/andreysenov/firebase-tools
https://hub.docker.com/r/mtlynch/firestore-emulator
https://github.com/goat-io/fluent/blob/master/src/Docker/Database/Firebase/Dockerfile

View File

@@ -1,9 +0,0 @@
#!/bin/bash
# DB Bootstrap - call emulator function to clean seed data
echo "Cleaning seed data"
curl -vs http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-clean
echo "Creating seed users"
curl -vs http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-users-create
echo "Creating seed user content"
curl -vs http://localhost:4002/demo-community-platform-emulated/us-central1/emulator/seed-content-generate
echo "Complete"

View File

@@ -1,24 +0,0 @@
{
"name": "oa-emulators-docker",
"version": "1.0.0",
"private": true,
"scripts": {
"build": "ts-node src/build.ts",
"prepare": "ts-node src/prepare.ts",
"start": "ts-node src/start.ts"
},
"dependencies": {
"boxen": "^5.1.2",
"dockerode": "^3.3.5",
"fs-extra": "^10.0.1",
"functions": "workspace:*",
"globby": "^11.0.2",
"log-update": "^4.0.0"
},
"devDependencies": {
"@types/dockerode": "^3.3.8",
"@types/fs-extra": "^9.0.13",
"ts-node": "^10.7.0",
"typescript": "^5.1.6"
}
}

View File

@@ -1,74 +0,0 @@
import Dockerode from 'dockerode'
import { IMAGE_NAME } from './common'
import { PATHS } from './paths'
import { prepare } from './prepare'
const docker = new Dockerode()
async function build() {
const buildArgs = await prepare()
const stream = await startDockerBuild(buildArgs)
await followBuildProgress(stream)
}
/**
* Initialise docker build
* @param buildargs key-value pair of args to be passed into Dockerfile
*/
async function startDockerBuild(buildargs: Record<string, string>) {
const stream = await docker.buildImage(
{
context: PATHS.workspaceDir,
// Paths listed here will be available to dockerfile
src: ['Dockerfile', 'app', 'seed_data', 'config'],
},
{
t: IMAGE_NAME,
buildargs,
},
)
return stream
}
/**
* Docker builds are triggered in the background, so the calling script is unaware
* of progress updates and of when the build has completed or failed.
* Add bindings to the docker modem to track progress, proxy logs to the main stdout, and resolve
* as a promise on build completion/failure
*/
async function followBuildProgress(stream: NodeJS.ReadableStream) {
await new Promise((resolve, reject) => {
// pipe logs, reformatting text which defaults to nested json
docker.modem.followProgress(
stream as any,
(error, result) => {
if (error) {
reject(error)
}
resolve(result)
},
(onProgress) => {
const { stream, error, errorDetail } = onProgress || {}
if (stream && typeof stream === 'string') {
let output = stream
// avoid duplicate line spacing caused by console logging text split
// across multiple lines
if (stream.endsWith('\n')) {
output = stream.slice(0, -1)
}
console.log(output)
}
if (error) {
console.error(error)
if (errorDetail != error) {
console.error(errorDetail)
}
}
},
)
})
}
if (require.main === module) {
build()
}

View File

@@ -1,85 +0,0 @@
const { repo, tag } = extractArgs()
export const REPOSITORY = repo ? `${repo}/` : ''
export const CONTAINER_NAME = 'community-platform-emulator'
export const TAG_NAME = tag
export const IMAGE_NAME = `${REPOSITORY}${CONTAINER_NAME}:${TAG_NAME}`
interface IDockerPortMapping {
expose?: boolean
port: number
type: string
hostPort: string
}
/** Convert emulators port to mapping supported by dockerode */
export function getFirebasePortMapping() {
const portMapping: IDockerPortMapping[] = Object.values<{ port: number }>(
FIREBASE_JSON_EMULATORS_DEFAULT,
).map(({ port }) => {
return { port, expose: true, hostPort: `${port}`, type: 'tcp' }
})
return portMapping
}
/** Default configuration to provide to firebase.json for mapping emulators within docker */
export const FIREBASE_JSON_EMULATORS_DEFAULT = {
ui: {
enabled: true,
port: 4001,
host: '0.0.0.0',
},
functions: {
port: 4002,
host: '0.0.0.0',
},
firestore: {
port: 4003,
host: '0.0.0.0',
},
auth: {
port: 4005,
host: '0.0.0.0',
},
database: {
port: 4006,
host: '0.0.0.0',
},
storage: {
port: 4007,
host: '0.0.0.0',
},
pubsub: {
port: 4008,
host: '0.0.0.0',
},
// Fix address not available issue
// https://github.com/firebase/firebase-tools/issues/4741
hub: {
port: 4400,
host: '0.0.0.0',
},
logging: {
port: 4500,
host: '0.0.0.0',
},
eventarc: {
port: 9299,
host: '0.0.0.0',
},
}
/** Minimal method to extract optional repo and tag args */
function extractArgs() {
const args = { repo: 'onearmyplatform', tag: 'pp-2023-11-09c' }
process.argv.slice(2).forEach((arg) => {
const [selector, value] = arg
.split('=')
.map((v) => v.trim().replace('--', ''))
if (Object.prototype.hasOwnProperty.call(args, selector)) {
args[selector] = value
} else {
console.warn('Arg not recognised', selector)
}
})
return args
}
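// Example (hypothetical) invocation passing the args above from the repo root:
//   yarn workspace oa-emulators-docker build --repo=onearmyplatform --tag=pp-2023-11-09c
// Unrecognised selectors are warned about and ignored; omitted args fall back to the defaults in extractArgs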

View File

@@ -1,25 +0,0 @@
import path from 'path'
const workspaceDir = path.resolve(__dirname, '../')
const rootDir = path.resolve(workspaceDir, '../../')
const firebaseJson = path.resolve(rootDir, 'firebase.json')
const dockerFile = path.resolve(workspaceDir, 'Dockerfile')
const seedDataDir = path.resolve(workspaceDir, 'seed_data')
const buildArgsFile = path.resolve(workspaceDir, 'build.args')
const functionsDir = path.resolve(rootDir, 'functions')
export const PATHS = {
workspaceDir,
rootDir,
firebaseJson,
dockerFile,
functionsDir,
seedDataDir,
buildArgsFile,
}

View File

@@ -1,248 +0,0 @@
import chalk from 'chalk'
import { execSync, spawnSync } from 'child_process'
import fs from 'fs-extra'
import { runtimeConfigTest } from 'functions/scripts/runtimeConfig/model'
import { sync as globbySync } from 'globby'
import path from 'path'
import { FIREBASE_JSON_EMULATORS_DEFAULT } from './common'
import { PATHS } from './paths'
/**
* Prepare all files required for build
* Includes building functions workspace, copying to local folder, creating
* dummy credentials for firebase auth and rewriting required mappings in firebase.json
*/
export async function prepare() {
createSeedZips()
ensureSeedData()
prepareFunctionsBuild()
buildFunctions()
copyAppFiles()
populateDummyCredentials()
addRuntimeConfig()
updateFirebaseJson()
const buildArgs = generateBuildArgs()
return buildArgs
}
/**
* Ensure seed data from tar files has been extracted to working folder
* NOTE - assumes tar executable exists on local machine
*/
function ensureSeedData() {
const seedFiles = fs
.readdirSync(PATHS.seedDataDir, { withFileTypes: true })
.filter((entry) => entry.isFile() && path.extname(entry.name) === '.gz')
.map((entry) => ({
name: entry.name.replace('.tar.gz', ''),
zipPath: path.resolve(PATHS.seedDataDir, entry.name),
}))
for (const seedFile of seedFiles) {
const seedFolder = path.resolve(PATHS.seedDataDir, seedFile.name)
if (!fs.existsSync(seedFolder)) {
fs.mkdirSync(seedFolder)
const cmd = `tar -xzvf "${seedFile.zipPath}" -C "${seedFolder}"`
console.log(chalk.yellow(cmd))
spawnSync(cmd, { stdio: 'inherit', shell: true })
}
}
}
/**
* Functions expect index.html to be built from frontend folder for use in SEO render functions
* Populate a placeholder if it does not exist
**/
function prepareFunctionsBuild() {
const buildIndexHtmlPath = path.resolve(PATHS.rootDir, 'build', 'index.html')
if (!fs.existsSync(buildIndexHtmlPath)) {
fs.ensureFileSync(buildIndexHtmlPath)
fs.writeFileSync(
buildIndexHtmlPath,
`<!DOCTYPE html><html lang="en"></html>`,
)
}
}
/**
* Docker image will include all current functions, so ensure compiled
* Changes can be mapped with a volume
* */
function buildFunctions() {
console.log(chalk.yellow('Building functions workspace...'))
spawnSync('yarn workspace functions build', {
stdio: 'inherit',
shell: true,
})
}
/**
* Firebase emulators may still expect credentials in case they need to access a production service where an
* emulator is not supported (e.g. some api discovery methods), with the possible exception of projects starting 'demo-'.
* Provide dummy credentials so that emulators still work when calling emulated services - these must still pass validation checks, so
* use the fake credentials from: https://github.com/google/oauth2l/blob/master/integration/fixtures/fake-service-account.json
* (production credentials could be mapped with volume if desirable)
*/
function populateDummyCredentials() {
const credentialsPath = path.resolve(
PATHS.workspaceDir,
'app',
'credentials.json',
)
const dummyCredentials = {
type: 'service_account',
// Project ID prefixed with `demo` as per https://github.com/firebase/firebase-tools/issues/5170
project_id: 'demo-community-platform-emulated',
private_key_id: 'abc',
private_key:
'-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDY3E8o1NEFcjMM\nHW/5ZfFJw29/8NEqpViNjQIx95Xx5KDtJ+nWn9+OW0uqsSqKlKGhAdAo+Q6bjx2c\nuXVsXTu7XrZUY5Kltvj94DvUa1wjNXs606r/RxWTJ58bfdC+gLLxBfGnB6CwK0YQ\nxnfpjNbkUfVVzO0MQD7UP0Hl5ZcY0Puvxd/yHuONQn/rIAieTHH1pqgW+zrH/y3c\n59IGThC9PPtugI9ea8RSnVj3PWz1bX2UkCDpy9IRh9LzJLaYYX9RUd7++dULUlat\nAaXBh1U6emUDzhrIsgApjDVtimOPbmQWmX1S60mqQikRpVYZ8u+NDD+LNw+/Eovn\nxCj2Y3z1AgMBAAECggEAWDBzoqO1IvVXjBA2lqId10T6hXmN3j1ifyH+aAqK+FVl\nGjyWjDj0xWQcJ9ync7bQ6fSeTeNGzP0M6kzDU1+w6FgyZqwdmXWI2VmEizRjwk+/\n/uLQUcL7I55Dxn7KUoZs/rZPmQDxmGLoue60Gg6z3yLzVcKiDc7cnhzhdBgDc8vd\nQorNAlqGPRnm3EqKQ6VQp6fyQmCAxrr45kspRXNLddat3AMsuqImDkqGKBmF3Q1y\nxWGe81LphUiRqvqbyUlh6cdSZ8pLBpc9m0c3qWPKs9paqBIvgUPlvOZMqec6x4S6\nChbdkkTRLnbsRr0Yg/nDeEPlkhRBhasXpxpMUBgPywKBgQDs2axNkFjbU94uXvd5\nznUhDVxPFBuxyUHtsJNqW4p/ujLNimGet5E/YthCnQeC2P3Ym7c3fiz68amM6hiA\nOnW7HYPZ+jKFnefpAtjyOOs46AkftEg07T9XjwWNPt8+8l0DYawPoJgbM5iE0L2O\nx8TU1Vs4mXc+ql9F90GzI0x3VwKBgQDqZOOqWw3hTnNT07Ixqnmd3dugV9S7eW6o\nU9OoUgJB4rYTpG+yFqNqbRT8bkx37iKBMEReppqonOqGm4wtuRR6LSLlgcIU9Iwx\nyfH12UWqVmFSHsgZFqM/cK3wGev38h1WBIOx3/djKn7BdlKVh8kWyx6uC8bmV+E6\nOoK0vJD6kwKBgHAySOnROBZlqzkiKW8c+uU2VATtzJSydrWm0J4wUPJifNBa/hVW\ndcqmAzXC9xznt5AVa3wxHBOfyKaE+ig8CSsjNyNZ3vbmr0X04FoV1m91k2TeXNod\njMTobkPThaNm4eLJMN2SQJuaHGTGERWC0l3T18t+/zrDMDCPiSLX1NAvAoGBAN1T\nVLJYdjvIMxf1bm59VYcepbK7HLHFkRq6xMJMZbtG0ryraZjUzYvB4q4VjHk2UDiC\nlhx13tXWDZH7MJtABzjyg+AI7XWSEQs2cBXACos0M4Myc6lU+eL+iA+OuoUOhmrh\nqmT8YYGu76/IBWUSqWuvcpHPpwl7871i4Ga/I3qnAoGBANNkKAcMoeAbJQK7a/Rn\nwPEJB+dPgNDIaboAsh1nZhVhN5cvdvCWuEYgOGCPQLYQF0zmTLcM+sVxOYgfy8mV\nfbNgPgsP5xmu6dw2COBKdtozw0HrWSRjACd1N4yGu75+wPCcX/gQarcjRcXXZeEa\nNtBLSfcqPULqD+h7br9lEJio\n-----END PRIVATE KEY-----\n',
client_email: '123-abc@developer.gserviceaccount.com',
client_id: '123-abc.apps.googleusercontent.com',
auth_uri: 'https://accounts.google.com/o/oauth2/auth',
token_uri: 'http://localhost:8080/token',
// token_uri: 'https://oauth2.googleapis.com/token',
auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs',
client_x509_cert_url:
'https://www.googleapis.com/robot/v1/metadata/x509/backend-functions-dev%40community-platform-emulated.iam.gserviceaccount.com',
}
fs.writeJsonSync(credentialsPath, dummyCredentials)
}
/**
* Docker build cannot access files from outside context directory so
* place temporary copy in local app folder that will be populated during build
*/
function copyAppFiles() {
const appFolder = path.resolve(PATHS.workspaceDir, 'app')
fs.ensureDirSync(appFolder)
const functionsFiles = globbySync(['functions/dist'], {
cwd: path.resolve(PATHS.rootDir),
})
/** Alternative glob pattern to match against src - requires refactor as described in readme known issues */
// const functionsFiles = globbySync(['functions/'], {
// gitignore: true,
// cwd: path.resolve(PATHS.rootDir, 'functions'),
// ignore: ['data', 'node_modules', 'scripts'],
// }).map(filename=>`functions/${filename}`)
const additionalFiles = [
'firebase.json',
'.firebaserc',
'firebase.storage.rules',
'functions/package.json',
]
const srcFiles = [...additionalFiles, ...functionsFiles]
const targetFiles = globbySync(['**'], { cwd: appFolder })
// Remove target files that no longer exist in source
targetFiles.forEach((filename) => {
if (!srcFiles.includes(filename)) {
fs.removeSync(path.resolve(appFolder, filename))
}
})
// Copy src files that do not exist in target or have been modified
for (const filename of srcFiles) {
const src = path.resolve(PATHS.rootDir, filename)
if (fs.existsSync(src)) {
const { mtime, atime } = fs.statSync(src)
const dest = path.resolve(PATHS.workspaceDir, 'app', filename)
// only copy if doesn't exist or changed. Retain timestamps for future comparison
if (
!fs.existsSync(dest) ||
fs.statSync(dest).mtime.getTime() !== mtime.getTime()
) {
fs.copySync(src, dest)
fs.utimesSync(dest, atime, mtime)
}
}
}
}
/** Update firebase json so that emulator ui host binds to docker */
function updateFirebaseJson() {
const firebaseJsonPath = path.resolve(
PATHS.workspaceDir,
'app',
'firebase.json',
)
const firebaseJson = fs.readJsonSync(firebaseJsonPath)
firebaseJson.emulators = FIREBASE_JSON_EMULATORS_DEFAULT
// HACK - Remove configured extensions as they require authenticated user to setup
// https://github.com/firebase/firebase-tools/issues/5510
// If resolved in future may consider removing and updating dockerfile to include setup, e.g.
// `firebase ext:install --local firebase/firestore-send-email`
if ('extensions' in firebaseJson) {
delete firebaseJson.extensions
}
fs.writeFileSync(firebaseJsonPath, JSON.stringify(firebaseJson, null, 2))
}
/** Populate a runtime config file to set default firebase config variables for test */
function addRuntimeConfig() {
const target = path.resolve(
PATHS.workspaceDir,
'app',
'functions',
'.runtimeconfig.json',
)
fs.writeFileSync(target, JSON.stringify(runtimeConfigTest, null, 2))
}
/**
* Generate .tar.gz files for all data in import folder (as exported from firestore)
* NOTE - whilst not used in default workflow still useful to have when testing locally
* and can be run by replacing the prepare function with `createSeedZips()`
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function createSeedZips() {
const seedFolders = fs
.readdirSync(PATHS.seedDataDir, { withFileTypes: true })
.filter((entry) => entry.isDirectory())
.map((entry) => ({
name: entry.name,
zipPath: path.resolve(PATHS.seedDataDir, `${entry.name}.tar.gz`),
folderPath: path.resolve(PATHS.seedDataDir, entry.name),
}))
for (const seedFolder of seedFolders) {
if (!fs.existsSync(seedFolder.zipPath)) {
const cmd = `tar -czvf "${seedFolder.zipPath}" -C "${seedFolder.folderPath}" .`
console.log(chalk.yellow(cmd))
spawnSync(cmd, { stdio: 'inherit', shell: true })
}
}
}
/** Create a list of args to pass into the Dockerfile build command */
function generateBuildArgs() {
const buildArgs: Record<string, string> = {}
const functionsPackageJsonPath = path.resolve(
PATHS.functionsDir,
'../package.json',
)
// assign the docker firebase-tools version as same running in local functions workspace
const functionsPackageJson = fs.readJsonSync(functionsPackageJsonPath)
buildArgs.FIREBASE_TOOLS_VERSION =
functionsPackageJson.dependencies['firebase-tools']
// assign date and git commit sha ref
buildArgs.BUILD_DATE = new Date().toISOString()
buildArgs.VCS_REF = execSync('git rev-parse HEAD').toString().trim()
// write args to file to read from dockerfile ci
fs.writeFileSync(
PATHS.buildArgsFile,
Object.entries(buildArgs)
.map(([k, v]) => `${k}=${v}`)
.join('\n'),
)
console.table(buildArgs)
return buildArgs
}
// Allow direct execution of file as well as import
if (require.main === module) {
prepare()
}

View File

@@ -1,278 +0,0 @@
import boxen from 'boxen'
import Dockerode from 'dockerode'
import { existsSync, readdirSync } from 'fs-extra'
import logUpdate from 'log-update'
import { resolve } from 'path'
import {
CONTAINER_NAME,
getFirebasePortMapping,
IMAGE_NAME,
TAG_NAME,
} from './common'
import { PATHS } from './paths'
const docker = new Dockerode()
/** Attach to running container or create if does not exist */
async function start() {
console.log('Container Start:', IMAGE_NAME)
let container: Dockerode.Container
const allContainers = await docker.listContainers()
const existingContainer = allContainers.find((c) =>
c.Names.includes(`/${CONTAINER_NAME}`),
)
if (existingContainer) {
container = docker.getContainer(existingContainer.Id)
} else {
container = await createNewContainer()
}
if (container) {
const { State } = await inspectContainer(container)
if (State.Running) {
await containerLogs(container)
attachContainer(container)
} else {
attachContainer(container)
startContainer(container)
}
} else {
console.error('Failed to create container')
process.exit(1)
}
}
/** Get latest container logs */
async function containerLogs(container: Dockerode.Container, tail?: number) {
const logs: Buffer = (await container.logs({
stderr: true,
stdout: true,
tail,
follow: false, // return as string
})) as any
console.log(logs.toString('utf8'))
}
async function inspectContainer(container: Dockerode.Container) {
const data = await container.inspect()
return data
}
/** Show log messages to indicate emulators ready for interaction and correct port */
async function handleEmulatorReady() {
console.log(
boxen(
`
🦾 Emulator Up and Running!
${TAG_NAME}
Visit: http://localhost:4001 for dashboard
`,
{
borderColor: 'magenta',
title: 'OneArmy Docker',
titleAlignment: 'center',
},
),
)
}
function attachContainer(container: Dockerode.Container) {
container.attach(
{ stream: true, stdout: true, stderr: true },
(err, stream) => {
stream.pipe(process.stdout)
// HACK - determine when functions init complete via console logs
stream.on('data', (data) => {
const msg: string = data.toString()
if (msg.includes('Issues? Report them at')) {
handleEmulatorReady()
}
})
},
)
// Handle ^C
process.on('SIGINT', () => container.stop().then(() => process.exit(0)))
}
async function createNewContainer() {
// ensure functions dist exists for binding
const functionsDistIndex = resolve(PATHS.functionsDir, 'dist', 'index.js')
if (!existsSync(functionsDistIndex)) {
console.log('Waiting for functions to be built...')
await _wait(5000)
return createNewContainer()
}
const allImages = await docker.listImages()
const existingImage = allImages.find((c) =>
c.RepoTags.includes(`${IMAGE_NAME}`),
)
// pull remote image if required
if (!existingImage) {
await pullRemoteImage(IMAGE_NAME)
}
const { ExposedPorts, PortBindings } = rewritePortMapping()
return new Promise<Dockerode.Container>((resolve, reject) => {
docker.createContainer(
{
// Image: 'goatlab/firebase-emulator:latest',
Image: IMAGE_NAME,
name: CONTAINER_NAME,
Tty: true,
ExposedPorts,
// Cmd: ['/bin/sh'], // uncomment to override default command to allow debugging on container
HostConfig: {
AutoRemove: true, // assume best to fully remove after use and provide clean environment each run
PortBindings,
Binds: createVolumeBinds(),
},
},
function (err, container) {
if (err) {
reject(err)
}
resolve(container)
},
)
})
}
async function pullRemoteImage(imageName: string) {
const updates: any = {}
return new Promise((resolve, reject) => {
docker.pull(imageName, {}, (err, stream) => {
if (err) {
handleImagePullFail(imageName)
reject(err)
}
docker.modem.followProgress(
stream,
(finishedErr, finishedResult) => {
logUpdate.done()
if (finishedErr) reject(finishedErr)
resolve(finishedResult)
},
(progress) => {
// provide console log updates in a reasonable format
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { id, progressDetail, ...update } = progress
updates[id] = update
logUpdate(
...Object.values<any>(updates).map((entry) =>
JSON.stringify(entry, null, 2).replace(/[{}"]/g, ''),
),
)
},
)
})
})
}
function startContainer(container: Dockerode.Container) {
container.start((err) => {
if (err) {
console.error(err)
process.exit(1)
}
})
}
/**
* Create a map of local files to container volume
* Files from local functions dist replace those in container to support local testing updates
* However the entire dist folder is not bound, as the container's dist also includes node_modules built for the
* specific container OS (which may differ from the local user's)
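* e.g. a single bind entry might look like <repo>/functions/dist/index.js:/app/functions/dist/index.js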
*/
function createVolumeBinds() {
const volumeBinds: string[] = []
const functionsDist = resolve(PATHS.functionsDir, 'dist')
for (const name of readdirSync(functionsDist)) {
const localPath = resolve(functionsDist, name)
const containerPath = `/app/functions/dist/${name}`
volumeBinds.push(`${localPath}:${containerPath}`)
}
return volumeBinds
}
/**
* Dockerode has a somewhat verbose method of defining ports (cf. -p 3000:3000)
* Provide an easier syntax
* Original source: https://github.com/the-concierge/concierge/blob/master/src/api/images/run.ts#L58
* Linked issue: https://github.com/moby/moby/issues/23432#issuecomment-228842172
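* Example result for port 4001: ExposedPorts { '4001/tcp': {} } and PortBindings { '4001/tcp': [{ HostPort: '4001' }] }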
**/
function rewritePortMapping() {
const portMapping = getFirebasePortMapping()
const ExposedPorts = portMapping
.filter((port) => port.expose)
.reduce((prev, curr) => {
const key = `${curr.port}/${curr.type}`
prev[key] = {}
return prev
}, {} as any)
const PortBindings = portMapping
.filter((port) => port.expose)
.reduce((prev, curr) => {
const key = `${curr.port}/${curr.type}`
const hostCfg: any = {}
// If a hostPort is specified, pass the option through to Docker
if (curr.hostPort) {
hostCfg.HostPort = curr.hostPort
}
prev[key] = [hostCfg]
return prev
}, {} as any)
return { ExposedPorts, PortBindings }
}
function handleImagePullFail(imageName: string) {
const baseName = imageName.split(':')[0]
console.log(
boxen(
`🙁 Failed to pull image
${imageName}
See list of available images at:
https://hub.docker.com/r/${baseName}/tags `,
{
borderColor: 'red',
title: 'Error',
titleAlignment: 'center',
padding: 1,
},
),
)
}
/**
* Execute an arbitrary command from within the container
* NOTE - not currently used, previously included to trigger api endpoint after load.
* Retaining for future ref
* */
// eslint-disable-next-line @typescript-eslint/no-unused-vars
function execContainerCmd(container: Dockerode.Container, Cmd: string[]) {
return container.exec(
{
Cmd,
AttachStdin: true,
AttachStderr: true,
AttachStdout: true,
},
(err, exec) => {
exec.start({ hijack: true, stdin: true }, (err, stream) => {
docker.modem.demuxStream(stream, process.stdout, process.stderr)
})
},
)
}
/** Wait an arbitrary number of milliseconds before continuing */
async function _wait(ms: number) {
return new Promise((resolve) => {
setTimeout(resolve, ms)
})
}
start()

View File

@@ -1,12 +0,0 @@
{
"compilerOptions": {
"target": "es6",
"lib": ["es6"],
"types": ["node"],
"moduleResolution": "Node",
"esModuleInterop": true,
"resolveJsonModule": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts"]
}

156
yarn.lock
View File

@@ -1920,13 +1920,6 @@ __metadata:
languageName: node
linkType: hard
"@balena/dockerignore@npm:^1.0.2":
version: 1.0.2
resolution: "@balena/dockerignore@npm:1.0.2"
checksum: 0d39f8fbcfd1a983a44bced54508471ab81aaaa40e2c62b46a9f97eac9d6b265790799f16919216db486331dedaacdde6ecbd6b7abe285d39bc50de111991699
languageName: node
linkType: hard
"@base2/pretty-print-object@npm:1.0.1":
version: 1.0.1
resolution: "@base2/pretty-print-object@npm:1.0.1"
@@ -8398,27 +8391,6 @@ __metadata:
languageName: node
linkType: hard
"@types/docker-modem@npm:*":
version: 3.0.6
resolution: "@types/docker-modem@npm:3.0.6"
dependencies:
"@types/node": "*"
"@types/ssh2": "*"
checksum: cc58e8189f6ec5a2b8ca890207402178a97ddac8c80d125dc65d8ab29034b5db736de15e99b91b2d74e66d14e26e73b6b8b33216613dd15fd3aa6b82c11a83ed
languageName: node
linkType: hard
"@types/dockerode@npm:^3.3.8":
version: 3.3.29
resolution: "@types/dockerode@npm:3.3.29"
dependencies:
"@types/docker-modem": "*"
"@types/node": "*"
"@types/ssh2": "*"
checksum: e69dc6f3c70f7a4573e61ea697cb18b89f49198afeda713f8cd862ac0f0d4b6a36b308542933a743269e9936f61ca85809a55d0c5f2ad4933244135cd25643d9
languageName: node
linkType: hard
"@types/doctrine@npm:^0.0.3":
version: 0.0.3
resolution: "@types/doctrine@npm:0.0.3"
@@ -8568,15 +8540,6 @@ __metadata:
languageName: node
linkType: hard
"@types/fs-extra@npm:^9.0.13":
version: 9.0.13
resolution: "@types/fs-extra@npm:9.0.13"
dependencies:
"@types/node": "*"
checksum: add79e212acd5ac76b97b9045834e03a7996aef60a814185e0459088fd290519a3c1620865d588fa36c4498bf614210d2a703af5cf80aa1dbc125db78f6edac3
languageName: node
linkType: hard
"@types/geojson@npm:*":
version: 7946.0.14
resolution: "@types/geojson@npm:7946.0.14"
@@ -8877,7 +8840,7 @@ __metadata:
languageName: node
linkType: hard
"@types/node@npm:^18.0.0, @types/node@npm:^18.11.18":
"@types/node@npm:^18.0.0":
version: 18.19.33
resolution: "@types/node@npm:18.19.33"
dependencies:
@@ -9202,15 +9165,6 @@ __metadata:
languageName: node
linkType: hard
"@types/ssh2@npm:*":
version: 1.15.0
resolution: "@types/ssh2@npm:1.15.0"
dependencies:
"@types/node": ^18.11.18
checksum: d1c82b3fd1fee59d102fad44932c2f8bf6047506b9ca20856eed7484b1466a9901a9a3fbbfe41d7de71e8882b4cd5f634624773e69d63f0b8ab83a7a85731dce
languageName: node
linkType: hard
"@types/stack-utils@npm:^2.0.0":
version: 2.0.3
resolution: "@types/stack-utils@npm:2.0.3"
@@ -11092,7 +11046,7 @@ __metadata:
languageName: node
linkType: hard
"asn1@npm:^0.2.6, asn1@npm:~0.2.3":
"asn1@npm:~0.2.3":
version: 0.2.6
resolution: "asn1@npm:0.2.6"
dependencies:
@@ -11591,7 +11545,7 @@ __metadata:
languageName: node
linkType: hard
"bcrypt-pbkdf@npm:^1.0.0, bcrypt-pbkdf@npm:^1.0.2":
"bcrypt-pbkdf@npm:^1.0.0":
version: 1.0.2
resolution: "bcrypt-pbkdf@npm:1.0.2"
dependencies:
@@ -11713,7 +11667,7 @@ __metadata:
languageName: node
linkType: hard
"boxen@npm:^5.0.0, boxen@npm:^5.1.2":
"boxen@npm:^5.0.0":
version: 5.1.2
resolution: "boxen@npm:5.1.2"
dependencies:
@@ -11912,13 +11866,6 @@ __metadata:
languageName: node
linkType: hard
"buildcheck@npm:~0.0.6":
version: 0.0.6
resolution: "buildcheck@npm:0.0.6"
checksum: ad61759dc98d62e931df2c9f54ccac7b522e600c6e13bdcfdc2c9a872a818648c87765ee209c850f022174da4dd7c6a450c00357c5391705d26b9c5807c2a076
languageName: node
linkType: hard
"builtin-modules@npm:^3.3.0":
version: 3.3.0
resolution: "builtin-modules@npm:3.3.0"
@@ -13423,17 +13370,6 @@ __metadata:
languageName: node
linkType: hard
"cpu-features@npm:~0.0.9":
version: 0.0.10
resolution: "cpu-features@npm:0.0.10"
dependencies:
buildcheck: ~0.0.6
nan: ^2.19.0
node-gyp: latest
checksum: ab17e25cea0b642bdcfd163d3d872be4cc7d821e854d41048557799e990d672ee1cc7bd1d4e7c4de0309b1683d4c001d36ba8569b5035d1e7e2ff2d681f681d7
languageName: node
linkType: hard
"crc-32@npm:^1.2.0":
version: 1.2.2
resolution: "crc-32@npm:1.2.2"
@@ -14582,29 +14518,6 @@ __metadata:
languageName: node
linkType: hard
"docker-modem@npm:^3.0.0":
version: 3.0.8
resolution: "docker-modem@npm:3.0.8"
dependencies:
debug: ^4.1.1
readable-stream: ^3.5.0
split-ca: ^1.0.1
ssh2: ^1.11.0
checksum: e3675c9b1ad800be8fb1cb9c5621fbef20a75bfedcd6e01b69808eadd7f0165681e4e30d1700897b788a67dbf4769964fcccd19c3d66f6d2499bb7aede6b34df
languageName: node
linkType: hard
"dockerode@npm:^3.3.5":
version: 3.3.5
resolution: "dockerode@npm:3.3.5"
dependencies:
"@balena/dockerignore": ^1.0.2
docker-modem: ^3.0.0
tar-fs: ~2.0.1
checksum: 7f6650422b07fa7ea9d5801f04b1a432634446b5fe37b995b8302b953b64e93abf1bb4596c2fb574ba47aafee685ef2ab959cc86c9654add5a26d09541bbbcc6
languageName: node
linkType: hard
"doctrine@npm:^2.1.0":
version: 2.1.0
resolution: "doctrine@npm:2.1.0"
@@ -17303,7 +17216,7 @@ __metadata:
languageName: node
linkType: hard
"fs-extra@npm:^10.0.0, fs-extra@npm:^10.0.1, fs-extra@npm:^10.1.0":
"fs-extra@npm:^10.0.0, fs-extra@npm:^10.1.0":
version: 10.1.0
resolution: "fs-extra@npm:10.1.0"
dependencies:
@@ -17409,7 +17322,7 @@ __metadata:
languageName: node
linkType: hard
"functions@workspace:*, functions@workspace:functions":
"functions@workspace:functions":
version: 0.0.0-use.local
resolution: "functions@workspace:functions"
dependencies:
@@ -23343,7 +23256,7 @@ __metadata:
languageName: node
linkType: hard
"nan@npm:^2.18.0, nan@npm:^2.19.0":
"nan@npm:^2.19.0":
version: 2.19.0
resolution: "nan@npm:2.19.0"
dependencies:
@@ -23795,23 +23708,6 @@ __metadata:
languageName: unknown
linkType: soft
"oa-emulators-docker@workspace:packages/emulators-docker":
version: 0.0.0-use.local
resolution: "oa-emulators-docker@workspace:packages/emulators-docker"
dependencies:
"@types/dockerode": ^3.3.8
"@types/fs-extra": ^9.0.13
boxen: ^5.1.2
dockerode: ^3.3.5
fs-extra: ^10.0.1
functions: "workspace:*"
globby: ^11.0.2
log-update: ^4.0.0
ts-node: ^10.7.0
typescript: ^5.1.6
languageName: unknown
linkType: soft
"oa-scripts@workspace:scripts":
version: 0.0.0-use.local
resolution: "oa-scripts@workspace:scripts"
@@ -28444,13 +28340,6 @@ __metadata:
languageName: node
linkType: hard
"split-ca@npm:^1.0.1":
version: 1.0.1
resolution: "split-ca@npm:1.0.1"
checksum: 1e7409938a95ee843fe2593156a5735e6ee63772748ee448ea8477a5a3e3abde193c3325b3696e56a5aff07c7dcf6b1f6a2f2a036895b4f3afe96abb366d893f
languageName: node
linkType: hard
"split-string@npm:^3.0.1, split-string@npm:^3.0.2":
version: 3.1.0
resolution: "split-string@npm:3.1.0"
@@ -28490,23 +28379,6 @@ __metadata:
languageName: node
linkType: hard
"ssh2@npm:^1.11.0":
version: 1.15.0
resolution: "ssh2@npm:1.15.0"
dependencies:
asn1: ^0.2.6
bcrypt-pbkdf: ^1.0.2
cpu-features: ~0.0.9
nan: ^2.18.0
dependenciesMeta:
cpu-features:
optional: true
nan:
optional: true
checksum: 56baa07dc0dd8d97aefa05033b8a95d220a34b2f203aa9116173d7adc5e9fd46be22d7cfed99cdd9f5548862ae44abd1ec136e20ea856d5c470a0df0e5aea9d1
languageName: node
linkType: hard
"sshpk@npm:^1.14.1, sshpk@npm:^1.7.0":
version: 1.18.0
resolution: "sshpk@npm:1.18.0"
@@ -29258,19 +29130,7 @@ __metadata:
languageName: node
linkType: hard
"tar-fs@npm:~2.0.1":
version: 2.0.1
resolution: "tar-fs@npm:2.0.1"
dependencies:
chownr: ^1.1.1
mkdirp-classic: ^0.5.2
pump: ^3.0.0
tar-stream: ^2.0.0
checksum: 26cd297ed2421bc8038ce1a4ca442296b53739f409847d495d46086e5713d8db27f2c03ba2f461d0f5ddbc790045628188a8544f8ae32cbb6238b279b68d0247
languageName: node
linkType: hard
"tar-stream@npm:^2.0.0, tar-stream@npm:^2.1.4, tar-stream@npm:^2.2.0":
"tar-stream@npm:^2.1.4, tar-stream@npm:^2.2.0":
version: 2.2.0
resolution: "tar-stream@npm:2.2.0"
dependencies: