diff --git a/node_modules/.bin/color-support b/node_modules/.bin/color-support deleted file mode 120000 index 59e65069..00000000 --- a/node_modules/.bin/color-support +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../color-support/bin.js" "$@" -else - exec node "$basedir/../color-support/bin.js" "$@" -fi diff --git a/node_modules/.bin/node-pre-gyp b/node_modules/.bin/node-pre-gyp deleted file mode 120000 index 004c3be1..00000000 --- a/node_modules/.bin/node-pre-gyp +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@" -else - exec node "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@" -fi diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt deleted file mode 120000 index f1ec43bc..00000000 --- a/node_modules/.bin/nopt +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@" -else - exec node "$basedir/../nopt/bin/nopt.js" "$@" -fi diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index b11c890d..5f8d43da 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -84,6 +84,31 @@ "fast-uri": "^2.0.0" } }, + "node_modules/@adminjs/fastify/node_modules/@fastify/formbody": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", + "integrity": 
"sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "dependencies": { + "fast-querystring": "^1.0.0", + "fastify-plugin": "^4.0.0" + } + }, + "node_modules/@adminjs/fastify/node_modules/@fastify/multipart": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.7.3.tgz", + "integrity": "sha512-MG4Gd9FNEXc8qx0OgqoXM10EGO/dN/0iVQ8SrpFMU3d6F6KUfcqD2ZyoQhkm9LWrbiMgdHv5a43x78lASdn5GA==", + "dependencies": { + "@fastify/busboy": "^1.0.0", + "@fastify/deepmerge": "^1.0.0", + "@fastify/error": "^3.0.0", + "@fastify/swagger": "^8.3.1", + "@fastify/swagger-ui": "^1.8.0", + "end-of-stream": "^1.4.4", + "fastify-plugin": "^4.0.0", + "secure-json-parse": "^2.4.0", + "stream-wormhole": "^1.1.0" + } + }, "node_modules/@adminjs/fastify/node_modules/avvio": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/avvio/-/avvio-8.2.0.tgz", @@ -150,6 +175,17 @@ "node": ">=10" } }, + "node_modules/@adminjs/fastify/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/@adminjs/fastify/node_modules/pino": { "version": "8.8.0", "resolved": "https://registry.npmjs.org/pino/-/pino-8.8.0.tgz", @@ -2170,6 +2206,14 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.47.tgz", "integrity": "sha512-OuJi8bIng4wYHHA3YpKauL58dZrPxro3d0tabPHyiNF8rKfGKuVfr83oFlPLmKri1cX+Z3cJP39GXmnqkP11Gw==" }, + "node_modules/@fastify/accept-negotiator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@fastify/accept-negotiator/-/accept-negotiator-1.1.0.tgz", + "integrity": "sha512-OIHZrb2ImZ7XG85HXOONLcJWGosv7sIvM2ifAPQVhg9Lv7qdmMBNVaai4QTdyuaqbKM5eO6sLSQOYI7wEQeCJQ==", + "engines": { + "node": 
">=14" + } + }, "node_modules/@fastify/ajv-compiler": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-1.1.0.tgz", @@ -2264,14 +2308,29 @@ } }, "node_modules/@fastify/formbody": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", - "integrity": "sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-8.0.2.tgz", + "integrity": "sha512-84v5J2KrkXzjgBpYnaNRPqwgMsmY7ZDjuj0YVuMR3NXCJRCgKEZy/taSP1wUYGn0onfxJpLyRGDLa+NMaDJtnA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], "dependencies": { - "fast-querystring": "^1.0.0", - "fastify-plugin": "^4.0.0" + "fast-querystring": "^1.1.2", + "fastify-plugin": "^5.0.0" } }, + "node_modules/@fastify/formbody/node_modules/fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": "sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + }, "node_modules/@fastify/jwt": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/@fastify/jwt/-/jwt-6.5.0.tgz", @@ -2285,18 +2344,103 @@ } }, "node_modules/@fastify/multipart": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.3.0.tgz", - "integrity": "sha512-tbzQiRFxoADCn0G10CqiQ/nDWWcfegtwg826Pfz2h7+XvuqJhGnko0TbafrWIY7hnGD+sNCGMdiTVsxxs6zigA==", + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-9.0.3.tgz", + "integrity": "sha512-pJogxQCrT12/6I5Fh6jr3narwcymA0pv4B0jbC7c6Bl9wnrxomEUnV0d26w6gUls7gSXmhG8JGRMmHFIPsxt1g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" 
+ }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], "dependencies": { - "@fastify/busboy": "^1.0.0", - "@fastify/deepmerge": "^1.0.0", - "@fastify/error": "^3.0.0", - "end-of-stream": "^1.4.4", - "fastify-plugin": "^4.0.0", - "hexoid": "^1.0.0", - "secure-json-parse": "^2.4.0", - "stream-wormhole": "^1.1.0" + "@fastify/busboy": "^3.0.0", + "@fastify/deepmerge": "^2.0.0", + "@fastify/error": "^4.0.0", + "fastify-plugin": "^5.0.0", + "secure-json-parse": "^3.0.0" + } + }, + "node_modules/@fastify/multipart/node_modules/@fastify/busboy": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", + "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==" + }, + "node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.2.tgz", + "integrity": "sha512-3wuLdX5iiiYeZWP6bQrjqhrcvBIf0NHbQH1Ur1WbHvoiuTYUEItgygea3zs8aHpiitn0lOB8gX20u1qO+FDm7Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/multipart/node_modules/@fastify/error": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz", + "integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/multipart/node_modules/fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": 
"sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + }, + "node_modules/@fastify/multipart/node_modules/secure-json-parse": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-3.0.2.tgz", + "integrity": "sha512-H6nS2o8bWfpFEV6U38sOSjS7bTbdgbCGU9wEM6W14P5H0QOsz94KCusifV44GpHDTu2nqZbuDNhTzu+mjDSw1w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/send": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@fastify/send/-/send-2.1.0.tgz", + "integrity": "sha512-yNYiY6sDkexoJR0D8IDy3aRP3+L4wdqCpvx5WP+VtEU58sn7USmKynBzDQex5X42Zzvw2gNzzYgP90UfWShLFA==", + "dependencies": { + "@lukeed/ms": "^2.0.1", + "escape-html": "~1.0.3", + "fast-decode-uri-component": "^1.0.1", + "http-errors": "2.0.0", + "mime": "^3.0.0" + } + }, + "node_modules/@fastify/send/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" } }, "node_modules/@fastify/session": { @@ -2315,6 +2459,129 @@ "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-3.0.1.tgz", "integrity": "sha512-qKcDXmuZadJqdTm6vlCqioEbyewF60b/0LOFCcYN1B6BIZGlYJumWWOYs70SFYLDAH4YqdE1cxH/RKMG7rFxgA==" }, + "node_modules/@fastify/static": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@fastify/static/-/static-6.12.0.tgz", + "integrity": "sha512-KK1B84E6QD/FcQWxDI2aiUCwHxMJBI1KeCUzm1BwYpPY1b742+jeKruGHP2uOluuM6OkBPI8CIANrXcCRtC2oQ==", + "dependencies": { + "@fastify/accept-negotiator": "^1.0.0", + "@fastify/send": "^2.0.0", + "content-disposition": "^0.5.3", + "fastify-plugin": "^4.0.0", + 
"glob": "^8.0.1", + "p-limit": "^3.1.0" + } + }, + "node_modules/@fastify/static/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@fastify/static/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@fastify/static/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@fastify/swagger": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/@fastify/swagger/-/swagger-8.15.0.tgz", + "integrity": "sha512-zy+HEEKFqPMS2sFUsQU5X0MHplhKJvWeohBwTCkBAJA/GDYGLGUWQaETEhptiqxK7Hs0fQB9B4MDb3pbwIiCwA==", + "dependencies": { + "fastify-plugin": "^4.0.0", + "json-schema-resolver": "^2.0.0", + "openapi-types": "^12.0.0", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + } + }, + "node_modules/@fastify/swagger-ui": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@fastify/swagger-ui/-/swagger-ui-1.10.2.tgz", + "integrity": 
"sha512-f2mRqtblm6eRAFQ3e8zSngxVNEtiYY7rISKQVjPA++ZsWc5WYlPVTb6Bx0G/zy0BIoucNqDr/Q2Vb/kTYkOq1A==", + "dependencies": { + "@fastify/static": "^6.0.0", + "fastify-plugin": "^4.0.0", + "openapi-types": "^12.0.2", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + } + }, + "node_modules/@fastify/swagger-ui/node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "node_modules/@fastify/swagger-ui/node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/@fastify/swagger/node_modules/json-schema-resolver": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-resolver/-/json-schema-resolver-2.0.0.tgz", + "integrity": "sha512-pJ4XLQP4Q9HTxl6RVDLJ8Cyh1uitSs0CzDBAz1uoJ4sRD/Bk7cFSXL1FUXDW3zJ7YnfliJx6eu8Jn283bpZ4Yg==", + "dependencies": { + "debug": "^4.1.1", + "rfdc": "^1.1.4", + "uri-js": "^4.2.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/Eomm/json-schema-resolver?sponsor=1" + } + }, + "node_modules/@fastify/swagger/node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "node_modules/@fastify/swagger/node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "bin": { + "yaml": "bin.mjs" + }, + 
"engines": { + "node": ">= 14.6" + } + }, "node_modules/@fastify/view": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@fastify/view/-/view-7.3.0.tgz", @@ -2325,79 +2592,94 @@ } }, "node_modules/@firebase/app-check-interop-types": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.2.tgz", - "integrity": "sha512-LMs47Vinv2HBMZi49C09dJxp0QT5LwDzFaVGf/+ITHe3BlIhUiLNttkATSXplc89A2lAaeTqjgqVkiRfUGyQiQ==" + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.3.tgz", + "integrity": "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A==" }, "node_modules/@firebase/app-types": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.2.tgz", - "integrity": "sha512-oMEZ1TDlBz479lmABwWsWjzHwheQKiAgnuKxE0pz0IXCVx7/rtlkx1fQ6GfgK24WCrxDKMplZrT50Kh04iMbXQ==" + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.3.tgz", + "integrity": "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw==" }, "node_modules/@firebase/auth-interop-types": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.3.tgz", - "integrity": "sha512-Fc9wuJGgxoxQeavybiuwgyi+0rssr76b+nHpj+eGhXFYAdudMWyfBHvFL/I5fEHniUM/UQdFzi9VXJK2iZF7FQ==" + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.4.tgz", + "integrity": "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA==" }, "node_modules/@firebase/component": { - "version": "0.6.8", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.8.tgz", - "integrity": 
"sha512-LcNvxGLLGjBwB0dJUsBGCej2fqAepWyBubs4jt1Tiuns7QLbXHuyObZ4aMeBjZjWx4m8g1LoVI9QFpSaq/k4/g==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.11.tgz", + "integrity": "sha512-eQbeCgPukLgsKD0Kw5wQgsMDX5LeoI1MIrziNDjmc6XDq5ZQnuUymANQgAb2wp1tSF9zDSXyxJmIUXaKgN58Ug==", "dependencies": { - "@firebase/util": "1.9.7", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.7.tgz", - "integrity": "sha512-wjXr5AO8RPxVVg7rRCYffT7FMtBjHRfJ9KMwi19MbOf0vBf0H9YqW3WCgcnLpXI6ehiUcU3z3qgPnnU0nK6SnA==", - "dependencies": { - "@firebase/app-check-interop-types": "0.3.2", - "@firebase/auth-interop-types": "0.2.3", - "@firebase/component": "0.6.8", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.10.tgz", + "integrity": "sha512-sWp2g92u7xT4BojGbTXZ80iaSIaL6GAL0pwvM0CO/hb0nHSnABAqsH7AhnWGsGvXuEvbPr7blZylPaR9J+GSuQ==", + "dependencies": { + "@firebase/app-check-interop-types": "0.3.3", + "@firebase/auth-interop-types": "0.2.4", + "@firebase/component": "0.6.11", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "faye-websocket": "0.11.4", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database-compat": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-1.0.7.tgz", - "integrity": "sha512-R/3B+VVzEFN5YcHmfWns3eitA8fHLTL03io+FIoMcTYkajFnrBdS3A+g/KceN9omP7FYYYGTQWF9lvbEx6eMEg==", - "dependencies": { - "@firebase/component": "0.6.8", - "@firebase/database": "1.0.7", - "@firebase/database-types": "1.0.4", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/@firebase/database-compat/-/database-compat-2.0.1.tgz", + "integrity": "sha512-IsFivOjdE1GrjTeKoBU/ZMenESKDXidFDzZzHBPQ/4P20ptGdrl3oLlWrV/QJqJ9lND4IidE3z4Xr5JyfUW1vg==", + "dependencies": { + "@firebase/component": "0.6.11", + "@firebase/database": "1.0.10", + "@firebase/database-types": "1.0.7", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database-types": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.4.tgz", - "integrity": "sha512-mz9ZzbH6euFXbcBo+enuJ36I5dR5w+enJHHjy9Y5ThCdKUseqfDjW3vCp1YxE9zygFCSjJJ/z1cQ+zodvUcwPQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.7.tgz", + "integrity": "sha512-I7zcLfJXrM0WM+ksFmFdAMdlq/DFmpeMNa+/GNsLyFo5u/lX5zzkPzGe3srVWqaBQBY5KprylDGxOsP6ETfL0A==", "dependencies": { - "@firebase/app-types": "0.9.2", - "@firebase/util": "1.9.7" + "@firebase/app-types": "0.9.3", + "@firebase/util": "1.10.2" } }, "node_modules/@firebase/logger": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.2.tgz", - "integrity": "sha512-Q1VuA5M1Gjqrwom6I6NUU4lQXdo9IAQieXlujeHZWvRt1b7qQ0KwBaNAjgxG27jgF9/mUwsNmO8ptBCGVYhB0A==", + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.4.tgz", + "integrity": "sha512-mH0PEh1zoXGnaR8gD1DeGeNZtWFKbnz9hDO91dIml3iou1gpOnLqXQ2dJfB71dj6dpmUjcQ6phY3ZZJbjErr9g==", "dependencies": { "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/util": { - "version": "1.9.7", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.9.7.tgz", - "integrity": "sha512-fBVNH/8bRbYjqlbIhZ+lBtdAAS4WqZumx03K06/u7fJSpz1TGjEMm1ImvKD47w+xaFKIP2ori6z8BrbakRfjJA==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.10.2.tgz", 
+ "integrity": "sha512-qnSHIoE9FK+HYnNhTI8q14evyqbc/vHRivfB4TgCIUOl4tosmKSQlp7ltymOlMP4xVIJTg5wrkfcZ60X4nUf7Q==", "dependencies": { "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@floating-ui/core": { @@ -2414,11 +2696,12 @@ } }, "node_modules/@google-cloud/firestore": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.9.0.tgz", - "integrity": "sha512-c4ALHT3G08rV7Zwv8Z2KG63gZh66iKdhCBeDfCpIkLrjX6EAjTD/szMdj14M+FnQuClZLFfW5bAgoOjfNmLtJg==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.0.tgz", + "integrity": "sha512-88uZ+jLsp1aVMj7gh3EKYH1aulTAMFAp8sH/v5a9w8q8iqSG27RiWLoxSAFr/XocZ9hGiWH1kEnBw+zl3xAgNA==", "optional": true, "dependencies": { + "@opentelemetry/api": "^1.3.0", "fast-deep-equal": "^3.1.1", "functional-red-black-tree": "^1.0.1", "google-gax": "^4.3.3", @@ -2494,10 +2777,21 @@ "node": ">=10.0.0" } }, + "node_modules/@google-cloud/storage/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/@grpc/grpc-js": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.11.1.tgz", - "integrity": "sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==", + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", "optional": true, "dependencies": { "@grpc/proto-loader": "^0.7.13", @@ -2638,56 +2932,13 @@ "node": ">=8" } }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": 
"https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "bin": { - "semver": "bin/semver.js" - }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "optional": true, "engines": { - "node": ">=10" + "node": ">=8.0.0" } }, "node_modules/@popperjs/core": { @@ -3599,11 +3850,11 @@ } }, "node_modules/@types/node": { - 
"version": "22.5.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.0.tgz", - "integrity": "sha512-DkFrJOe+rfdHTqqMg0bSNlGlQ85hSoh2TPzZyhHsXnMtligRWpxUySiyw8FY14ITt24HVCiQPWxS3KO/QlGmWg==", + "version": "22.10.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.6.tgz", + "integrity": "sha512-qNiuwC4ZDAUNcY47xgaSuS92cjf8JbSUoaKS77bmLG1rU7MlATVSiw/IlrjtIyyskXBZ8KkNfjK/P5na7rgXbQ==", "dependencies": { - "undici-types": "~6.19.2" + "undici-types": "~6.20.0" } }, "node_modules/@types/parse-json": { @@ -3860,6 +4111,17 @@ "node": ">= 6" } }, + "node_modules/adminjs/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -3946,11 +4208,6 @@ "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==" }, - "node_modules/aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, "node_modules/archiver": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.1.tgz", @@ -4025,19 +4282,6 @@ "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==" }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - 
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -4170,6 +4414,17 @@ "node": ">= 6" } }, + "node_modules/axios/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/babel-plugin-macros": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", @@ -4263,19 +4518,6 @@ } ] }, - "node_modules/bcrypt": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", - "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", - "hasInstallScript": true, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", - "node-addon-api": "^5.0.0" - }, - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -4284,6 +4526,14 @@ "tweetnacl": "^0.14.3" } }, + "node_modules/bcryptjs": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.2.tgz", + "integrity": "sha512-k38b3XOZKv60C4E2hVsXTolJWfkGRMbILBIe2IBITXciy5bOsTKot5kDrf3ZfufQtQOUN5mXceUEpU1rTl9Uog==", + "bin": { + "bcrypt": "bin/bcrypt" + } + }, "node_modules/big-integer": { "version": "1.6.51", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", @@ -4665,14 
+4915,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": ">=10" - } - }, "node_modules/classnames": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", @@ -4803,14 +5045,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "bin": { - "color-support": "bin.js" - } - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -4884,11 +5118,6 @@ "typedarray": "^0.0.6" } }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" - }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -5218,11 +5447,6 @@ "node": ">=0.4.0" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" - }, "node_modules/denque": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", @@ -5253,14 
+5477,6 @@ "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/detect-libc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", - "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", - "engines": { - "node": ">=8" - } - }, "node_modules/dezalgo": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", @@ -5390,7 +5606,8 @@ "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "optional": true }, "node_modules/encodeurl": { "version": "1.0.2", @@ -5702,9 +5919,9 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, "node_modules/fast-querystring": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.0.0.tgz", - "integrity": "sha512-3LQi62IhQoDlmt4ULCYmh17vRO2EtS7hTSsG4WwoKWgV7GLMKBOecEh+aiavASnLx8I2y89OD33AGLo0ccRhzA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", "dependencies": { "fast-decode-uri-component": "^1.0.1" } @@ -5745,9 +5962,9 @@ "integrity": "sha512-cIusKBIt/R/oI6z/1nyfe2FvGKVTohVRfvkOhvx0nCEW+xf5NoCXjAHcWp93uOUBchzYcsvPlrapAdX1uW+YGg==" }, "node_modules/fast-xml-parser": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", - "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "version": "4.5.1", + 
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.1.tgz", + "integrity": "sha512-y655CeyUQ+jj7KBbYMc4FG01V8ZQqjN+gDYGJ50RtfsUB8iG9AmwmwoAgeKLJdmueKKMrH1RJ7yXHTSoczdv5w==", "funding": [ { "type": "github", @@ -6038,6 +6255,7 @@ "version": "5.4.0", "resolved": "https://registry.npmjs.org/fastify-multipart/-/fastify-multipart-5.4.0.tgz", "integrity": "sha512-Pafy4mtcuFUnFM/t0kgCdL854KIEoDymNVdv4nD7uBfV7lBCQq/NVEuNnaNXAbuCTpeXzYRzi50lSDa9ZM838A==", + "deprecated": "Please use @fastify/multipart@6.0.0 instead", "dependencies": { "fastify-multipart-deprecated": "npm:fastify-multipart@5.3.1", "process-warning": "^1.0.0" @@ -6277,26 +6495,27 @@ } }, "node_modules/firebase-admin": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-12.3.1.tgz", - "integrity": "sha512-vEr3s3esl8nPIA9r/feDT4nzIXCfov1CyyCSpMQWp6x63Q104qke0MEGZlrHUZVROtl8FLus6niP/M9I1s4VBA==", + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-13.0.2.tgz", + "integrity": "sha512-YWVpoN+tZVSRXF0qC0gojoF5bSqvBRbnBk8+xUtFiguM2L4vB7f0moAwV1VVWDDHvTnvQ68OyTMpdp6wKo/clw==", "dependencies": { "@fastify/busboy": "^3.0.0", - "@firebase/database-compat": "^1.0.2", - "@firebase/database-types": "^1.0.0", - "@types/node": "^22.0.1", + "@firebase/database-compat": "^2.0.0", + "@firebase/database-types": "^1.0.6", + "@types/node": "^22.8.7", "farmhash-modern": "^1.1.0", + "google-auth-library": "^9.14.2", "jsonwebtoken": "^9.0.0", "jwks-rsa": "^3.1.0", "node-forge": "^1.3.1", - "uuid": "^10.0.0" + "uuid": "^11.0.2" }, "engines": { - "node": ">=14" + "node": ">=18" }, "optionalDependencies": { - "@google-cloud/firestore": "^7.7.0", - "@google-cloud/storage": "^7.7.0" + "@google-cloud/firestore": "^7.11.0", + "@google-cloud/storage": "^7.14.0" } }, "node_modules/firebase-admin/node_modules/@fastify/busboy": { @@ -6336,9 +6555,9 @@ } }, "node_modules/firebase-admin/node_modules/@google-cloud/storage": { - 
"version": "7.12.1", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.12.1.tgz", - "integrity": "sha512-Z3ZzOnF3YKLuvpkvF+TjQ6lztxcAyTILp+FjKonmVpEwPa9vFvxpZjubLR4sB6bf19i/8HL2AXRjA0YFgHFRmQ==", + "version": "7.15.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.15.0.tgz", + "integrity": "sha512-/j/+8DFuEOo33fbdX0V5wjooOoFahEaMEdImHBmM2tH9MPHJYNtmXOf2sGUmZmiufSukmBEvdlzYgDkkgeBiVQ==", "optional": true, "dependencies": { "@google-cloud/paginator": "^5.0.0", @@ -6383,7 +6602,6 @@ "version": "6.7.1", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", - "optional": true, "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", @@ -6396,24 +6614,19 @@ } }, "node_modules/firebase-admin/node_modules/gaxios/node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "optional": true, - "dependencies": { - "debug": "^4.3.4" - }, + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "engines": { "node": ">= 14" } }, "node_modules/firebase-admin/node_modules/gaxios/node_modules/https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", - "optional": true, + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": 
"sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "dependencies": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { @@ -6428,7 +6641,6 @@ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], - "optional": true, "bin": { "uuid": "dist/bin/uuid" } @@ -6437,7 +6649,6 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", - "optional": true, "dependencies": { "gaxios": "^6.0.0", "json-bigint": "^1.0.0" @@ -6447,10 +6658,9 @@ } }, "node_modules/firebase-admin/node_modules/google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", - "optional": true, + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "dependencies": { "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", @@ -6467,7 +6677,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "dependencies": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -6477,7 +6686,6 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "optional": true, "dependencies": { "gaxios": "^6.0.0", "jws": "^4.0.0" @@ -6490,7 +6698,6 @@ "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "dependencies": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -6535,7 +6742,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", - "optional": true, "dependencies": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", @@ -6609,15 +6815,15 @@ } }, "node_modules/firebase-admin/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.5.tgz", + "integrity": "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==", "funding": [ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], "bin": { - "uuid": "dist/bin/uuid" + "uuid": "dist/esm/bin/uuid" } }, "node_modules/flat": { @@ -6676,6 +6882,17 @@ "node": ">= 0.12" } }, + "node_modules/form-data/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/formidable": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.1.tgz", @@ -6758,33 +6975,6 @@ "node": ">=6 <7 || >=8" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs-minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -6870,26 +7060,6 @@ "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", "optional": true }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/gaxios": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", @@ -7084,9 +7254,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/google-gax": { - "version": 
"4.3.9", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.3.9.tgz", - "integrity": "sha512-tcjQr7sXVGMdlvcG25wSv98ap1dtF4Z6mcV0rztGIddOcezw4YMb/uTXg72JPrLep+kXcVjaJjg6oo3KLf4itQ==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", "optional": true, "dependencies": { "@grpc/grpc-js": "^1.10.9", @@ -7116,13 +7286,10 @@ } }, "node_modules/google-gax/node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "optional": true, - "dependencies": { - "debug": "^4.3.4" - }, "engines": { "node": ">= 14" } @@ -7157,9 +7324,9 @@ } }, "node_modules/google-gax/node_modules/google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "optional": true, "dependencies": { "base64-js": "^1.3.0", @@ -7213,12 +7380,12 @@ } }, "node_modules/google-gax/node_modules/https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": 
"sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "optional": true, "dependencies": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { @@ -7468,11 +7635,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" - }, "node_modules/hashlru": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", @@ -7586,9 +7748,9 @@ } }, "node_modules/http-parser-js": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", - "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.9.tgz", + "integrity": "sha512-n1XsPy3rXVxlqxVioEWdC+0+M+SQw0DpJynwtOPo1X+ZlvdzTLtDBIJJlDQTnwZIFJrZSzSGmIOUdP8tu+SgLw==" }, "node_modules/http-proxy-agent": { "version": "4.0.1", @@ -7838,6 +8000,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "optional": true, "engines": { "node": ">=8" } @@ -8465,9 +8628,9 @@ } }, "node_modules/long": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", - "integrity": 
"sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==", "optional": true }, "node_modules/long-timeout": { @@ -8617,16 +8780,24 @@ } }, "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", "dependencies": { - "mime-db": "1.52.0" + "mime-db": "^1.54.0" }, "engines": { "node": ">= 0.6" } }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -8659,42 +8830,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dependencies": { - "minipass": "^3.0.0", 
- "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -9083,9 +9218,10 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/multer": { - "version": "1.4.5-lts.1", - "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", - "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "version": "1.4.5-lts.2", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz", + "integrity": "sha512-VzGiVigcG9zUAoCNU+xShztrlr1auZOlurXynNvO9GiWD1/mTBbUljOKY+qMeazBqXgRnjzeEgJI/wyjJUHg9A==", + "deprecated": "Multer 1.x is impacted by a number of vulnerabilities, which have been patched in 2.x. 
You should upgrade to the latest 2.x version.", "dependencies": { "append-field": "^1.0.0", "busboy": "^1.0.0", @@ -9227,15 +9363,10 @@ "node": ">= 0.4.0" } }, - "node_modules/node-addon-api": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", - "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" - }, "node_modules/node-cron": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.2.tgz", - "integrity": "sha512-iP8l0yGlNpE0e6q1o185yOApANRe47UPbLf4YxfbiNHt/RU5eBcGB/e0oudruheSf+LQeDMezqC5BVAb5wwRcQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.3.tgz", + "integrity": "sha512-dOal67//nohNgYWb+nWmg5dkFdIwDm8EpeGYMekPMrngV3637lqnX0lbUcCtgibHTz6SEz7DAIjKvKDFYCnO1A==", "dependencies": { "uuid": "8.3.2" }, @@ -9359,20 +9490,6 @@ "node": ">=4" } }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -9381,18 +9498,6 @@ "node": ">=0.10.0" } }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, "node_modules/number-allocator": { "version": "1.0.14", "resolved": 
"https://registry.npmjs.org/number-allocator/-/number-allocator-1.0.14.tgz", @@ -10099,9 +10204,9 @@ } }, "node_modules/protobufjs": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.3.tgz", - "integrity": "sha512-HaYi2CVjiPoBR1d2zTVKVHXr9IUnpJizCjUu19vxdD3B8o4z+vfOHpIEB1358w8nv8dfUNEfDHFvMsH7QlLt/Q==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", "hasInstallScript": true, "optional": true, "dependencies": { @@ -10728,6 +10833,17 @@ "node": ">= 0.12" } }, + "node_modules/request/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/request/node_modules/qs": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", @@ -11092,11 +11208,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" - }, "node_modules/set-cookie-parser": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.5.1.tgz", @@ -11407,6 +11518,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "optional": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -11576,22 +11688,6 @@ "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", @@ -11617,11 +11713,6 @@ "readable-stream": "^3.4.0" } }, - "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/teeny-request": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", @@ -12011,6 +12102,17 @@ "npm": ">=1.3.7" } }, + "node_modules/twilio/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/twilio/node_modules/oauth-sign": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", @@ -12148,6 +12250,17 @@ "node": ">= 0.6" } }, + "node_modules/type-is/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + 
}, "node_modules/typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -12175,9 +12288,9 @@ "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" }, "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" }, "node_modules/unescape": { "version": "1.0.1", @@ -12551,14 +12664,6 @@ "webidl-conversions": "^3.0.0" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "node_modules/win-release": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/win-release/-/win-release-1.1.1.tgz", diff --git a/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md b/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md deleted file mode 100644 index 990e9297..00000000 --- a/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md +++ /dev/null @@ -1,510 +0,0 @@ -# node-pre-gyp changelog - -## 1.0.11 -- Fixes dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906) - -## 1.0.10 -- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906) - -## 1.0.9 -- Upgraded node-fetch to 2.6.7 to address [CVE-2022-0235](https://www.cve.org/CVERecord?id=CVE-2022-0235) -- Upgraded detect-libc to 2.0.0 to use non-blocking NodeJS(>=12) Report API - 
-## 1.0.8 -- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624) - -## 1.0.7 -- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw - -## 1.0.6 -- Added node v17 to the internal node releases listing -- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.01, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11) -- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590) - - -## 1.0.5 -- Fix circular reference warning with node >= v14 - -## 1.0.4 -- Added node v16 to the internal node releases listing - -## 1.0.3 -- Improved support configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571) - - New options added in https://github.com/mapbox/node-pre-gyp/pull/576: 'bucket', 'region', and `s3ForcePathStyle` - -## 1.0.2 -- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572) - -## 1.0.1 -- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31 - -## 1.0.0 -- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated. -- New: support for staging and production s3 targets (see README.md) -- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands. 
-- Dropped node v6 support, added node v14 support -- Switched tests to use mapbox-owned bucket for testing -- Added coverage tracking and linting with eslint -- Added back support for symlinks inside the tarball -- Upgraded all test apps to N-API/node-addon-api -- New: support for staging and production s3 targets (see README.md) -- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default. -- Replaced needle with node-fetch -- Added proxy support for node-fetch -- Upgraded to mkdirp@1.x - -## 0.17.0 -- Got travis + appveyor green again -- Added support for more node versions - -## 0.16.0 - -- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520) - -## 0.15.0 - -- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492) -- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502) -- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501) - -## 0.14.0 - -- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434) -- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454) -- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459) -- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483) - -## 0.13.0 - -- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449) - -## 0.12.0 - -- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428) -- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422) - -## 0.11.0 - -- Fixed double-install problem with node v10 -- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405) - -## 0.10.3 - -- Now will use `request` over `needle` if 
request is installed. By default `needle` is used for `https`. This should unbreak proxy support that regressed in v0.9.0 - -## 0.10.2 - -- Fixed rc/deep-extent security vulnerability -- Fixed broken reinstall script do to incorrectly named get_best_napi_version - -## 0.10.1 - -- Fix needle error event (@medns) - -## 0.10.0 - -- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371) -- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366) -- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372) -- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372) -- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14) -- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375) - -## 0.9.1 - -- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361) - -## 0.9.0 - -- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350) - -## 0.8.0 - -- N-API support (@inspiredware) - -## 0.7.1 - -- Upgraded to tar v4.x - -## 0.7.0 - - - Updated request and hawk (#347) - - Dropped node v0.10.x support - -## 0.6.40 - - - Improved error reporting if an install fails - -## 0.6.39 - - - Support for node v9 - - Support for versioning on `{libc}` to allow binaries to work on non-glic linux systems like alpine linux - - -## 0.6.38 - - - Maintaining compatibility (for v0.6.x series) with node v0.10.x - -## 0.6.37 - - - Solved one part of #276: now now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json - - Fixed docs to avoid mentioning the deprecated and dangerous 
`prepublish` in package.json (#291) - - Add new node versions to crosswalk - - Ported tests to use tape instead of mocha - - Got appveyor tests passing by downgrading npm and node-gyp - -## 0.6.36 - - - Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directory. - - Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron) - -## 0.6.35 - - - No longer recommending `npm ls` in `prepublish` (#291) - - Fixed testbinary command (#283) @szdavid92 - -## 0.6.34 - - - Added new node versions to crosswalk, including v8 - - Upgraded deps to latest versions, started using `^` instead of `~` for all deps. - -## 0.6.33 - - - Improved support for yarn - -## 0.6.32 - - - Honor npm configuration for CA bundles (@heikkipora) - - Add node-pre-gyp and npm versions to user agent (@addaleax) - - Updated various deps - - Add known node version for v7.x - -## 0.6.31 - - - Updated various deps - -## 0.6.30 - - - Update to npmlog@4.x and semver@5.3.x - - Add known node version for v6.5.0 - -## 0.6.29 - - - Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0 - -## 0.6.28 - - - Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default - and users need to know why builds are falling back to source compiles that might then error out. 
- -## 0.6.27 - - - Add known node version for node v6 - - Stopped bundling dependencies - - Documented method for module authors to avoid bundling node-pre-gyp - - See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details - -## 0.6.26 - - - Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg - -## 0.6.25 - - - Improved support for auto-detection of electron runtime in `node-pre-gyp.find()` - - Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187 - - Add known node version for 4.4.1 and 5.9.1 - -## 0.6.24 - - - Add known node version for 5.8.0, 5.9.0, and 4.4.0. - -## 0.6.23 - - - Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1. - -## 0.6.22 - - - Add known node version for 4.3.1, and 5.7.0. - -## 0.6.21 - - - Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0. - -## 0.6.20 - - - Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0. - -## 0.6.19 - - - Add known node version for 4.2.4 - -## 0.6.18 - - - Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x - -## 0.6.17 - - - Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'` - -## 0.6.16 - - - Added known version in crosswalk for 5.1.0. - -## 0.6.15 - - - Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182) - - Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170) - - Added known version in crosswalk for 4.2.2. - -## 0.6.14 - - - Added node 5.x version - -## 0.6.13 - - - Added more known node 4.x versions - -## 0.6.12 - - - Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. 
Thanks @zcbenz - -## 0.6.11 - - - Added known node and io.js versions including more 3.x and 4.x versions - -## 0.6.10 - - - Added known node and io.js versions including 3.x and 4.x versions - - Upgraded `tar` dep - -## 0.6.9 - - - Upgraded `rc` dep - - Updated known io.js version: v2.4.0 - -## 0.6.8 - - - Upgraded `semver` and `rimraf` deps - - Updated known node and io.js versions - -## 0.6.7 - - - Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94) - -## 0.6.6 - - - Updated with known io.js 2.0.0 version - -## 0.6.5 - - - Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887) - - Updated to semver@4.3.2 - - Updated known node v0.12.x versions and io.js 1.x versions. - -## 0.6.4 - - - Improved support for `io.js` (@fengmk2) - - Test coverage improvements (@mikemorris) - - Fixed support for `--dist-url` that regressed in 0.6.3 - -## 0.6.3 - - - Added support for passing raw options to node-gyp using `--` separator. Flags passed after - the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed - after the `--` will be passed directly to make/visual studio. - - Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly - - Fix issue with require validation not working on windows 7 (@edgarsilva) - -## 0.6.2 - - - Support for io.js >= v1.0.2 - - Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`. - -## 0.6.1 - - - Fixed bundled `tar` version - -## 0.6.0 - - - BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124) - - Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. 
For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`. - - Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module leaves it in place. - - Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped. - - Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version` - -## 0.5.31 - - - Added support for deducing node_abi for node.js runtime from previous release if the series is even - - Added support for --target=0.10.33 - -## 0.5.30 - - - Repackaged with latest bundled deps - -## 0.5.29 - - - Added support for semver `build`. - - Fixed support for downloading from urls that include `+`. - -## 0.5.28 - - - Now reporting unix style paths only in reveal command - -## 0.5.27 - - - Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo - - Fixed support for installing when path contains a `'` - @halfdan - - Ported tests to mocha - -## 0.5.26 - - - Fix node-webkit support when `--target` option is not provided - -## 0.5.25 - - - Fix bundling of deps - -## 0.5.24 - - - Updated ABI crosswalk to incldue node v0.10.30 and v0.10.31 - -## 0.5.23 - - - Added `reveal` command. Pass no options to get all versioning data as json. 
Pass a second arg to grab a single versioned property value - - Added support for `--silent` (shortcut for `--loglevel=silent`) - -## 0.5.22 - - - Fixed node-webkit versioning name (NOTE: node-webkit support still experimental) - -## 0.5.21 - - - New package to fix `shasum check failed` error with v0.5.20 - -## 0.5.20 - - - Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90) - -## 0.5.19 - - - Updated to know about more node-webkit releases - -## 0.5.18 - - - Updated to know about more node-webkit releases - -## 0.5.17 - - - Updated to know about node v0.10.29 release - -## 0.5.16 - - - Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86) - -## 0.5.15 - - - Fixed installation of windows packages sub directories on unix systems (#84) - -## 0.5.14 - - - Finished support for cross building using `--target_platform` option (#82) - - Now skipping binary validation on install if target arch/platform do not match the host. - - Removed multi-arch validing for OS X since it required a FAT node.js binary - -## 0.5.13 - - - Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver where bundled. 
- -## 0.5.12 - - - Improved support for node-webkit (@Mithgol) - -## 0.5.11 - - - Updated target versions listing - -## 0.5.10 - - - Fixed handling of `-debug` flag passed directory to node-pre-gyp (#72) - - Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary - - Failed install due to `testbinary` check failure no longer leaves behind binary (#70) - -## 0.5.9 - - - Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60) - -## 0.5.8 - - - Started bundling deps - -## 0.5.7 - - - Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63) - - Exposed the internal `testbinary` command in node-pre-gyp command line tool - - Fixed minor bug so that `fallback_to_build` option is always respected - -## 0.5.6 - - - Added support for versioning on the `name` value in `package.json` (#57). - - Moved to using streams for reading tarball when publishing (#52) - -## 0.5.5 - - - Improved binary validation that also now works with node-webkit (@Mithgol) - - Upgraded test apps to work with node v0.11.x - - Improved test coverage - -## 0.5.4 - - - No longer depends on external install of node-gyp for compiling builds. - -## 0.5.3 - - - Reverted fix for debian/nodejs since it broke windows (#45) - -## 0.5.2 - - - Support for debian systems where the node binary is named `nodejs` (#45) - - Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates an .npm automatically when globally installed) - - Updated abi-crosswalk with node v0.10.26 entry. - -## 0.5.1 - - - Various minor bug fixes, several improving windows support for publishing. - -## 0.5.0 - - - Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`. 
- - Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18). - - Added `remote_path` which also supports versioning. - - Changed `remote_uri` to `host`. - -## 0.4.2 - - - Added support for `--target` flag to request cross-compile against a specific node/node-webkit version. - - Added preliminary support for node-webkit - - Fixed support for `--target_arch` option being respected in all cases. - -## 0.4.1 - - - Fixed exception when only stderr is available in binary test (@bendi / #31) - -## 0.4.0 - - - Enforce only `https:` based remote publishing access. - - Added `node-pre-gyp info` command to display listing of published binaries - - Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option. - - Added support for S3 prefixes. - -## 0.3.1 - - - Added `unpublish` command. - - Fixed module path construction in tests. - - Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides setting in a depedencies package.json `install` target. - -## 0.3.0 - - - Support for packaging all files in `module_path` directory - see `app4` for example - - Added `testpackage` command. - - Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that. - - `.node` modules must be in a folder of there own since tar-pack will remove everything when it unpacks. 
diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/.bin/semver b/node_modules/@mapbox/node-pre-gyp/node_modules/.bin/semver deleted file mode 120000 index 77443e78..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/.bin/semver +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" -else - exec node "$basedir/../semver/bin/semver.js" "$@" -fi diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/.bin/semver b/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/.bin/semver deleted file mode 120000 index 77443e78..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/.bin/semver +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") - -case `uname` in - *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;; -esac - -if [ -x "$basedir/node" ]; then - exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" -else - exec node "$basedir/../semver/bin/semver.js" "$@" -fi diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/package.json b/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/package.json deleted file mode 100644 index 6b970a62..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "semver", - "version": "6.3.1", - "description": "The semantic version parser used by npm.", - "main": "semver.js", - "scripts": { - "test": "tap test/ --100 --timeout=30", - "lint": "echo linting disabled", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap test/ --100 
--timeout=30", - "posttest": "npm run lint" - }, - "devDependencies": { - "@npmcli/template-oss": "4.17.0", - "tap": "^12.7.0" - }, - "license": "ISC", - "repository": { - "type": "git", - "url": "https://github.com/npm/node-semver.git" - }, - "bin": { - "semver": "./bin/semver.js" - }, - "files": [ - "bin", - "range.bnf", - "semver.js" - ], - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "content": "./scripts/template-oss", - "version": "4.17.0" - } -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/semver.js b/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/semver.js deleted file mode 100644 index 39319c13..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver/semver.js +++ /dev/null @@ -1,1643 +0,0 @@ -exports = module.exports = SemVer - -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} -} - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. 
-var MAX_SAFE_COMPONENT_LENGTH = 16 - -var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 - -// The actual regexps go on exports.re -var re = exports.re = [] -var safeRe = exports.safeRe = [] -var src = exports.src = [] -var t = exports.tokens = {} -var R = 0 - -function tok (n) { - t[n] = R++ -} - -var LETTERDASHNUMBER = '[a-zA-Z0-9-]' - -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -var safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] - -function makeSafeRe (value) { - for (var i = 0; i < safeRegexReplacements.length; i++) { - var token = safeRegexReplacements[i][0] - var max = safeRegexReplacements[i][1] - value = value - .split(token + '*').join(token + '{0,' + max + '}') - .split(token + '+').join(token + '{1,' + max + '}') - } - return value -} - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -tok('NUMERICIDENTIFIER') -src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' -tok('NUMERICIDENTIFIERLOOSE') -src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -tok('NONNUMERICIDENTIFIER') -src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -tok('MAINVERSION') -src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' 
+ - '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIER] + ')' - -tok('MAINVERSIONLOOSE') -src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -tok('PRERELEASEIDENTIFIER') -src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -tok('PRERELEASEIDENTIFIERLOOSE') -src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + - '|' + src[t.NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -tok('PRERELEASE') -src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' - -tok('PRERELEASELOOSE') -src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -tok('BUILDIDENTIFIER') -src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -tok('BUILD') -src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + - '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -tok('FULL') -tok('FULLPLAIN') -src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + - src[t.PRERELEASE] + '?' + - src[t.BUILD] + '?' 
- -src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -tok('LOOSEPLAIN') -src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + - src[t.PRERELEASELOOSE] + '?' + - src[t.BUILD] + '?' - -tok('LOOSE') -src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' - -tok('GTLT') -src[t.GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -tok('XRANGEIDENTIFIERLOOSE') -src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -tok('XRANGEIDENTIFIER') -src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' - -tok('XRANGEPLAIN') -src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + - '(?:' + src[t.PRERELEASE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' - -tok('XRANGEPLAINLOOSE') -src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[t.PRERELEASELOOSE] + ')?' + - src[t.BUILD] + '?' + - ')?)?' - -tok('XRANGE') -src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' -tok('XRANGELOOSE') -src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -tok('COERCE') -src[t.COERCE] = '(^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+ - '(?:$|[^\\d])' -tok('COERCERTL') -re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') -safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -tok('LONETILDE') -src[t.LONETILDE] = '(?:~>?)' - -tok('TILDETRIM') -src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' -re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') -safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') -var tildeTrimReplace = '$1~' - -tok('TILDE') -src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' -tok('TILDELOOSE') -src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -tok('LONECARET') -src[t.LONECARET] = '(?:\\^)' - -tok('CARETTRIM') -src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' -re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') -safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') -var caretTrimReplace = '$1^' - -tok('CARET') -src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' -tok('CARETLOOSE') -src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -tok('COMPARATORLOOSE') -src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' -tok('COMPARATOR') -src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -tok('COMPARATORTRIM') -src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + - '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' - -// this one has to use the /g flag -re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') -safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') -var comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note 
that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -tok('HYPHENRANGE') -src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAIN] + ')' + - '\\s*$' - -tok('HYPHENRANGELOOSE') -src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[t.XRANGEPLAINLOOSE] + ')' + - '\\s*$' - -// Star ranges basically just allow anything at all. -tok('STAR') -src[t.STAR] = '(<|>)?=?\\s*\\*' - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - - // Replace all greedy whitespace to prevent regex dos issues. These regex are - // used internally via the safeRe object since all inputs in this library get - // normalized first to trim and collapse all extra whitespace. The original - // regexes are exported for userland consumption and lower level usage. A - // future breaking change could export the safer regex only with a note that - // all input should have extra whitespace removed. - safeRe[i] = new RegExp(makeSafeRe(src[i])) - } -} - -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? 
v.version : null -} - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } - - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - - var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) - - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} - -SemVer.prototype.toString = function () { - return this.version -} - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) -} - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} - -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} - -SemVer.prototype.compareBuild = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - var i = 0 - do { - var a = this.build[i] - var b = other.build[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while 
(++i) -} - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. 
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} - -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} - -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? 
-1 - : 1 -} - -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} - -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} - -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} - -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} - -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} - -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} - -exports.compareBuild = compareBuild -function compareBuild (a, b, loose) { - var versionA = new SemVer(a, loose) - var versionB = new SemVer(b, loose) - return versionA.compare(versionB) || versionA.compareBuild(versionB) -} - -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} - -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(a, b, loose) - }) -} - -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.compareBuild(b, a, loose) - }) -} - -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} - -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} - -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} - -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} - -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} - -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} - -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - 
if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } -} - -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } - - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} - -Comparator.prototype.toString = function () { - return this.value -} - -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - return cmp(version, this.operator, this.semver, this.options) -} - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - var rangeTmp - - if (this.operator === '') { - if (this.value === '') { - return true - } - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - if (comp.value === '') { - return true - } - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' 
|| this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range - .trim() - .split(/\s+/) - .join(' ') - - // First, split based on boolean or || - this.set = this.raw.split('||').map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + this.raw) - } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range -} - -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set -} - -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some(function (thisComparators) { - return ( - isSatisfiable(thisComparators, options) && - range.set.some(function (rangeComparators) { - return ( - isSatisfiable(rangeComparators, options) && - thisComparators.every(function (thisComparator) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) -} - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -function isSatisfiable (comparators, options) { - var result = true - var remainingComparators = comparators.slice() - var testComparator = remainingComparators.pop() - - while (result && 
remainingComparators.length) { - result = remainingComparators.every(function (otherComparator) { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result -} - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' 
+ p + - ' <' + (+M + 1) + '.0.0' - } - } - - debug('caret return', ret) - return ret - }) -} - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} - -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? '-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - ret = gtlt + M + '.' + m + '.' + p + pr - } else if (xm) { - ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr - } else if (xp) { - ret = '>=' + M + '.' + m + '.0' + pr + - ' <' + M + '.' + (+m + 1) + '.0' + pr - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. 
-function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(safeRe[t.STAR], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
- for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. - return false - } - - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} - -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's 
semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. 
- - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version, options) { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - var match = null - if (!options.rtl) { - match = version.match(safeRe[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. 
- // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - var next - while ((next = safeRe[t.COERCERTL].exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - safeRe[t.COERCERTL].lastIndex = -1 - } - - if (match === null) { - return null - } - - return parse(match[2] + - '.' + (match[3] || '0') + - '.' + (match[4] || '0'), options) -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/README.md b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/README.md deleted file mode 100644 index ede7b7d0..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/README.md +++ /dev/null @@ -1,654 +0,0 @@ -semver(1) -- The semantic versioner for npm -=========================================== - -## Install - -```bash -npm install semver -```` - -## Usage - -As a node module: - -```js -const semver = require('semver') - -semver.valid('1.2.3') // '1.2.3' -semver.valid('a.b.c') // null -semver.clean(' =v1.2.3 ') // '1.2.3' -semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true -semver.gt('1.2.3', '9.8.7') // false -semver.lt('1.2.3', '9.8.7') // true -semver.minVersion('>=1.0.0') // '1.0.0' -semver.valid(semver.coerce('v2')) // '2.0.0' -semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' -``` - -You can also just load the module for the function that you care about if -you'd like to minimize your footprint. 
- -```js -// load the whole API at once in a single object -const semver = require('semver') - -// or just load the bits you need -// all of them listed here, just pick and choose what you want - -// classes -const SemVer = require('semver/classes/semver') -const Comparator = require('semver/classes/comparator') -const Range = require('semver/classes/range') - -// functions for working with versions -const semverParse = require('semver/functions/parse') -const semverValid = require('semver/functions/valid') -const semverClean = require('semver/functions/clean') -const semverInc = require('semver/functions/inc') -const semverDiff = require('semver/functions/diff') -const semverMajor = require('semver/functions/major') -const semverMinor = require('semver/functions/minor') -const semverPatch = require('semver/functions/patch') -const semverPrerelease = require('semver/functions/prerelease') -const semverCompare = require('semver/functions/compare') -const semverRcompare = require('semver/functions/rcompare') -const semverCompareLoose = require('semver/functions/compare-loose') -const semverCompareBuild = require('semver/functions/compare-build') -const semverSort = require('semver/functions/sort') -const semverRsort = require('semver/functions/rsort') - -// low-level comparators between versions -const semverGt = require('semver/functions/gt') -const semverLt = require('semver/functions/lt') -const semverEq = require('semver/functions/eq') -const semverNeq = require('semver/functions/neq') -const semverGte = require('semver/functions/gte') -const semverLte = require('semver/functions/lte') -const semverCmp = require('semver/functions/cmp') -const semverCoerce = require('semver/functions/coerce') - -// working with ranges -const semverSatisfies = require('semver/functions/satisfies') -const semverMaxSatisfying = require('semver/ranges/max-satisfying') -const semverMinSatisfying = require('semver/ranges/min-satisfying') -const semverToComparators = 
require('semver/ranges/to-comparators') -const semverMinVersion = require('semver/ranges/min-version') -const semverValidRange = require('semver/ranges/valid') -const semverOutside = require('semver/ranges/outside') -const semverGtr = require('semver/ranges/gtr') -const semverLtr = require('semver/ranges/ltr') -const semverIntersects = require('semver/ranges/intersects') -const semverSimplifyRange = require('semver/ranges/simplify') -const semverRangeSubset = require('semver/ranges/subset') -``` - -As a command-line utility: - -``` -$ semver -h - -A JavaScript implementation of the https://semver.org/ specification -Copyright Isaac Z. Schlueter - -Usage: semver [options] [ [...]] -Prints valid versions sorted by SemVer precedence - -Options: --r --range - Print versions that match the specified range. - --i --increment [] - Increment a version by the specified level. Level can - be one of: major, minor, patch, premajor, preminor, - prepatch, or prerelease. Default level is 'patch'. - Only one version may be specified. - ---preid - Identifier to be used to prefix premajor, preminor, - prepatch or prerelease version increments. - --l --loose - Interpret versions and ranges loosely - --n <0|1> - This is the base to be used for the prerelease identifier. - --p --include-prerelease - Always include prerelease versions in range matching - --c --coerce - Coerce a string into SemVer if possible - (does not imply --loose) - ---rtl - Coerce version strings right to left - ---ltr - Coerce version strings left to right (default) - -Program exits successfully if any valid version satisfies -all supplied ranges, and prints all satisfying versions. - -If no satisfying versions are found, then exits failure. - -Versions are printed in ascending order, so supplying -multiple versions to the utility will just sort them. -``` - -## Versions - -A "version" is described by the `v2.0.0` specification found at -. - -A leading `"="` or `"v"` character is stripped off and ignored. 
- -## Ranges - -A `version range` is a set of `comparators` that specify versions -that satisfy the range. - -A `comparator` is composed of an `operator` and a `version`. The set -of primitive `operators` is: - -* `<` Less than -* `<=` Less than or equal to -* `>` Greater than -* `>=` Greater than or equal to -* `=` Equal. If no operator is specified, then equality is assumed, - so this operator is optional but MAY be included. - -For example, the comparator `>=1.2.7` would match the versions -`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` -or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and -would match the versions `2.0.0` and `3.1.0`, but not the versions -`1.0.1` or `1.1.0`. - -Comparators can be joined by whitespace to form a `comparator set`, -which is satisfied by the **intersection** of all of the comparators -it includes. - -A range is composed of one or more comparator sets, joined by `||`. A -version matches a range if and only if every comparator in at least -one of the `||`-separated comparator sets is satisfied by the version. - -For example, the range `>=1.2.7 <1.3.0` would match the versions -`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, -or `1.1.0`. - -The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, -`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. - -### Prerelease Tags - -If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then -it will only be allowed to satisfy comparator sets if at least one -comparator with the same `[major, minor, patch]` tuple also has a -prerelease tag. - -For example, the range `>1.2.3-alpha.3` would be allowed to match the -version `1.2.3-alpha.7`, but it would *not* be satisfied by -`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater -than" `1.2.3-alpha.3` according to the SemVer sort rules. The version -range only accepts prerelease tags on the `1.2.3` version. 
-Version `3.4.5` *would* satisfy the range because it does not have a -prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. - -The purpose of this behavior is twofold. First, prerelease versions -frequently are updated very quickly, and contain many breaking changes -that are (by the author's design) not yet fit for public consumption. -Therefore, by default, they are excluded from range-matching -semantics. - -Second, a user who has opted into using a prerelease version has -indicated the intent to use *that specific* set of -alpha/beta/rc versions. By including a prerelease tag in the range, -the user is indicating that they are aware of the risk. However, it -is still not appropriate to assume that they have opted into taking a -similar risk on the *next* set of prerelease versions. - -Note that this behavior can be suppressed (treating all prerelease -versions as if they were normal versions, for range-matching) -by setting the `includePrerelease` flag on the options -object to any -[functions](https://github.com/npm/node-semver#functions) that do -range matching. - -#### Prerelease Identifiers - -The method `.inc` takes an additional `identifier` string argument that -will append the value of the string as a prerelease identifier: - -```javascript -semver.inc('1.2.3', 'prerelease', 'beta') -// '1.2.4-beta.0' -``` - -command-line example: - -```bash -$ semver 1.2.3 -i prerelease --preid beta -1.2.4-beta.0 -``` - -Which then can be used to increment further: - -```bash -$ semver 1.2.4-beta.0 -i prerelease -1.2.4-beta.1 -``` - -#### Prerelease Identifier Base - -The method `.inc` takes an optional parameter 'identifierBase' string -that will let you let your prerelease number as zero-based or one-based. -Set to `false` to omit the prerelease number altogether. -If you do not specify this parameter, it will default to zero-based. 
- -```javascript -semver.inc('1.2.3', 'prerelease', 'beta', '1') -// '1.2.4-beta.1' -``` - -```javascript -semver.inc('1.2.3', 'prerelease', 'beta', false) -// '1.2.4-beta' -``` - -command-line example: - -```bash -$ semver 1.2.3 -i prerelease --preid beta -n 1 -1.2.4-beta.1 -``` - -```bash -$ semver 1.2.3 -i prerelease --preid beta -n false -1.2.4-beta -``` - -### Advanced Range Syntax - -Advanced range syntax desugars to primitive comparators in -deterministic ways. - -Advanced ranges may be combined in the same way as primitive -comparators using white space or `||`. - -#### Hyphen Ranges `X.Y.Z - A.B.C` - -Specifies an inclusive set. - -* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` - -If a partial version is provided as the first version in the inclusive -range, then the missing pieces are replaced with zeroes. - -* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` - -If a partial version is provided as the second version in the -inclusive range, then all versions that start with the supplied parts -of the tuple are accepted, but nothing that would be greater than the -provided tuple parts. - -* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` -* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` - -#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` - -Any of `X`, `x`, or `*` may be used to "stand in" for one of the -numeric values in the `[major, minor, patch]` tuple. - -* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless - `includePrerelease` is specified, in which case any version at all - satisfies) -* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) -* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) - -A partial version range is treated as an X-Range, so the special -character is in fact optional. - -* `""` (empty string) := `*` := `>=0.0.0` -* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` -* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` - -#### Tilde Ranges `~1.2.3` `~1.2` `~1` - -Allows patch-level changes if a minor version is specified on the -comparator. Allows minor-level changes if not. 
- -* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` -* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) -* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) -* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` -* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) -* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) -* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. - -#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` - -Allows changes that do not modify the left-most non-zero element in the -`[major, minor, patch]` tuple. In other words, this allows patch and -minor updates for versions `1.0.0` and above, patch updates for -versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. - -Many authors treat a `0.x` version as if the `x` were the major -"breaking-change" indicator. - -Caret ranges are ideal when an author may make breaking changes -between `0.2.4` and `0.3.0` releases, which is a common practice. -However, it presumes that there will *not* be breaking changes between -`0.2.4` and `0.2.5`. It allows for changes that are presumed to be -additive (but non-breaking), according to commonly observed practices. - -* `^1.2.3` := `>=1.2.3 <2.0.0-0` -* `^0.2.3` := `>=0.2.3 <0.3.0-0` -* `^0.0.3` := `>=0.0.3 <0.0.4-0` -* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in - the `1.2.3` version will be allowed, if they are greater than or - equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but - `1.2.4-beta.2` would not, because it is a prerelease of a - different `[major, minor, patch]` tuple. 
-* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the - `0.0.3` version *only* will be allowed, if they are greater than or - equal to `beta`. So, `0.0.3-pr.2` would be allowed. - -When parsing caret ranges, a missing `patch` value desugars to the -number `0`, but will allow flexibility within that value, even if the -major and minor versions are both `0`. - -* `^1.2.x` := `>=1.2.0 <2.0.0-0` -* `^0.0.x` := `>=0.0.0 <0.1.0-0` -* `^0.0` := `>=0.0.0 <0.1.0-0` - -A missing `minor` and `patch` values will desugar to zero, but also -allow flexibility within those values, even if the major version is -zero. - -* `^1.x` := `>=1.0.0 <2.0.0-0` -* `^0.x` := `>=0.0.0 <1.0.0-0` - -### Range Grammar - -Putting all this together, here is a Backus-Naur grammar for ranges, -for the benefit of parser authors: - -```bnf -range-set ::= range ( logical-or range ) * -logical-or ::= ( ' ' ) * '||' ( ' ' ) * -range ::= hyphen | simple ( ' ' simple ) * | '' -hyphen ::= partial ' - ' partial -simple ::= primitive | partial | tilde | caret -primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial -partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? -xr ::= 'x' | 'X' | '*' | nr -nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * -tilde ::= '~' partial -caret ::= '^' partial -qualifier ::= ( '-' pre )? ( '+' build )? -pre ::= parts -build ::= parts -parts ::= part ( '.' part ) * -part ::= nr | [-0-9A-Za-z]+ -``` - -## Functions - -All methods and classes take a final `options` object argument. All -options in this object are `false` by default. The options supported -are: - -- `loose`: Be more forgiving about not-quite-valid semver strings. - (Any resulting output will always be 100% strict compliant, of - course.) For backwards compatibility reasons, if the `options` - argument is a boolean value instead of an object, it is interpreted - to be the `loose` param. 
-- `includePrerelease`: Set to suppress the [default - behavior](https://github.com/npm/node-semver#prerelease-tags) of - excluding prerelease tagged versions from ranges unless they are - explicitly opted into. - -Strict-mode Comparators and Ranges will be strict about the SemVer -strings that they parse. - -* `valid(v)`: Return the parsed version, or null if it's not valid. -* `inc(v, release, options, identifier, identifierBase)`: - Return the version incremented by the release - type (`major`, `premajor`, `minor`, `preminor`, `patch`, - `prepatch`, or `prerelease`), or null if it's not valid - * `premajor` in one call will bump the version up to the next major - version and down to a prerelease of that major version. - `preminor`, and `prepatch` work the same way. - * If called from a non-prerelease version, `prerelease` will work the - same as `prepatch`. It increments the patch version and then makes a - prerelease. If the input version is already a prerelease it simply - increments it. - * `identifier` can be used to prefix `premajor`, `preminor`, - `prepatch`, or `prerelease` version increments. `identifierBase` - is the base to be used for the `prerelease` identifier. -* `prerelease(v)`: Returns an array of prerelease components, or null - if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` -* `major(v)`: Return the major version number. -* `minor(v)`: Return the minor version number. -* `patch(v)`: Return the patch version number. -* `intersects(r1, r2, loose)`: Return true if the two supplied ranges - or comparators intersect. -* `parse(v)`: Attempt to parse a string as a semantic version, returning either - a `SemVer` object or `null`. - -### Comparison - -* `gt(v1, v2)`: `v1 > v2` -* `gte(v1, v2)`: `v1 >= v2` -* `lt(v1, v2)`: `v1 < v2` -* `lte(v1, v2)`: `v1 <= v2` -* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, - even if they're not the same string. You already know how to - compare strings. 
-* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. -* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call - the corresponding function above. `"==="` and `"!=="` do simple - string comparison, but are included for completeness. Throws if an - invalid comparison string is provided. -* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if - `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. -* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions - in descending order when passed to `Array.sort()`. -* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions - are equal. Sorts in ascending order if passed to `Array.sort()`. -* `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`. -* `diff(v1, v2)`: Returns the difference between two versions by the release type - (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), - or null if the versions are the same. - -### Sorting - -* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild` - function. -* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on - the `compareBuild` function in descending order. - -### Comparators - -* `intersects(comparator)`: Return true if the comparators intersect - -### Ranges - -* `validRange(range)`: Return the valid range or null if it's not valid -* `satisfies(version, range)`: Return true if the version satisfies the - range. -* `maxSatisfying(versions, range)`: Return the highest version in the list - that satisfies the range, or `null` if none of them do. -* `minSatisfying(versions, range)`: Return the lowest version in the list - that satisfies the range, or `null` if none of them do. -* `minVersion(range)`: Return the lowest version that can match - the given range. -* `gtr(version, range)`: Return `true` if the version is greater than all the - versions possible in the range. 
-* `ltr(version, range)`: Return `true` if the version is less than all the - versions possible in the range. -* `outside(version, range, hilo)`: Return true if the version is outside - the bounds of the range in either the high or low direction. The - `hilo` argument must be either the string `'>'` or `'<'`. (This is - the function called by `gtr` and `ltr`.) -* `intersects(range)`: Return true if any of the range comparators intersect. -* `simplifyRange(versions, range)`: Return a "simplified" range that - matches the same items in the `versions` list as the range specified. Note - that it does *not* guarantee that it would match the same versions in all - cases, only for the set of versions provided. This is useful when - generating ranges by joining together multiple versions with `||` - programmatically, to provide the user with something a bit more - ergonomic. If the provided range is shorter in string-length than the - generated range, then that is returned. -* `subset(subRange, superRange)`: Return `true` if the `subRange` range is - entirely contained by the `superRange` range. - -Note that, since ranges may be non-contiguous, a version might not be -greater than a range, less than a range, *or* satisfy a range! For -example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` -until `2.0.0`, so version `1.2.10` would not be greater than the -range (because `2.0.1` satisfies, which is higher), nor less than the -range (since `1.2.8` satisfies, which is lower), and it also does not -satisfy the range. - -If you want to know if a version satisfies or does not satisfy a -range, use the `satisfies(version, range)` function. - -### Coercion - -* `coerce(version, options)`: Coerces a string to semver if possible - -This aims to provide a very forgiving translation of a non-semver string to -semver. 
It looks for the first digit in a string and consumes all -remaining characters which satisfy at least a partial semver (e.g., `1`, -`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer -versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All -surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes -`3.4.0`). Only text which lacks digits will fail coercion (`version one` -is not valid). The maximum length for any semver component considered for -coercion is 16 characters; longer components will be ignored -(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any -semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value -components are invalid (`9999999999999999.4.7.4` is likely invalid). - -If the `options.rtl` flag is set, then `coerce` will return the right-most -coercible tuple that does not share an ending index with a longer coercible -tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not -`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of -any other overlapping SemVer tuple. - -If the `options.includePrerelease` flag is set, then the `coerce` result will contain -prerelease and build parts of a version. For example, `1.2.3.4-rc.1+rev.2` -will preserve prerelease `rc.1` and build `rev.2` in the result. - -### Clean - -* `clean(version)`: Clean a string to be a valid semver if possible - -This will return a cleaned and trimmed semver version. If the provided -version is not valid a null will be returned. This does not work for -ranges. - -ex. 
-* `s.clean(' = v 2.1.5foo')`: `null` -* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` -* `s.clean(' = v 2.1.5-foo')`: `null` -* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` -* `s.clean('=v2.1.5')`: `'2.1.5'` -* `s.clean(' =v2.1.5')`: `'2.1.5'` -* `s.clean(' 2.1.5 ')`: `'2.1.5'` -* `s.clean('~1.0.0')`: `null` - -## Constants - -As a convenience, helper constants are exported to provide information about what `node-semver` supports: - -### `RELEASE_TYPES` - -- major -- premajor -- minor -- preminor -- patch -- prepatch -- prerelease - -``` -const semver = require('semver'); - -if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { - console.log('This is a valid release type!'); -} else { - console.warn('This is NOT a valid release type!'); -} -``` - -### `SEMVER_SPEC_VERSION` - -2.0.0 - -``` -const semver = require('semver'); - -console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); -``` - -## Exported Modules - - - -You may pull in just the part of this semver utility that you need if you -are sensitive to packing and tree-shaking concerns. The main -`require('semver')` export uses getter functions to lazily load the parts -of the API that are used. 
- -The following modules are available: - -* `require('semver')` -* `require('semver/classes')` -* `require('semver/classes/comparator')` -* `require('semver/classes/range')` -* `require('semver/classes/semver')` -* `require('semver/functions/clean')` -* `require('semver/functions/cmp')` -* `require('semver/functions/coerce')` -* `require('semver/functions/compare')` -* `require('semver/functions/compare-build')` -* `require('semver/functions/compare-loose')` -* `require('semver/functions/diff')` -* `require('semver/functions/eq')` -* `require('semver/functions/gt')` -* `require('semver/functions/gte')` -* `require('semver/functions/inc')` -* `require('semver/functions/lt')` -* `require('semver/functions/lte')` -* `require('semver/functions/major')` -* `require('semver/functions/minor')` -* `require('semver/functions/neq')` -* `require('semver/functions/parse')` -* `require('semver/functions/patch')` -* `require('semver/functions/prerelease')` -* `require('semver/functions/rcompare')` -* `require('semver/functions/rsort')` -* `require('semver/functions/satisfies')` -* `require('semver/functions/sort')` -* `require('semver/functions/valid')` -* `require('semver/ranges/gtr')` -* `require('semver/ranges/intersects')` -* `require('semver/ranges/ltr')` -* `require('semver/ranges/max-satisfying')` -* `require('semver/ranges/min-satisfying')` -* `require('semver/ranges/min-version')` -* `require('semver/ranges/outside')` -* `require('semver/ranges/simplify')` -* `require('semver/ranges/subset')` -* `require('semver/ranges/to-comparators')` -* `require('semver/ranges/valid')` - diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/bin/semver.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/bin/semver.js deleted file mode 100755 index f62b566f..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/bin/semver.js +++ /dev/null @@ -1,188 +0,0 @@ -#!/usr/bin/env node -// Standalone semver comparison program. 
-// Exits successfully and prints matching version(s) if -// any supplied version is valid and passes all tests. - -const argv = process.argv.slice(2) - -let versions = [] - -const range = [] - -let inc = null - -const version = require('../package.json').version - -let loose = false - -let includePrerelease = false - -let coerce = false - -let rtl = false - -let identifier - -let identifierBase - -const semver = require('../') -const parseOptions = require('../internal/parse-options') - -let reverse = false - -let options = {} - -const main = () => { - if (!argv.length) { - return help() - } - while (argv.length) { - let a = argv.shift() - const indexOfEqualSign = a.indexOf('=') - if (indexOfEqualSign !== -1) { - const value = a.slice(indexOfEqualSign + 1) - a = a.slice(0, indexOfEqualSign) - argv.unshift(value) - } - switch (a) { - case '-rv': case '-rev': case '--rev': case '--reverse': - reverse = true - break - case '-l': case '--loose': - loose = true - break - case '-p': case '--include-prerelease': - includePrerelease = true - break - case '-v': case '--version': - versions.push(argv.shift()) - break - case '-i': case '--inc': case '--increment': - switch (argv[0]) { - case 'major': case 'minor': case 'patch': case 'prerelease': - case 'premajor': case 'preminor': case 'prepatch': - inc = argv.shift() - break - default: - inc = 'patch' - break - } - break - case '--preid': - identifier = argv.shift() - break - case '-r': case '--range': - range.push(argv.shift()) - break - case '-n': - identifierBase = argv.shift() - if (identifierBase === 'false') { - identifierBase = false - } - break - case '-c': case '--coerce': - coerce = true - break - case '--rtl': - rtl = true - break - case '--ltr': - rtl = false - break - case '-h': case '--help': case '-?': - return help() - default: - versions.push(a) - break - } - } - - options = parseOptions({ loose, includePrerelease, rtl }) - - versions = versions.map((v) => { - return coerce ? 
(semver.coerce(v, options) || { version: v }).version : v - }).filter((v) => { - return semver.valid(v) - }) - if (!versions.length) { - return fail() - } - if (inc && (versions.length !== 1 || range.length)) { - return failInc() - } - - for (let i = 0, l = range.length; i < l; i++) { - versions = versions.filter((v) => { - return semver.satisfies(v, range[i], options) - }) - if (!versions.length) { - return fail() - } - } - versions - .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options)) - .map(v => semver.clean(v, options)) - .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v) - .forEach(v => console.log(v)) -} - -const failInc = () => { - console.error('--inc can only be used on a single version with no range') - fail() -} - -const fail = () => process.exit(1) - -const help = () => console.log( -`SemVer ${version} - -A JavaScript implementation of the https://semver.org/ specification -Copyright Isaac Z. Schlueter - -Usage: semver [options] [ [...]] -Prints valid versions sorted by SemVer precedence - -Options: --r --range - Print versions that match the specified range. - --i --increment [] - Increment a version by the specified level. Level can - be one of: major, minor, patch, premajor, preminor, - prepatch, or prerelease. Default level is 'patch'. - Only one version may be specified. - ---preid - Identifier to be used to prefix premajor, preminor, - prepatch or prerelease version increments. - --l --loose - Interpret versions and ranges loosely - --p --include-prerelease - Always include prerelease versions in range matching - --c --coerce - Coerce a string into SemVer if possible - (does not imply --loose) - ---rtl - Coerce version strings right to left - ---ltr - Coerce version strings left to right (default) - --n - Base number to be used for the prerelease identifier. - Can be either 0 or 1, or false to omit the number altogether. - Defaults to 0. 
- -Program exits successfully if any valid version satisfies -all supplied ranges, and prints all satisfying versions. - -If no satisfying versions are found, then exits failure. - -Versions are printed in ascending order, so supplying -multiple versions to the utility will just sort them.`) - -main() diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/comparator.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/comparator.js deleted file mode 100644 index 3d39c0ee..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/comparator.js +++ /dev/null @@ -1,141 +0,0 @@ -const ANY = Symbol('SemVer ANY') -// hoisted class for cyclic dependency -class Comparator { - static get ANY () { - return ANY - } - - constructor (comp, options) { - options = parseOptions(options) - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } - - comp = comp.trim().split(/\s+/).join(' ') - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) - } - - parse (comp) { - const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] - const m = comp.match(r) - - if (!m) { - throw new TypeError(`Invalid comparator: ${comp}`) - } - - this.operator = m[1] !== undefined ? m[1] : '' - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. 
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } - } - - toString () { - return this.value - } - - test (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY || version === ANY) { - return true - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - return cmp(version, this.operator, this.semver, this.options) - } - - intersects (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (this.operator === '') { - if (this.value === '') { - return true - } - return new Range(comp.value, options).test(this.value) - } else if (comp.operator === '') { - if (comp.value === '') { - return true - } - return new Range(this.value, options).test(comp.semver) - } - - options = parseOptions(options) - - // Special cases where nothing can possibly be lower - if (options.includePrerelease && - (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { - return false - } - if (!options.includePrerelease && - (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { - return false - } - - // Same direction increasing (> or >=) - if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { - return true - } - // Same direction decreasing (< or <=) - if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { - return true - } - // same SemVer and both sides are inclusive (<= or >=) - if ( - (this.semver.version === comp.semver.version) && - this.operator.includes('=') && comp.operator.includes('=')) { - return true - } - // opposite directions less than - if (cmp(this.semver, '<', comp.semver, options) && - this.operator.startsWith('>') && comp.operator.startsWith('<')) { - return true - } - // opposite directions greater than - if (cmp(this.semver, '>', comp.semver, options) && - this.operator.startsWith('<') && 
comp.operator.startsWith('>')) { - return true - } - return false - } -} - -module.exports = Comparator - -const parseOptions = require('../internal/parse-options') -const { safeRe: re, t } = require('../internal/re') -const cmp = require('../functions/cmp') -const debug = require('../internal/debug') -const SemVer = require('./semver') -const Range = require('./range') diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/range.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/range.js deleted file mode 100644 index ceee2314..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/range.js +++ /dev/null @@ -1,554 +0,0 @@ -const SPACE_CHARACTERS = /\s+/g - -// hoisted class for cyclic dependency -class Range { - constructor (range, options) { - options = parseOptions(options) - - if (range instanceof Range) { - if ( - range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease - ) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - // just put it in the set and return - this.raw = range.value - this.set = [[range]] - this.formatted = undefined - return this - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First reduce all whitespace as much as possible so we do not have to rely - // on potentially slow regexes like \s*. This is then stored and used for - // future error messages as well. - this.raw = range.trim().replace(SPACE_CHARACTERS, ' ') - - // First, split on || - this.set = this.raw - .split('||') - // map the range to a 2d array of comparators - .map(r => this.parseRange(r.trim())) - // throw out any comparator lists that are empty - // this generally means that it was not a valid range, which is allowed - // in loose mode, but will still throw if the WHOLE range is invalid. 
- .filter(c => c.length) - - if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${this.raw}`) - } - - // if we have any that are not the null set, throw out null sets. - if (this.set.length > 1) { - // keep the first one, in case they're all null sets - const first = this.set[0] - this.set = this.set.filter(c => !isNullSet(c[0])) - if (this.set.length === 0) { - this.set = [first] - } else if (this.set.length > 1) { - // if we have any that are *, then the range is just * - for (const c of this.set) { - if (c.length === 1 && isAny(c[0])) { - this.set = [c] - break - } - } - } - } - - this.formatted = undefined - } - - get range () { - if (this.formatted === undefined) { - this.formatted = '' - for (let i = 0; i < this.set.length; i++) { - if (i > 0) { - this.formatted += '||' - } - const comps = this.set[i] - for (let k = 0; k < comps.length; k++) { - if (k > 0) { - this.formatted += ' ' - } - this.formatted += comps[k].toString().trim() - } - } - } - return this.formatted - } - - format () { - return this.range - } - - toString () { - return this.range - } - - parseRange (range) { - // memoize range parsing for performance. - // this is a very hot path, and fully deterministic. - const memoOpts = - (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | - (this.options.loose && FLAG_LOOSE) - const memoKey = memoOpts + ':' + range - const cached = cache.get(memoKey) - if (cached) { - return cached - } - - const loose = this.options.loose - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - const hr = loose ? 
re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] - range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) - debug('hyphen replace', range) - - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[t.TILDETRIM], tildeTrimReplace) - debug('tilde trim', range) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[t.CARETTRIM], caretTrimReplace) - debug('caret trim', range) - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - let rangeList = range - .split(' ') - .map(comp => parseComparator(comp, this.options)) - .join(' ') - .split(/\s+/) - // >=0.0.0 is equivalent to * - .map(comp => replaceGTE0(comp, this.options)) - - if (loose) { - // in loose mode, throw out any that are not valid comparators - rangeList = rangeList.filter(comp => { - debug('loose invalid filter', comp, this.options) - return !!comp.match(re[t.COMPARATORLOOSE]) - }) - } - debug('range list', rangeList) - - // if any comparators are the null set, then replace with JUST null set - // if more than one comparator, remove any * comparators - // also, don't include the same comparator more than once - const rangeMap = new Map() - const comparators = rangeList.map(comp => new Comparator(comp, this.options)) - for (const comp of comparators) { - if (isNullSet(comp)) { - return [comp] - } - rangeMap.set(comp.value, comp) - } - if (rangeMap.size > 1 && rangeMap.has('')) { - rangeMap.delete('') - } - - const result = [...rangeMap.values()] - cache.set(memoKey, result) - return result - } - - intersects (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some((thisComparators) => { - return ( - isSatisfiable(thisComparators, options) && - range.set.some((rangeComparators) => { - return ( - isSatisfiable(rangeComparators, options) && 
- thisComparators.every((thisComparator) => { - return rangeComparators.every((rangeComparator) => { - return thisComparator.intersects(rangeComparator, options) - }) - }) - ) - }) - ) - }) - } - - // if ANY of the sets match ALL of its comparators, then pass - test (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - try { - version = new SemVer(version, this.options) - } catch (er) { - return false - } - } - - for (let i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false - } -} - -module.exports = Range - -const LRU = require('../internal/lrucache') -const cache = new LRU() - -const parseOptions = require('../internal/parse-options') -const Comparator = require('./comparator') -const debug = require('../internal/debug') -const SemVer = require('./semver') -const { - safeRe: re, - t, - comparatorTrimReplace, - tildeTrimReplace, - caretTrimReplace, -} = require('../internal/re') -const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') - -const isNullSet = c => c.value === '<0.0.0-0' -const isAny = c => c.value === '' - -// take a set of comparators and determine whether there -// exists a version which can satisfy it -const isSatisfiable = (comparators, options) => { - let result = true - const remainingComparators = comparators.slice() - let testComparator = remainingComparators.pop() - - while (result && remainingComparators.length) { - result = remainingComparators.every((otherComparator) => { - return testComparator.intersects(otherComparator, options) - }) - - testComparator = remainingComparators.pop() - } - - return result -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-const parseComparator = (comp, options) => { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -const isX = id => !id || id.toLowerCase() === 'x' || id === '*' - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 -// ~0.0.1 --> >=0.0.1 <0.1.0-0 -const replaceTildes = (comp, options) => { - return comp - .trim() - .split(/\s+/) - .map((c) => replaceTilde(c, options)) - .join(' ') -} - -const replaceTilde = (comp, options) => { - const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] - return comp.replace(r, (_, M, m, p, pr) => { - debug('tilde', comp, _, M, m, p, pr) - let ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0 <${+M + 1}.0.0-0` - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0-0 - ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` - } else if (pr) { - debug('replaceTilde pr', pr) - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } else { - // ~1.2.3 == >=1.2.3 <1.3.0-0 - ret = `>=${M}.${m}.${p - } <${M}.${+m + 1}.0-0` - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 -// ^1.2.3 --> >=1.2.3 <2.0.0-0 -// ^1.2.0 --> >=1.2.0 <2.0.0-0 -// ^0.0.1 --> >=0.0.1 <0.0.2-0 -// ^0.1.0 --> >=0.1.0 <0.2.0-0 -const replaceCarets = (comp, options) => { - return comp - .trim() - .split(/\s+/) - .map((c) => replaceCaret(c, options)) - .join(' ') -} - -const replaceCaret = (comp, options) => { - debug('caret', comp, 
options) - const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] - const z = options.includePrerelease ? '-0' : '' - return comp.replace(r, (_, M, m, p, pr) => { - debug('caret', comp, _, M, m, p, pr) - let ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` - } else if (isX(p)) { - if (M === '0') { - ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` - } else { - ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p}-${pr - } <${+M + 1}.0.0-0` - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = `>=${M}.${m}.${p - }${z} <${M}.${m}.${+p + 1}-0` - } else { - ret = `>=${M}.${m}.${p - }${z} <${M}.${+m + 1}.0-0` - } - } else { - ret = `>=${M}.${m}.${p - } <${+M + 1}.0.0-0` - } - } - - debug('caret return', ret) - return ret - }) -} - -const replaceXRanges = (comp, options) => { - debug('replaceXRanges', comp, options) - return comp - .split(/\s+/) - .map((c) => replaceXRange(c, options)) - .join(' ') -} - -const replaceXRange = (comp, options) => { - comp = comp.trim() - const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] - return comp.replace(r, (ret, gtlt, M, m, p, pr) => { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - const xM = isX(M) - const xm = xM || isX(m) - const xp = xm || isX(p) - const anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - // if we're including prereleases in the match, then we need - // to fix this to -0, the lowest possible prerelease value - pr = options.includePrerelease ? 
'-0' : '' - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0-0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - if (gtlt === '<') { - pr = '-0' - } - - ret = `${gtlt + M}.${m}.${p}${pr}` - } else if (xm) { - ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` - } else if (xp) { - ret = `>=${M}.${m}.0${pr - } <${M}.${+m + 1}.0-0` - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -const replaceStars = (comp, options) => { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp - .trim() - .replace(re[t.STAR], '') -} - -const replaceGTE0 = (comp, options) => { - debug('replaceGTE0', comp, options) - return comp - .trim() - .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') -} - -// This function is passed to string.replace(re[t.HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 -// TODO build? -const hyphenReplace = incPr => ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr) => { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = `>=${fM}.0.0${incPr ? '-0' : ''}` - } else if (isX(fp)) { - from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}` - } else if (fpr) { - from = `>=${from}` - } else { - from = `>=${from}${incPr ? '-0' : ''}` - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = `<${+tM + 1}.0.0-0` - } else if (isX(tp)) { - to = `<${tM}.${+tm + 1}.0-0` - } else if (tpr) { - to = `<=${tM}.${tm}.${tp}-${tpr}` - } else if (incPr) { - to = `<${tM}.${tm}.${+tp + 1}-0` - } else { - to = `<=${to}` - } - - return `${from} ${to}`.trim() -} - -const testSet = (set, version, options) => { - for (let i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (let i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === Comparator.ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - const allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. 
- return false - } - - return true -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/semver.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/semver.js deleted file mode 100644 index 13e66ce4..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/classes/semver.js +++ /dev/null @@ -1,302 +0,0 @@ -const debug = require('../internal/debug') -const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') -const { safeRe: re, t } = require('../internal/re') - -const parseOptions = require('../internal/parse-options') -const { compareIdentifiers } = require('../internal/identifiers') -class SemVer { - constructor (version, options) { - options = parseOptions(options) - - if (version instanceof SemVer) { - if (version.loose === !!options.loose && - version.includePrerelease === !!options.includePrerelease) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError( - `version is longer than ${MAX_LENGTH} characters` - ) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - // this isn't actually relevant for versions, but keep it so that we - // don't run into trouble passing this.options around. - this.includePrerelease = !!options.includePrerelease - - const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) - - if (!m) { - throw new TypeError(`Invalid Version: ${version}`) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map((id) => { - if (/^[0-9]+$/.test(id)) { - const num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() - } - - format () { - this.version = `${this.major}.${this.minor}.${this.patch}` - if (this.prerelease.length) { - this.version += `-${this.prerelease.join('.')}` - } - return this.version - } - - toString () { - return this.version - } - - compare (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - if (typeof other === 'string' && other === this.version) { - return 0 - } - other = new SemVer(other, this.options) - } - - if (other.version === this.version) { - return 0 - } - - return this.compareMain(other) || this.comparePre(other) - } - - compareMain (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return ( - compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) - ) - } - - comparePre (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if 
(!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - let i = 0 - do { - const a = this.prerelease[i] - const b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } - - compareBuild (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - let i = 0 - do { - const a = this.build[i] - const b = other.build[i] - debug('build compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) - } - - // preminor will bump the version up to the next minor release, and immediately - // down to pre-release. premajor and prepatch work the same way. - inc (release, identifier, identifierBase) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier, identifierBase) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier, identifierBase) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier, identifierBase) - this.inc('pre', identifier, identifierBase) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. 
- case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier, identifierBase) - } - this.inc('pre', identifier, identifierBase) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if ( - this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0 - ) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. - case 'pre': { - const base = Number(identifierBase) ? 
1 : 0 - - if (!identifier && identifierBase === false) { - throw new Error('invalid increment argument: identifier is empty') - } - - if (this.prerelease.length === 0) { - this.prerelease = [base] - } else { - let i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - if (identifier === this.prerelease.join('.') && identifierBase === false) { - throw new Error('invalid increment argument: identifier already exists') - } - this.prerelease.push(base) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - let prerelease = [identifier, base] - if (identifierBase === false) { - prerelease = [identifier] - } - if (compareIdentifiers(this.prerelease[0], identifier) === 0) { - if (isNaN(this.prerelease[1])) { - this.prerelease = prerelease - } - } else { - this.prerelease = prerelease - } - } - break - } - default: - throw new Error(`invalid increment argument: ${release}`) - } - this.raw = this.format() - if (this.build.length) { - this.raw += `+${this.build.join('.')}` - } - return this - } -} - -module.exports = SemVer diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/coerce.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/coerce.js deleted file mode 100644 index b378dcea..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/coerce.js +++ /dev/null @@ -1,60 +0,0 @@ -const SemVer = require('../classes/semver') -const parse = require('./parse') -const { safeRe: re, t } = require('../internal/re') - -const coerce = (version, options) => { - if (version instanceof SemVer) { - return version - } - - if (typeof version === 'number') { - version = String(version) - } - - if (typeof version !== 'string') { - return null - } - - options = options || {} - - let match = null - if (!options.rtl) { - match = 
version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) - } else { - // Find the right-most coercible string that does not share - // a terminus with a more left-ward coercible string. - // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' - // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' - // - // Walk through the string checking with a /g regexp - // Manually set the index so as to pick up overlapping matches. - // Stop when we get a match that ends at the string end, since no - // coercible string can be more right-ward without the same terminus. - const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] - let next - while ((next = coerceRtlRegex.exec(version)) && - (!match || match.index + match[0].length !== version.length) - ) { - if (!match || - next.index + next[0].length !== match.index + match[0].length) { - match = next - } - coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length - } - // leave it in a clean state - coerceRtlRegex.lastIndex = -1 - } - - if (match === null) { - return null - } - - const major = match[2] - const minor = match[3] || '0' - const patch = match[4] || '0' - const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' - const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' - - return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) -} -module.exports = coerce diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/diff.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/diff.js deleted file mode 100644 index fc224e30..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/diff.js +++ /dev/null @@ -1,65 +0,0 @@ -const parse = require('./parse.js') - -const diff = (version1, version2) => { - const v1 = parse(version1, null, true) - const v2 = parse(version2, null, true) - const comparison = v1.compare(v2) - - if (comparison === 0) { - return null - } - - const v1Higher = comparison > 0 - const highVersion = v1Higher ? v1 : v2 - const lowVersion = v1Higher ? v2 : v1 - const highHasPre = !!highVersion.prerelease.length - const lowHasPre = !!lowVersion.prerelease.length - - if (lowHasPre && !highHasPre) { - // Going from prerelease -> no prerelease requires some special casing - - // If the low version has only a major, then it will always be a major - // Some examples: - // 1.0.0-1 -> 1.0.0 - // 1.0.0-1 -> 1.1.1 - // 1.0.0-1 -> 2.0.0 - if (!lowVersion.patch && !lowVersion.minor) { - return 'major' - } - - // Otherwise it can be determined by checking the high version - - if (highVersion.patch) { - // anything higher than a patch bump would result in the wrong version - return 'patch' - } - - if (highVersion.minor) { - // anything higher than a minor bump would result in the wrong version - return 'minor' - } - - // bumping major/minor/patch all have same result - return 'major' - } - - // add the `pre` prefix if we are going to a prerelease version - const prefix = highHasPre ? 
'pre' : '' - - if (v1.major !== v2.major) { - return prefix + 'major' - } - - if (v1.minor !== v2.minor) { - return prefix + 'minor' - } - - if (v1.patch !== v2.patch) { - return prefix + 'patch' - } - - // high and low are preleases - return 'prerelease' -} - -module.exports = diff diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/inc.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/inc.js deleted file mode 100644 index 7670b1be..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/inc.js +++ /dev/null @@ -1,19 +0,0 @@ -const SemVer = require('../classes/semver') - -const inc = (version, release, options, identifier, identifierBase) => { - if (typeof (options) === 'string') { - identifierBase = identifier - identifier = options - options = undefined - } - - try { - return new SemVer( - version instanceof SemVer ? version.version : version, - options - ).inc(release, identifier, identifierBase).version - } catch (er) { - return null - } -} -module.exports = inc diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/parse.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/parse.js deleted file mode 100644 index 459b3b17..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/functions/parse.js +++ /dev/null @@ -1,16 +0,0 @@ -const SemVer = require('../classes/semver') -const parse = (version, options, throwErrors = false) => { - if (version instanceof SemVer) { - return version - } - try { - return new SemVer(version, options) - } catch (er) { - if (!throwErrors) { - return null - } - throw er - } -} - -module.exports = parse diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/index.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/index.js deleted file mode 100644 index 86d42ac1..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/index.js +++ /dev/null @@ -1,89 +0,0 @@ -// just pre-load all the 
stuff that index.js lazily exports -const internalRe = require('./internal/re') -const constants = require('./internal/constants') -const SemVer = require('./classes/semver') -const identifiers = require('./internal/identifiers') -const parse = require('./functions/parse') -const valid = require('./functions/valid') -const clean = require('./functions/clean') -const inc = require('./functions/inc') -const diff = require('./functions/diff') -const major = require('./functions/major') -const minor = require('./functions/minor') -const patch = require('./functions/patch') -const prerelease = require('./functions/prerelease') -const compare = require('./functions/compare') -const rcompare = require('./functions/rcompare') -const compareLoose = require('./functions/compare-loose') -const compareBuild = require('./functions/compare-build') -const sort = require('./functions/sort') -const rsort = require('./functions/rsort') -const gt = require('./functions/gt') -const lt = require('./functions/lt') -const eq = require('./functions/eq') -const neq = require('./functions/neq') -const gte = require('./functions/gte') -const lte = require('./functions/lte') -const cmp = require('./functions/cmp') -const coerce = require('./functions/coerce') -const Comparator = require('./classes/comparator') -const Range = require('./classes/range') -const satisfies = require('./functions/satisfies') -const toComparators = require('./ranges/to-comparators') -const maxSatisfying = require('./ranges/max-satisfying') -const minSatisfying = require('./ranges/min-satisfying') -const minVersion = require('./ranges/min-version') -const validRange = require('./ranges/valid') -const outside = require('./ranges/outside') -const gtr = require('./ranges/gtr') -const ltr = require('./ranges/ltr') -const intersects = require('./ranges/intersects') -const simplifyRange = require('./ranges/simplify') -const subset = require('./ranges/subset') -module.exports = { - parse, - valid, - clean, - inc, - diff, - 
major, - minor, - patch, - prerelease, - compare, - rcompare, - compareLoose, - compareBuild, - sort, - rsort, - gt, - lt, - eq, - neq, - gte, - lte, - cmp, - coerce, - Comparator, - Range, - satisfies, - toComparators, - maxSatisfying, - minSatisfying, - minVersion, - validRange, - outside, - gtr, - ltr, - intersects, - simplifyRange, - subset, - SemVer, - re: internalRe.re, - src: internalRe.src, - tokens: internalRe.t, - SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, - RELEASE_TYPES: constants.RELEASE_TYPES, - compareIdentifiers: identifiers.compareIdentifiers, - rcompareIdentifiers: identifiers.rcompareIdentifiers, -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/constants.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/constants.js deleted file mode 100644 index 94be1c57..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/constants.js +++ /dev/null @@ -1,35 +0,0 @@ -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -const SEMVER_SPEC_VERSION = '2.0.0' - -const MAX_LENGTH = 256 -const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || -/* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -const MAX_SAFE_COMPONENT_LENGTH = 16 - -// Max safe length for a build identifier. The max length minus 6 characters for -// the shortest version with a build 0.0.0+BUILD. 
-const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 - -const RELEASE_TYPES = [ - 'major', - 'premajor', - 'minor', - 'preminor', - 'patch', - 'prepatch', - 'prerelease', -] - -module.exports = { - MAX_LENGTH, - MAX_SAFE_COMPONENT_LENGTH, - MAX_SAFE_BUILD_LENGTH, - MAX_SAFE_INTEGER, - RELEASE_TYPES, - SEMVER_SPEC_VERSION, - FLAG_INCLUDE_PRERELEASE: 0b001, - FLAG_LOOSE: 0b010, -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/parse-options.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/parse-options.js deleted file mode 100644 index 10d64ce0..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/parse-options.js +++ /dev/null @@ -1,15 +0,0 @@ -// parse out just the options we care about -const looseOption = Object.freeze({ loose: true }) -const emptyOpts = Object.freeze({ }) -const parseOptions = options => { - if (!options) { - return emptyOpts - } - - if (typeof options !== 'object') { - return looseOption - } - - return options -} -module.exports = parseOptions diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/re.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/re.js deleted file mode 100644 index fd8920e7..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/internal/re.js +++ /dev/null @@ -1,217 +0,0 @@ -const { - MAX_SAFE_COMPONENT_LENGTH, - MAX_SAFE_BUILD_LENGTH, - MAX_LENGTH, -} = require('./constants') -const debug = require('./debug') -exports = module.exports = {} - -// The actual regexps go on exports.re -const re = exports.re = [] -const safeRe = exports.safeRe = [] -const src = exports.src = [] -const t = exports.t = {} -let R = 0 - -const LETTERDASHNUMBER = '[a-zA-Z0-9-]' - -// Replace some greedy regex tokens to prevent regex dos issues. These regex are -// used internally via the safeRe object since all inputs in this library get -// normalized first to trim and collapse all extra whitespace. 
The original -// regexes are exported for userland consumption and lower level usage. A -// future breaking change could export the safer regex only with a note that -// all input should have extra whitespace removed. -const safeRegexReplacements = [ - ['\\s', 1], - ['\\d', MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], -] - -const makeSafeRegex = (value) => { - for (const [token, max] of safeRegexReplacements) { - value = value - .split(`${token}*`).join(`${token}{0,${max}}`) - .split(`${token}+`).join(`${token}{1,${max}}`) - } - return value -} - -const createToken = (name, value, isGlobal) => { - const safe = makeSafeRegex(value) - const index = R++ - debug(name, index, value) - t[name] = index - src[index] = value - re[index] = new RegExp(value, isGlobal ? 'g' : undefined) - safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) -} - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. - -createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') -createToken('NUMERICIDENTIFIERLOOSE', '\\d+') - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) - -// ## Main Version -// Three dot-separated numeric identifiers. - -createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})\\.` + - `(${src[t.NUMERICIDENTIFIER]})`) - -createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + - `(${src[t.NUMERICIDENTIFIERLOOSE]})`) - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. 
- -createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] -}|${src[t.NONNUMERICIDENTIFIER]})`) - -createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] -}|${src[t.NONNUMERICIDENTIFIER]})`) - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] -}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) - -createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] -}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] -}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -createToken('FULLPLAIN', `v?${src[t.MAINVERSION] -}${src[t.PRERELEASE]}?${ - src[t.BUILD]}?`) - -createToken('FULL', `^${src[t.FULLPLAIN]}$`) - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] -}${src[t.PRERELEASELOOSE]}?${ - src[t.BUILD]}?`) - -createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) - -createToken('GTLT', '((?:<|>)?=?)') - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. 
-createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) -createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) - -createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + - `(?:${src[t.PRERELEASE]})?${ - src[t.BUILD]}?` + - `)?)?`) - -createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + - `(?:${src[t.PRERELEASELOOSE]})?${ - src[t.BUILD]}?` + - `)?)?`) - -createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) -createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -createToken('COERCEPLAIN', `${'(^|[^\\d])' + - '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + - `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) -createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) -createToken('COERCEFULL', src[t.COERCEPLAIN] + - `(?:${src[t.PRERELEASE]})?` + - `(?:${src[t.BUILD]})?` + - `(?:$|[^\\d])`) -createToken('COERCERTL', src[t.COERCE], true) -createToken('COERCERTLFULL', src[t.COERCEFULL], true) - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -createToken('LONETILDE', '(?:~>?)') - -createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) -exports.tildeTrimReplace = '$1~' - -createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) -createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) - -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -createToken('LONECARET', '(?:\\^)') - -createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) -exports.caretTrimReplace = '$1^' - -createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) -createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) -createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] -}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) -exports.comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAIN]})` + - `\\s*$`) - -createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + - `\\s+-\\s+` + - `(${src[t.XRANGEPLAINLOOSE]})` + - `\\s*$`) - -// Star ranges basically just allow anything at all. 
-createToken('STAR', '(<|>)?=?\\s*\\*') -// >=0.0.0 is like a star -createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') -createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/package.json b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/package.json deleted file mode 100644 index 663d3701..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/package.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "name": "semver", - "version": "7.6.3", - "description": "The semantic version parser used by npm.", - "main": "index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "benchmark": "^2.1.4", - "tap": "^16.0.0" - }, - "license": "ISC", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/node-semver.git" - }, - "bin": { - "semver": "bin/semver.js" - }, - "files": [ - "bin/", - "lib/", - "classes/", - "functions/", - "internal/", - "ranges/", - "index.js", - "preload.js", - "range.bnf" - ], - "tap": { - "timeout": 30, - "coverage-map": "map.js", - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, - "engines": { - "node": ">=10" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", - "engines": ">=10", - "distPaths": [ - "classes/", - "functions/", - "internal/", - "ranges/", - "index.js", - "preload.js", - "range.bnf" - ], - "allowPaths": [ - "/classes/", - "/functions/", - "/internal/", - "/ranges/", - "/index.js", - "/preload.js", - "/range.bnf", - "/benchmarks" - ], - "publish": "true" - } -} diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/intersects.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/intersects.js deleted file mode 100644 index e0e9b7ce..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/intersects.js +++ /dev/null @@ -1,7 +0,0 @@ -const Range = require('../classes/range') -const intersects = (r1, r2, options) => { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2, options) -} -module.exports = intersects diff --git a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/subset.js b/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/subset.js deleted file mode 100644 index 1e5c2683..00000000 --- a/node_modules/@mapbox/node-pre-gyp/node_modules/semver/ranges/subset.js +++ /dev/null @@ -1,247 +0,0 @@ -const Range = require('../classes/range.js') -const Comparator = require('../classes/comparator.js') -const { ANY } = Comparator -const satisfies = require('../functions/satisfies.js') -const compare = require('../functions/compare.js') - -// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: -// - Every simple range `r1, r2, ...` is a null set, OR -// - Every simple range `r1, r2, ...` which is not a null set is a subset of -// some `R1, R2, ...` -// -// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: -// - If c is only the ANY comparator -// - If C is only the ANY comparator, return true -// - Else if in prerelease mode, return false -// - else replace c with `[>=0.0.0]` -// - If C is only the ANY comparator -// - if 
in prerelease mode, return true -// - else replace C with `[>=0.0.0]` -// - Let EQ be the set of = comparators in c -// - If EQ is more than one, return true (null set) -// - Let GT be the highest > or >= comparator in c -// - Let LT be the lowest < or <= comparator in c -// - If GT and LT, and GT.semver > LT.semver, return true (null set) -// - If any C is a = range, and GT or LT are set, return false -// - If EQ -// - If GT, and EQ does not satisfy GT, return true (null set) -// - If LT, and EQ does not satisfy LT, return true (null set) -// - If EQ satisfies every C, return true -// - Else return false -// - If GT -// - If GT.semver is lower than any > or >= comp in C, return false -// - If GT is >=, and GT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the GT.semver tuple, return false -// - If LT -// - If LT.semver is greater than any < or <= comp in C, return false -// - If LT is <=, and LT.semver does not satisfy every C, return false -// - If GT.semver has a prerelease, and not in prerelease mode -// - If no C has a prerelease and the LT.semver tuple, return false -// - Else return true - -const subset = (sub, dom, options = {}) => { - if (sub === dom) { - return true - } - - sub = new Range(sub, options) - dom = new Range(dom, options) - let sawNonNull = false - - OUTER: for (const simpleSub of sub.set) { - for (const simpleDom of dom.set) { - const isSub = simpleSubset(simpleSub, simpleDom, options) - sawNonNull = sawNonNull || isSub !== null - if (isSub) { - continue OUTER - } - } - // the null set is a subset of everything, but null simple ranges in - // a complex range should be ignored. so if we saw a non-null range, - // then we know this isn't a subset, but if EVERY simple range was null, - // then it is a subset. 
- if (sawNonNull) { - return false - } - } - return true -} - -const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] -const minimumVersion = [new Comparator('>=0.0.0')] - -const simpleSubset = (sub, dom, options) => { - if (sub === dom) { - return true - } - - if (sub.length === 1 && sub[0].semver === ANY) { - if (dom.length === 1 && dom[0].semver === ANY) { - return true - } else if (options.includePrerelease) { - sub = minimumVersionWithPreRelease - } else { - sub = minimumVersion - } - } - - if (dom.length === 1 && dom[0].semver === ANY) { - if (options.includePrerelease) { - return true - } else { - dom = minimumVersion - } - } - - const eqSet = new Set() - let gt, lt - for (const c of sub) { - if (c.operator === '>' || c.operator === '>=') { - gt = higherGT(gt, c, options) - } else if (c.operator === '<' || c.operator === '<=') { - lt = lowerLT(lt, c, options) - } else { - eqSet.add(c.semver) - } - } - - if (eqSet.size > 1) { - return null - } - - let gtltComp - if (gt && lt) { - gtltComp = compare(gt.semver, lt.semver, options) - if (gtltComp > 0) { - return null - } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { - return null - } - } - - // will iterate one or zero times - for (const eq of eqSet) { - if (gt && !satisfies(eq, String(gt), options)) { - return null - } - - if (lt && !satisfies(eq, String(lt), options)) { - return null - } - - for (const c of dom) { - if (!satisfies(eq, String(c), options)) { - return false - } - } - - return true - } - - let higher, lower - let hasDomLT, hasDomGT - // if the subset has a prerelease, we need a comparator in the superset - // with the same tuple and a prerelease, or it's not a subset - let needDomLTPre = lt && - !options.includePrerelease && - lt.semver.prerelease.length ? lt.semver : false - let needDomGTPre = gt && - !options.includePrerelease && - gt.semver.prerelease.length ? 
gt.semver : false - // exception: <1.2.3-0 is the same as <1.2.3 - if (needDomLTPre && needDomLTPre.prerelease.length === 1 && - lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { - needDomLTPre = false - } - - for (const c of dom) { - hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' - hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' - if (gt) { - if (needDomGTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomGTPre.major && - c.semver.minor === needDomGTPre.minor && - c.semver.patch === needDomGTPre.patch) { - needDomGTPre = false - } - } - if (c.operator === '>' || c.operator === '>=') { - higher = higherGT(gt, c, options) - if (higher === c && higher !== gt) { - return false - } - } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { - return false - } - } - if (lt) { - if (needDomLTPre) { - if (c.semver.prerelease && c.semver.prerelease.length && - c.semver.major === needDomLTPre.major && - c.semver.minor === needDomLTPre.minor && - c.semver.patch === needDomLTPre.patch) { - needDomLTPre = false - } - } - if (c.operator === '<' || c.operator === '<=') { - lower = lowerLT(lt, c, options) - if (lower === c && lower !== lt) { - return false - } - } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { - return false - } - } - if (!c.operator && (lt || gt) && gtltComp !== 0) { - return false - } - } - - // if there was a < or >, and nothing in the dom, then must be false - // UNLESS it was limited by another range in the other direction. - // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 - if (gt && hasDomLT && !lt && gtltComp !== 0) { - return false - } - - if (lt && hasDomGT && !gt && gtltComp !== 0) { - return false - } - - // we needed a prerelease range in a specific tuple, but didn't get one - // then this isn't a subset. 
eg >=1.2.3-pre is not a subset of >=1.0.0, - // because it includes prereleases in the 1.2.3 tuple - if (needDomGTPre || needDomLTPre) { - return false - } - - return true -} - -// >=1.2.3 is lower than >1.2.3 -const higherGT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp > 0 ? a - : comp < 0 ? b - : b.operator === '>' && a.operator === '>=' ? b - : a -} - -// <=1.2.3 is higher than <1.2.3 -const lowerLT = (a, b, options) => { - if (!a) { - return b - } - const comp = compare(a.semver, b.semver, options) - return comp < 0 ? a - : comp > 0 ? b - : b.operator === '<' && a.operator === '<=' ? b - : a -} - -module.exports = subset diff --git a/node_modules/@mapbox/node-pre-gyp/package.json b/node_modules/@mapbox/node-pre-gyp/package.json deleted file mode 100644 index 5e1d6fd5..00000000 --- a/node_modules/@mapbox/node-pre-gyp/package.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "name": "@mapbox/node-pre-gyp", - "description": "Node.js native addon binary install tool", - "version": "1.0.11", - "keywords": [ - "native", - "addon", - "module", - "c", - "c++", - "bindings", - "binary" - ], - "license": "BSD-3-Clause", - "author": "Dane Springmeyer ", - "repository": { - "type": "git", - "url": "git://github.com/mapbox/node-pre-gyp.git" - }, - "bin": "./bin/node-pre-gyp", - "main": "./lib/node-pre-gyp.js", - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "devDependencies": { - "@mapbox/cloudfriend": "^5.1.0", - "@mapbox/eslint-config-mapbox": "^3.0.0", - "aws-sdk": "^2.1087.0", - "codecov": "^3.8.3", - "eslint": "^7.32.0", - "eslint-plugin-node": "^11.1.0", - "mock-aws-s3": "^4.0.2", - "nock": "^12.0.3", - "node-addon-api": "^4.3.0", - "nyc": "^15.1.0", - "tape": "^5.5.2", - "tar-fs": "^2.1.1" - }, - "nyc": { - "all": 
true, - "skip-full": false, - "exclude": [ - "test/**" - ] - }, - "scripts": { - "coverage": "nyc --all --include index.js --include lib/ npm test", - "upload-coverage": "nyc report --reporter json && codecov --clear --flags=unit --file=./coverage/coverage-final.json", - "lint": "eslint bin/node-pre-gyp lib/*js lib/util/*js test/*js scripts/*js", - "fix": "npm run lint -- --fix", - "update-crosswalk": "node scripts/abi_crosswalk.js", - "test": "tape test/*test.js" - } -} diff --git a/node_modules/bcrypt/.github/workflows/ci.yaml b/node_modules/bcrypt/.github/workflows/ci.yaml deleted file mode 100644 index dc3f12f4..00000000 --- a/node_modules/bcrypt/.github/workflows/ci.yaml +++ /dev/null @@ -1,59 +0,0 @@ -name: ci - -on: - push: - branches: - - master - pull_request: - branches: - - master - -jobs: - build: - strategy: - matrix: - os: [ubuntu-20.04, macos-11.0, windows-2019] - nodeVersion: [14, 16, 18, 20] - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v3 - - name: Use Node.js ${{ matrix.nodeVersion }} - uses: actions/setup-node@v3 - with: - node-version: ${{ matrix.nodeVersion }} - - name: Test - run: npm test - - name: Package - if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master') - run: npx node-pre-gyp package - - name: Upload - uses: actions/upload-artifact@v3 - if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')) - with: - name: bcrypt-lib-${{ matrix.os }}-${{ matrix.nodeVersion }} - path: build/stage/**/bcrypt_lib*.tar.gz - - build-alpine: - runs-on: ubuntu-latest - strategy: - matrix: - nodeVersion: [14, 16, 18, 20] - container: - image: node:${{ matrix.nodeVersion }}-alpine - steps: - - uses: actions/checkout@v3 - - name: Install dependencies - run: | - apk add make g++ python3 - - name: Test - run: | - npm test --unsafe-perm - - name: Package - if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master') - 
run: npx node-pre-gyp package --unsafe-perm - - name: Upload - if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')) - uses: actions/upload-artifact@v3 - with: - name: bcrypt-lib-alpine-${{ matrix.nodeVersion }} - path: build/stage/**/bcrypt_lib*.tar.gz diff --git a/node_modules/bcrypt/CHANGELOG.md b/node_modules/bcrypt/CHANGELOG.md deleted file mode 100644 index f2fcb471..00000000 --- a/node_modules/bcrypt/CHANGELOG.md +++ /dev/null @@ -1,178 +0,0 @@ -# 5.1.0 (2022-10-06) - * Update `node-pre-gyp` to 1.0.11 - -# 5.1.0 (2022-10-06) - * Update `node-pre-gyp` to 1.0.10 - * Replace `nodeunit` with `jest` as the testing library - -# 5.0.1 (2021-02-22) - - * Update `node-pre-gyp` to 1.0.0 - -# 5.0.0 (2020-06-02) - - * Fix the bcrypt "wrap-around" bug. It affects passwords with lengths >= 255. - It is uncommon but it's a bug nevertheless. Previous attempts to fix the bug - was unsuccessful. - * Experimental support for z/OS - * Fix a bug related to NUL in password input - * Update `node-pre-gyp` to 0.15.0 - -# 4.0.1 (2020-02-27) - - * Fix compilation errors in Alpine linux - -# 4.0.0 (2020-02-17) - - * Switch to NAPI bcrypt - * Drop support for NodeJS 8 - -# 3.0.8 (2019-12-31) - - * Update `node-pre-gyp` to 0.14 - * Pre-built binaries for NodeJS 13 - -# 3.0.7 (2019-10-18) - - * Update `nan` to 2.14.0 - * Update `node-pre-gyp` to 0.13 - -# 3.0.6 (2019-04-11) - - * Update `nan` to 2.13.2 - -# 3.0.5 (2019-03-19) - - * Update `nan` to 2.13.1 - * NodeJS 12 compatibility - * Remove `node-pre-gyp` from bundled dependencies - -# 3.0.4-napi (2019-03-08) - - * Sync N-API bcrypt with NAN bcrypt - -# 3.0.4 (2019-02-07) - - * Fix GCC, NAN and V8 deprecation warnings - -# 3.0.3 (2018-12-19) - - * Update `nan` to 2.12.1 - -# 3.0.2 (2018-10-18) - - * Update `nan` to 2.11.1 - -# 3.0.1 (2018-09-20) - - * Update `nan` to 2.11.0 - -# 3.0.0 (2018-07-06) - - * Drop support for NodeJS <= 4 - -# 2.0.1 (2018-04-20) - - * Update 
`node-pre-gyp` to allow downloading prebuilt modules - -# 2.0.0 (2018-04-07) - - * Make `2b` the default bcrypt version - -# 1.1.0-napi (2018-01-21) - - * Initial support for [N-API](https://nodejs.org/api/n-api.html) - -# 1.0.3 (2016-08-23) - - * update to nan v2.6.2 for NodeJS 8 support - * Fix: use npm scripts instead of node-gyp directly. - -# 1.0.2 (2016-12-31) - - * Fix `compare` promise rejection with invalid arguments - -# 1.0.1 (2016-12-07) - - * Fix destructuring imports with promises - -# 1.0.0 (2016-12-04) - - * add Promise support (commit 2488473) - -# 0.8.7 (2016-06-09) - - * update nan to 2.3.5 for improved node v6 support - -# 0.8.6 (2016-04-20) - - * update nan for node v6 support - -# 0.8.5 (2015-08-12) - - * update to nan v2 (adds support for iojs 3) - -# 0.8.4 (2015-07-24) - - * fix deprecation warning for the Encode API - -# 0.8.3 (2015-05-06) - - * update nan to 1.8.4 for iojs 2.x support - -# 0.8.2 (2015-03-28) - - * always use callback for generating random bytes to avoid blocking - -# 0.8.1 (2015-01-18) - * update NaN to 1.5.0 for iojs support - -# 0.8.0 (2014-08-03) - * migrate to NAN for bindings - -# v0.5.0 - * Fix for issue around empty string params throwing Errors. - * Method deprecation. - * Upgrade from libeio/ev to libuv. (shtylman) - ** --- NOTE --- Breaks 0.4.x compatability - * EV_MULTIPLICITY compile flag. - -# v0.4.1 - * Thread safety fix around OpenSSL (GH-32). (bnoordhuis - through node) - * C++ code changes using delete and new instead of malloc and free. (shtylman) - * Compile options for speed, zoom. (shtylman) - * Move much of the type and variable checking to the JS. 
(shtylman) - -# v0.4.0 - * Added getRounds function that will tell you the number of rounds within a hash/salt - -# v0.3.2 - * Fix api issue with async salt gen first param - -# v0.3.1 - * Compile under node 0.5.x - -# v0.3.0 - * Internal Refactoring - * Remove pthread dependencies and locking - * Fix compiler warnings and a memory bug - -# v0.2.4 - * Use threadsafe functions instead of pthread mutexes - * salt validation to make sure the salt is of the correct size and format - -# v0.2.3 - * cygwin support - -# v0.2.2 - * Remove dependency on libbsd, use libssl instead - -# v0.2.0 - * Added async functionality - * API changes - * hashpw -> encrypt - * all old sync methods now end with _sync - * Removed libbsd(arc4random) dependency...now uses openssl which is more widely spread - -# v0.1.2 - * Security fix. Wasn't reading rounds in properly and was always only using 4 rounds diff --git a/node_modules/bcrypt/binding.gyp b/node_modules/bcrypt/binding.gyp deleted file mode 100644 index 181dca0f..00000000 --- a/node_modules/bcrypt/binding.gyp +++ /dev/null @@ -1,61 +0,0 @@ -{ - "variables": { - "NODE_VERSION%":" { - const start = Date.now(); - - // genSalt - const salt = await bcrypt.genSalt(10) - console.log('salt: ' + salt); - console.log('salt cb end: ' + (Date.now() - start) + 'ms'); - - // hash - const crypted = await bcrypt.hash('test', salt) - console.log('crypted: ' + crypted); - console.log('crypted cb end: ' + (Date.now() - start) + 'ms'); - console.log('rounds used from hash:', bcrypt.getRounds(crypted)); - - // compare - const res = await bcrypt.compare('test', crypted) - console.log('compared true: ' + res); - console.log('compared true cb end: ' + (Date.now() - start) + 'ms'); - - // compare - const res = await bcrypt.compare('bacon', crypted) - console.log('compared false: ' + res); - console.log('compared false cb end: ' + (Date.now() - start) + 'ms'); - - console.log('end: ' + (Date.now() - start) + 'ms'); -})(); diff --git 
a/node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node b/node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node deleted file mode 100755 index ba87d282..00000000 Binary files a/node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node and /dev/null differ diff --git a/node_modules/bcrypt/package.json b/node_modules/bcrypt/package.json deleted file mode 100644 index 621fc1be..00000000 --- a/node_modules/bcrypt/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "bcrypt", - "description": "A bcrypt library for NodeJS.", - "keywords": [ - "bcrypt", - "password", - "auth", - "authentication", - "encryption", - "crypt", - "crypto" - ], - "main": "./bcrypt", - "version": "5.1.1", - "author": "Nick Campbell (https://github.com/ncb000gt)", - "engines": { - "node": ">= 10.0.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/kelektiv/node.bcrypt.js.git" - }, - "license": "MIT", - "bugs": { - "url": "https://github.com/kelektiv/node.bcrypt.js/issues" - }, - "scripts": { - "test": "npm ci --build-from-source && jest", - "install": "node-pre-gyp install --fallback-to-build" - }, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", - "node-addon-api": "^5.0.0" - }, - "devDependencies": { - "jest": "^29.6.2" - }, - "contributors": [ - "Antonio Salazar Cardozo (https://github.com/Shadowfiend)", - "Van Nguyen (https://github.com/thegoleffect)", - "David Trejo (https://github.com/dtrejo)", - "Ben Glow (https://github.com/pixelglow)", - "NewITFarmer.com <> (https://github.com/newitfarmer)", - "Alfred Westerveld (https://github.com/alfredwesterveld)", - "Vincent Côté-Roy (https://github.com/vincentcr)", - "Lloyd Hilaiel (https://github.com/lloyd)", - "Roman Shtylman (https://github.com/shtylman)", - "Vadim Graboys (https://github.com/vadimg)", - "Ben Noorduis <> (https://github.com/bnoordhuis)", - "Nate Rajlich (https://github.com/tootallnate)", - "Sean McArthur (https://github.com/seanmonstar)", - "Fanie Oosthuysen (https://github.com/weareu)", - 
"Amitosh Swain Mahapatra (https://github.com/Agathver)", - "Corbin Crutchley (https://github.com/crutchcorn)", - "Nicola Del Gobbo (https://github.com/NickNaso)" - ], - "binary": { - "module_name": "bcrypt_lib", - "module_path": "./lib/binding/napi-v{napi_build_version}", - "package_name": "{module_name}-v{version}-napi-v{napi_build_version}-{platform}-{arch}-{libc}.tar.gz", - "host": "https://github.com", - "remote_path": "kelektiv/node.bcrypt.js/releases/download/v{version}", - "napi_versions": [ - 3 - ] - } -} diff --git a/node_modules/detect-libc/README.md b/node_modules/detect-libc/README.md deleted file mode 100644 index 23212fdd..00000000 --- a/node_modules/detect-libc/README.md +++ /dev/null @@ -1,163 +0,0 @@ -# detect-libc - -Node.js module to detect details of the C standard library (libc) -implementation provided by a given Linux system. - -Currently supports detection of GNU glibc and MUSL libc. - -Provides asychronous and synchronous functions for the -family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). - -The version numbers of libc implementations -are not guaranteed to be semver-compliant. - -For previous v1.x releases, please see the -[v1](https://github.com/lovell/detect-libc/tree/v1) branch. - -## Install - -```sh -npm install detect-libc -``` - -## API - -### GLIBC - -```ts -const GLIBC: string = 'glibc'; -``` - -A String constant containing the value `glibc`. - -### MUSL - -```ts -const MUSL: string = 'musl'; -``` - -A String constant containing the value `musl`. - -### family - -```ts -function family(): Promise; -``` - -Resolves asychronously with: - -* `glibc` or `musl` when the libc family can be determined -* `null` when the libc family cannot be determined -* `null` when run on a non-Linux platform - -```js -const { family, GLIBC, MUSL } = require('detect-libc'); - -switch (await family()) { - case GLIBC: ... - case MUSL: ... - case null: ... 
-} -``` - -### familySync - -```ts -function familySync(): string | null; -``` - -Synchronous version of `family()`. - -```js -const { familySync, GLIBC, MUSL } = require('detect-libc'); - -switch (familySync()) { - case GLIBC: ... - case MUSL: ... - case null: ... -} -``` - -### version - -```ts -function version(): Promise; -``` - -Resolves asychronously with: - -* The version when it can be determined -* `null` when the libc family cannot be determined -* `null` when run on a non-Linux platform - -```js -const { version } = require('detect-libc'); - -const v = await version(); -if (v) { - const [major, minor, patch] = v.split('.'); -} -``` - -### versionSync - -```ts -function versionSync(): string | null; -``` - -Synchronous version of `version()`. - -```js -const { versionSync } = require('detect-libc'); - -const v = versionSync(); -if (v) { - const [major, minor, patch] = v.split('.'); -} -``` - -### isNonGlibcLinux - -```ts -function isNonGlibcLinux(): Promise; -``` - -Resolves asychronously with: - -* `false` when the libc family is `glibc` -* `true` when the libc family is not `glibc` -* `false` when run on a non-Linux platform - -```js -const { isNonGlibcLinux } = require('detect-libc'); - -if (await isNonGlibcLinux()) { ... } -``` - -### isNonGlibcLinuxSync - -```ts -function isNonGlibcLinuxSync(): boolean; -``` - -Synchronous version of `isNonGlibcLinux()`. - -```js -const { isNonGlibcLinuxSync } = require('detect-libc'); - -if (isNonGlibcLinuxSync()) { ... } -``` - -## Licensing - -Copyright 2017 Lovell Fuller and others. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html) - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/node_modules/detect-libc/index.d.ts b/node_modules/detect-libc/index.d.ts deleted file mode 100644 index 4c0fb2b0..00000000 --- a/node_modules/detect-libc/index.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2017 Lovell Fuller and others. -// SPDX-License-Identifier: Apache-2.0 - -export const GLIBC: 'glibc'; -export const MUSL: 'musl'; - -export function family(): Promise; -export function familySync(): string | null; - -export function isNonGlibcLinux(): Promise; -export function isNonGlibcLinuxSync(): boolean; - -export function version(): Promise; -export function versionSync(): string | null; diff --git a/node_modules/detect-libc/lib/detect-libc.js b/node_modules/detect-libc/lib/detect-libc.js deleted file mode 100644 index fe499870..00000000 --- a/node_modules/detect-libc/lib/detect-libc.js +++ /dev/null @@ -1,267 +0,0 @@ -// Copyright 2017 Lovell Fuller and others. -// SPDX-License-Identifier: Apache-2.0 - -'use strict'; - -const childProcess = require('child_process'); -const { isLinux, getReport } = require('./process'); -const { LDD_PATH, readFile, readFileSync } = require('./filesystem'); - -let cachedFamilyFilesystem; -let cachedVersionFilesystem; - -const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true'; -let commandOut = ''; - -const safeCommand = () => { - if (!commandOut) { - return new Promise((resolve) => { - childProcess.exec(command, (err, out) => { - commandOut = err ? 
' ' : out; - resolve(commandOut); - }); - }); - } - return commandOut; -}; - -const safeCommandSync = () => { - if (!commandOut) { - try { - commandOut = childProcess.execSync(command, { encoding: 'utf8' }); - } catch (_err) { - commandOut = ' '; - } - } - return commandOut; -}; - -/** - * A String constant containing the value `glibc`. - * @type {string} - * @public - */ -const GLIBC = 'glibc'; - -/** - * A Regexp constant to get the GLIBC Version. - * @type {string} - */ -const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i; - -/** - * A String constant containing the value `musl`. - * @type {string} - * @public - */ -const MUSL = 'musl'; - -const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-'); - -const familyFromReport = () => { - const report = getReport(); - if (report.header && report.header.glibcVersionRuntime) { - return GLIBC; - } - if (Array.isArray(report.sharedObjects)) { - if (report.sharedObjects.some(isFileMusl)) { - return MUSL; - } - } - return null; -}; - -const familyFromCommand = (out) => { - const [getconf, ldd1] = out.split(/[\r\n]+/); - if (getconf && getconf.includes(GLIBC)) { - return GLIBC; - } - if (ldd1 && ldd1.includes(MUSL)) { - return MUSL; - } - return null; -}; - -const getFamilyFromLddContent = (content) => { - if (content.includes('musl')) { - return MUSL; - } - if (content.includes('GNU C Library')) { - return GLIBC; - } - return null; -}; - -const familyFromFilesystem = async () => { - if (cachedFamilyFilesystem !== undefined) { - return cachedFamilyFilesystem; - } - cachedFamilyFilesystem = null; - try { - const lddContent = await readFile(LDD_PATH); - cachedFamilyFilesystem = getFamilyFromLddContent(lddContent); - } catch (e) {} - return cachedFamilyFilesystem; -}; - -const familyFromFilesystemSync = () => { - if (cachedFamilyFilesystem !== undefined) { - return cachedFamilyFilesystem; - } - cachedFamilyFilesystem = null; - try { - const lddContent = readFileSync(LDD_PATH); - cachedFamilyFilesystem 
= getFamilyFromLddContent(lddContent); - } catch (e) {} - return cachedFamilyFilesystem; -}; - -/** - * Resolves with the libc family when it can be determined, `null` otherwise. - * @returns {Promise} - */ -const family = async () => { - let family = null; - if (isLinux()) { - family = await familyFromFilesystem(); - if (!family) { - family = familyFromReport(); - } - if (!family) { - const out = await safeCommand(); - family = familyFromCommand(out); - } - } - return family; -}; - -/** - * Returns the libc family when it can be determined, `null` otherwise. - * @returns {?string} - */ -const familySync = () => { - let family = null; - if (isLinux()) { - family = familyFromFilesystemSync(); - if (!family) { - family = familyFromReport(); - } - if (!family) { - const out = safeCommandSync(); - family = familyFromCommand(out); - } - } - return family; -}; - -/** - * Resolves `true` only when the platform is Linux and the libc family is not `glibc`. - * @returns {Promise} - */ -const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC; - -/** - * Returns `true` only when the platform is Linux and the libc family is not `glibc`. 
- * @returns {boolean} - */ -const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC; - -const versionFromFilesystem = async () => { - if (cachedVersionFilesystem !== undefined) { - return cachedVersionFilesystem; - } - cachedVersionFilesystem = null; - try { - const lddContent = await readFile(LDD_PATH); - const versionMatch = lddContent.match(RE_GLIBC_VERSION); - if (versionMatch) { - cachedVersionFilesystem = versionMatch[1]; - } - } catch (e) {} - return cachedVersionFilesystem; -}; - -const versionFromFilesystemSync = () => { - if (cachedVersionFilesystem !== undefined) { - return cachedVersionFilesystem; - } - cachedVersionFilesystem = null; - try { - const lddContent = readFileSync(LDD_PATH); - const versionMatch = lddContent.match(RE_GLIBC_VERSION); - if (versionMatch) { - cachedVersionFilesystem = versionMatch[1]; - } - } catch (e) {} - return cachedVersionFilesystem; -}; - -const versionFromReport = () => { - const report = getReport(); - if (report.header && report.header.glibcVersionRuntime) { - return report.header.glibcVersionRuntime; - } - return null; -}; - -const versionSuffix = (s) => s.trim().split(/\s+/)[1]; - -const versionFromCommand = (out) => { - const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/); - if (getconf && getconf.includes(GLIBC)) { - return versionSuffix(getconf); - } - if (ldd1 && ldd2 && ldd1.includes(MUSL)) { - return versionSuffix(ldd2); - } - return null; -}; - -/** - * Resolves with the libc version when it can be determined, `null` otherwise. - * @returns {Promise} - */ -const version = async () => { - let version = null; - if (isLinux()) { - version = await versionFromFilesystem(); - if (!version) { - version = versionFromReport(); - } - if (!version) { - const out = await safeCommand(); - version = versionFromCommand(out); - } - } - return version; -}; - -/** - * Returns the libc version when it can be determined, `null` otherwise. 
- * @returns {?string} - */ -const versionSync = () => { - let version = null; - if (isLinux()) { - version = versionFromFilesystemSync(); - if (!version) { - version = versionFromReport(); - } - if (!version) { - const out = safeCommandSync(); - version = versionFromCommand(out); - } - } - return version; -}; - -module.exports = { - GLIBC, - MUSL, - family, - familySync, - isNonGlibcLinux, - isNonGlibcLinuxSync, - version, - versionSync -}; diff --git a/node_modules/detect-libc/lib/process.js b/node_modules/detect-libc/lib/process.js deleted file mode 100644 index ee78ad26..00000000 --- a/node_modules/detect-libc/lib/process.js +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2017 Lovell Fuller and others. -// SPDX-License-Identifier: Apache-2.0 - -'use strict'; - -const isLinux = () => process.platform === 'linux'; - -let report = null; -const getReport = () => { - if (!report) { - /* istanbul ignore next */ - if (isLinux() && process.report) { - const orig = process.report.excludeNetwork; - process.report.excludeNetwork = true; - report = process.report.getReport(); - process.report.excludeNetwork = orig; - } else { - report = {}; - } - } - return report; -}; - -module.exports = { isLinux, getReport }; diff --git a/node_modules/detect-libc/package.json b/node_modules/detect-libc/package.json deleted file mode 100644 index d5adec31..00000000 --- a/node_modules/detect-libc/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "detect-libc", - "version": "2.0.3", - "description": "Node.js module to detect the C standard library (libc) implementation family and version", - "main": "lib/detect-libc.js", - "files": [ - "lib/", - "index.d.ts" - ], - "scripts": { - "test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js", - "bench": "node benchmark/detect-libc", - "bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js" - }, - 
"repository": { - "type": "git", - "url": "git://github.com/lovell/detect-libc" - }, - "keywords": [ - "libc", - "glibc", - "musl" - ], - "author": "Lovell Fuller ", - "contributors": [ - "Niklas Salmoukas ", - "Vinícius Lourenço " - ], - "license": "Apache-2.0", - "devDependencies": { - "ava": "^2.4.0", - "benchmark": "^2.1.4", - "nyc": "^15.1.0", - "proxyquire": "^2.1.3", - "semistandard": "^14.2.3" - }, - "engines": { - "node": ">=8" - } -} diff --git a/node_modules/minipass/LICENSE b/node_modules/minipass/LICENSE deleted file mode 100644 index 97f8e32e..00000000 --- a/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minipass/README.md b/node_modules/minipass/README.md deleted file mode 100644 index 61088093..00000000 --- a/node_modules/minipass/README.md +++ /dev/null @@ -1,769 +0,0 @@ -# minipass - -A _very_ minimal implementation of a [PassThrough -stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) - -[It's very -fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing) -for objects, strings, and buffers. 
- -Supports `pipe()`ing (including multi-`pipe()` and backpressure -transmission), buffering data until either a `data` event handler -or `pipe()` is added (so you don't lose the first chunk), and -most other cases where PassThrough is a good idea. - -There is a `read()` method, but it's much more efficient to -consume data from this stream via `'data'` events or by calling -`pipe()` into some other stream. Calling `read()` requires the -buffer to be flattened in some cases, which requires copying -memory. - -If you set `objectMode: true` in the options, then whatever is -written will be emitted. Otherwise, it'll do a minimal amount of -Buffer copying to ensure proper Streams semantics when `read(n)` -is called. - -`objectMode` can also be set by doing `stream.objectMode = true`, -or by writing any non-string/non-buffer data. `objectMode` cannot -be set to false once it is set. - -This is not a `through` or `through2` stream. It doesn't -transform the data, it just passes it right through. If you want -to transform the data, extend the class, and override the -`write()` method. Once you're done transforming the data however -you want, call `super.write()` with the transform output. 
- -For some examples of streams that extend Minipass in various -ways, check out: - -- [minizlib](http://npm.im/minizlib) -- [fs-minipass](http://npm.im/fs-minipass) -- [tar](http://npm.im/tar) -- [minipass-collect](http://npm.im/minipass-collect) -- [minipass-flush](http://npm.im/minipass-flush) -- [minipass-pipeline](http://npm.im/minipass-pipeline) -- [tap](http://npm.im/tap) -- [tap-parser](http://npm.im/tap-parser) -- [treport](http://npm.im/treport) -- [minipass-fetch](http://npm.im/minipass-fetch) -- [pacote](http://npm.im/pacote) -- [make-fetch-happen](http://npm.im/make-fetch-happen) -- [cacache](http://npm.im/cacache) -- [ssri](http://npm.im/ssri) -- [npm-registry-fetch](http://npm.im/npm-registry-fetch) -- [minipass-json-stream](http://npm.im/minipass-json-stream) -- [minipass-sized](http://npm.im/minipass-sized) - -## Differences from Node.js Streams - -There are several things that make Minipass streams different -from (and in some ways superior to) Node.js core streams. - -Please read these caveats if you are familiar with node-core -streams and intend to use Minipass streams in your programs. - -You can avoid most of these differences entirely (for a very -small performance penalty) by setting `{async: true}` in the -constructor options. - -### Timing - -Minipass streams are designed to support synchronous use-cases. -Thus, data is emitted as soon as it is available, always. It is -buffered until read, but no longer. Another way to look at it is -that Minipass streams are exactly as synchronous as the logic -that writes into them. - -This can be surprising if your code relies on -`PassThrough.write()` always providing data on the next tick -rather than the current one, or being able to call `resume()` and -not have the entire buffer disappear immediately. - -However, without this synchronicity guarantee, there would be no -way for Minipass to achieve the speeds it does, or support the -synchronous use cases that it does. 
Simply put, waiting takes -time. - -This non-deferring approach makes Minipass streams much easier to -reason about, especially in the context of Promises and other -flow-control mechanisms. - -Example: - -```js -// hybrid module, either works -import { Minipass } from 'minipass' -// or: -const { Minipass } = require('minipass') - -const stream = new Minipass() -stream.on('data', () => console.log('data event')) -console.log('before write') -stream.write('hello') -console.log('after write') -// output: -// before write -// data event -// after write -``` - -### Exception: Async Opt-In - -If you wish to have a Minipass stream with behavior that more -closely mimics Node.js core streams, you can set the stream in -async mode either by setting `async: true` in the constructor -options, or by setting `stream.async = true` later on. - -```js -// hybrid module, either works -import { Minipass } from 'minipass' -// or: -const { Minipass } = require('minipass') - -const asyncStream = new Minipass({ async: true }) -asyncStream.on('data', () => console.log('data event')) -console.log('before write') -asyncStream.write('hello') -console.log('after write') -// output: -// before write -// after write -// data event <-- this is deferred until the next tick -``` - -Switching _out_ of async mode is unsafe, as it could cause data -corruption, and so is not enabled. Example: - -```js -import { Minipass } from 'minipass' -const stream = new Minipass({ encoding: 'utf8' }) -stream.on('data', chunk => console.log(chunk)) -stream.async = true -console.log('before writes') -stream.write('hello') -setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist! -stream.write('world') -console.log('after writes') -// hypothetical output would be: -// before writes -// world -// after writes -// hello -// NOT GOOD! -``` - -To avoid this problem, once set into async mode, any attempt to -make the stream sync again will be ignored. 
- -```js -const { Minipass } = require('minipass') -const stream = new Minipass({ encoding: 'utf8' }) -stream.on('data', chunk => console.log(chunk)) -stream.async = true -console.log('before writes') -stream.write('hello') -stream.async = false // <-- no-op, stream already async -stream.write('world') -console.log('after writes') -// actual output: -// before writes -// after writes -// hello -// world -``` - -### No High/Low Water Marks - -Node.js core streams will optimistically fill up a buffer, -returning `true` on all writes until the limit is hit, even if -the data has nowhere to go. Then, they will not attempt to draw -more data in until the buffer size dips below a minimum value. - -Minipass streams are much simpler. The `write()` method will -return `true` if the data has somewhere to go (which is to say, -given the timing guarantees, that the data is already there by -the time `write()` returns). - -If the data has nowhere to go, then `write()` returns false, and -the data sits in a buffer, to be drained out immediately as soon -as anyone consumes it. - -Since nothing is ever buffered unnecessarily, there is much less -copying data, and less bookkeeping about buffer capacity levels. - -### Hazards of Buffering (or: Why Minipass Is So Fast) - -Since data written to a Minipass stream is immediately written -all the way through the pipeline, and `write()` always returns -true/false based on whether the data was fully flushed, -backpressure is communicated immediately to the upstream caller. -This minimizes buffering. 
- -Consider this case: - -```js -const { PassThrough } = require('stream') -const p1 = new PassThrough({ highWaterMark: 1024 }) -const p2 = new PassThrough({ highWaterMark: 1024 }) -const p3 = new PassThrough({ highWaterMark: 1024 }) -const p4 = new PassThrough({ highWaterMark: 1024 }) - -p1.pipe(p2).pipe(p3).pipe(p4) -p4.on('data', () => console.log('made it through')) - -// this returns false and buffers, then writes to p2 on next tick (1) -// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) -// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) -// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' -// on next tick (4) -// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and -// 'drain' on next tick (5) -// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) -// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next -// tick (7) - -p1.write(Buffer.alloc(2048)) // returns false -``` - -Along the way, the data was buffered and deferred at each stage, -and multiple event deferrals happened, for an unblocked pipeline -where it was perfectly safe to write all the way through! - -Furthermore, setting a `highWaterMark` of `1024` might lead -someone reading the code to think an advisory maximum of 1KiB is -being set for the pipeline. However, the actual advisory -buffering level is the _sum_ of `highWaterMark` values, since -each one has its own bucket. 
- -Consider the Minipass case: - -```js -const m1 = new Minipass() -const m2 = new Minipass() -const m3 = new Minipass() -const m4 = new Minipass() - -m1.pipe(m2).pipe(m3).pipe(m4) -m4.on('data', () => console.log('made it through')) - -// m1 is flowing, so it writes the data to m2 immediately -// m2 is flowing, so it writes the data to m3 immediately -// m3 is flowing, so it writes the data to m4 immediately -// m4 is flowing, so it fires the 'data' event immediately, returns true -// m4's write returned true, so m3 is still flowing, returns true -// m3's write returned true, so m2 is still flowing, returns true -// m2's write returned true, so m1 is still flowing, returns true -// No event deferrals or buffering along the way! - -m1.write(Buffer.alloc(2048)) // returns true -``` - -It is extremely unlikely that you _don't_ want to buffer any data -written, or _ever_ buffer data that can be flushed all the way -through. Neither node-core streams nor Minipass ever fail to -buffer written data, but node-core streams do a lot of -unnecessary buffering and pausing. - -As always, the faster implementation is the one that does less -stuff and waits less time to do it. - -### Immediately emit `end` for empty streams (when not paused) - -If a stream is not paused, and `end()` is called before writing -any data into it, then it will emit `end` immediately. - -If you have logic that occurs on the `end` event which you don't -want to potentially happen immediately (for example, closing file -descriptors, moving on to the next entry in an archive parse -stream, etc.) then be sure to call `stream.pause()` on creation, -and then `stream.resume()` once you are ready to respond to the -`end` event. - -However, this is _usually_ not a problem because: - -### Emit `end` When Asked - -One hazard of immediately emitting `'end'` is that you may not -yet have had a chance to add a listener. 
In order to avoid this -hazard, Minipass streams safely re-emit the `'end'` event if a -new listener is added after `'end'` has been emitted. - -Ie, if you do `stream.on('end', someFunction)`, and the stream -has already emitted `end`, then it will call the handler right -away. (You can think of this somewhat like attaching a new -`.then(fn)` to a previously-resolved Promise.) - -To prevent calling handlers multiple times who would not expect -multiple ends to occur, all listeners are removed from the -`'end'` event whenever it is emitted. - -### Emit `error` When Asked - -The most recent error object passed to the `'error'` event is -stored on the stream. If a new `'error'` event handler is added, -and an error was previously emitted, then the event handler will -be called immediately (or on `process.nextTick` in the case of -async streams). - -This makes it much more difficult to end up trying to interact -with a broken stream, if the error handler is added after an -error was previously emitted. - -### Impact of "immediate flow" on Tee-streams - -A "tee stream" is a stream piping to multiple destinations: - -```js -const tee = new Minipass() -t.pipe(dest1) -t.pipe(dest2) -t.write('foo') // goes to both destinations -``` - -Since Minipass streams _immediately_ process any pending data -through the pipeline when a new pipe destination is added, this -can have surprising effects, especially when a stream comes in -from some other function and may or may not have data in its -buffer. - -```js -// WARNING! WILL LOSE DATA! -const src = new Minipass() -src.write('foo') -src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone -src.pipe(dest2) // gets nothing! -``` - -One solution is to create a dedicated tee-stream junction that -pipes to both locations, and then pipe to _that_ instead. 
- -```js -// Safe example: tee to both places -const src = new Minipass() -src.write('foo') -const tee = new Minipass() -tee.pipe(dest1) -tee.pipe(dest2) -src.pipe(tee) // tee gets 'foo', pipes to both locations -``` - -The same caveat applies to `on('data')` event listeners. The -first one added will _immediately_ receive all of the data, -leaving nothing for the second: - -```js -// WARNING! WILL LOSE DATA! -const src = new Minipass() -src.write('foo') -src.on('data', handler1) // receives 'foo' right away -src.on('data', handler2) // nothing to see here! -``` - -Using a dedicated tee-stream can be used in this case as well: - -```js -// Safe example: tee to both data handlers -const src = new Minipass() -src.write('foo') -const tee = new Minipass() -tee.on('data', handler1) -tee.on('data', handler2) -src.pipe(tee) -``` - -All of the hazards in this section are avoided by setting `{ -async: true }` in the Minipass constructor, or by setting -`stream.async = true` afterwards. Note that this does add some -overhead, so should only be done in cases where you are willing -to lose a bit of performance in order to avoid having to refactor -program logic. - -## USAGE - -It's a stream! Use it like a stream and it'll most likely do what -you want. - -```js -import { Minipass } from 'minipass' -const mp = new Minipass(options) // optional: { encoding, objectMode } -mp.write('foo') -mp.pipe(someOtherStream) -mp.end('bar') -``` - -### OPTIONS - -- `encoding` How would you like the data coming _out_ of the - stream to be encoded? Accepts any values that can be passed to - `Buffer.toString()`. -- `objectMode` Emit data exactly as it comes in. This will be - flipped on by default if you write() something other than a - string or Buffer at any point. Setting `objectMode: true` will - prevent setting any encoding value. -- `async` Defaults to `false`. Set to `true` to defer data - emission until next tick. 
This reduces performance slightly, - but makes Minipass streams use timing behavior closer to Node - core streams. See [Timing](#timing) for more details. -- `signal` An `AbortSignal` that will cause the stream to unhook - itself from everything and become as inert as possible. Note - that providing a `signal` parameter will make `'error'` events - no longer throw if they are unhandled, but they will still be - emitted to handlers if any are attached. - -### API - -Implements the user-facing portions of Node.js's `Readable` and -`Writable` streams. - -### Methods - -- `write(chunk, [encoding], [callback])` - Put data in. (Note - that, in the base Minipass class, the same data will come out.) - Returns `false` if the stream will buffer the next write, or - true if it's still in "flowing" mode. -- `end([chunk, [encoding]], [callback])` - Signal that you have - no more data to write. This will queue an `end` event to be - fired when all the data has been consumed. -- `setEncoding(encoding)` - Set the encoding for data coming of - the stream. This can only be done once. -- `pause()` - No more data for a while, please. This also - prevents `end` from being emitted for empty streams until the - stream is resumed. -- `resume()` - Resume the stream. If there's data in the buffer, - it is all discarded. Any buffered events are immediately - emitted. -- `pipe(dest)` - Send all output to the stream provided. When - data is emitted, it is immediately written to any and all pipe - destinations. (Or written on next tick in `async` mode.) -- `unpipe(dest)` - Stop piping to the destination stream. This is - immediate, meaning that any asynchronously queued data will - _not_ make it to the destination when running in `async` mode. - - `options.end` - Boolean, end the destination stream when the - source stream ends. Default `true`. - - `options.proxyErrors` - Boolean, proxy `error` events from - the source stream to the destination stream. 
Note that errors - are _not_ proxied after the pipeline terminates, either due - to the source emitting `'end'` or manually unpiping with - `src.unpipe(dest)`. Default `false`. -- `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are - EventEmitters. Some events are given special treatment, - however. (See below under "events".) -- `promise()` - Returns a Promise that resolves when the stream - emits `end`, or rejects if the stream emits `error`. -- `collect()` - Return a Promise that resolves on `end` with an - array containing each chunk of data that was emitted, or - rejects if the stream emits `error`. Note that this consumes - the stream data. -- `concat()` - Same as `collect()`, but concatenates the data - into a single Buffer object. Will reject the returned promise - if the stream is in objectMode, or if it goes into objectMode - by the end of the data. -- `read(n)` - Consume `n` bytes of data out of the buffer. If `n` - is not provided, then consume all of it. If `n` bytes are not - available, then it returns null. **Note** consuming streams in - this way is less efficient, and can lead to unnecessary Buffer - copying. -- `destroy([er])` - Destroy the stream. If an error is provided, - then an `'error'` event is emitted. If the stream has a - `close()` method, and has not emitted a `'close'` event yet, - then `stream.close()` will be called. Any Promises returned by - `.promise()`, `.collect()` or `.concat()` will be rejected. - After being destroyed, writing to the stream will emit an - error. No more data will be emitted if the stream is destroyed, - even if it was previously buffered. - -### Properties - -- `bufferLength` Read-only. Total number of bytes buffered, or in - the case of objectMode, the total number of objects. -- `encoding` The encoding that has been set. (Setting this is - equivalent to calling `setEncoding(enc)` and has the same - prohibition against setting multiple times.) -- `flowing` Read-only. 
Boolean indicating whether a chunk written - to the stream will be immediately emitted. -- `emittedEnd` Read-only. Boolean indicating whether the end-ish - events (ie, `end`, `prefinish`, `finish`) have been emitted. - Note that listening on any end-ish event will immediateyl - re-emit it if it has already been emitted. -- `writable` Whether the stream is writable. Default `true`. Set - to `false` when `end()` -- `readable` Whether the stream is readable. Default `true`. -- `pipes` An array of Pipe objects referencing streams that this - stream is piping into. -- `destroyed` A getter that indicates whether the stream was - destroyed. -- `paused` True if the stream has been explicitly paused, - otherwise false. -- `objectMode` Indicates whether the stream is in `objectMode`. - Once set to `true`, it cannot be set to `false`. -- `aborted` Readonly property set when the `AbortSignal` - dispatches an `abort` event. - -### Events - -- `data` Emitted when there's data to read. Argument is the data - to read. This is never emitted while not flowing. If a listener - is attached, that will resume the stream. -- `end` Emitted when there's no more data to read. This will be - emitted immediately for empty streams when `end()` is called. - If a listener is attached, and `end` was already emitted, then - it will be emitted again. All listeners are removed when `end` - is emitted. -- `prefinish` An end-ish event that follows the same logic as - `end` and is emitted in the same conditions where `end` is - emitted. Emitted after `'end'`. -- `finish` An end-ish event that follows the same logic as `end` - and is emitted in the same conditions where `end` is emitted. - Emitted after `'prefinish'`. -- `close` An indication that an underlying resource has been - released. Minipass does not emit this event, but will defer it - until after `end` has been emitted, since it throws off some - stream libraries otherwise. 
-- `drain` Emitted when the internal buffer empties, and it is - again suitable to `write()` into the stream. -- `readable` Emitted when data is buffered and ready to be read - by a consumer. -- `resume` Emitted when stream changes state from buffering to - flowing mode. (Ie, when `resume` is called, `pipe` is called, - or a `data` event listener is added.) - -### Static Methods - -- `Minipass.isStream(stream)` Returns `true` if the argument is a - stream, and false otherwise. To be considered a stream, the - object must be either an instance of Minipass, or an - EventEmitter that has either a `pipe()` method, or both - `write()` and `end()` methods. (Pretty much any stream in - node-land will return `true` for this.) - -## EXAMPLES - -Here are some examples of things you can do with Minipass -streams. - -### simple "are you done yet" promise - -```js -mp.promise().then( - () => { - // stream is finished - }, - er => { - // stream emitted an error - } -) -``` - -### collecting - -```js -mp.collect().then(all => { - // all is an array of all the data emitted - // encoding is supported in this case, so - // so the result will be a collection of strings if - // an encoding is specified, or buffers/objects if not. - // - // In an async function, you may do - // const data = await stream.collect() -}) -``` - -### collecting into a single blob - -This is a bit slower because it concatenates the data into one -chunk for you, but if you're going to do it yourself anyway, it's -convenient this way: - -```js -mp.concat().then(onebigchunk => { - // onebigchunk is a string if the stream - // had an encoding set, or a buffer otherwise. -}) -``` - -### iteration - -You can iterate over streams synchronously or asynchronously in -platforms that support it. - -Synchronous iteration will end when the currently available data -is consumed, even if the `end` event has not been reached. 
In -string and buffer mode, the data is concatenated, so unless -multiple writes are occurring in the same tick as the `read()`, -sync iteration loops will generally only have a single iteration. - -To consume chunks in this way exactly as they have been written, -with no flattening, create the stream with the `{ objectMode: -true }` option. - -```js -const mp = new Minipass({ objectMode: true }) -mp.write('a') -mp.write('b') -for (let letter of mp) { - console.log(letter) // a, b -} -mp.write('c') -mp.write('d') -for (let letter of mp) { - console.log(letter) // c, d -} -mp.write('e') -mp.end() -for (let letter of mp) { - console.log(letter) // e -} -for (let letter of mp) { - console.log(letter) // nothing -} -``` - -Asynchronous iteration will continue until the end event is reached, -consuming all of the data. - -```js -const mp = new Minipass({ encoding: 'utf8' }) - -// some source of some data -let i = 5 -const inter = setInterval(() => { - if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8')) - else { - mp.end() - clearInterval(inter) - } -}, 100) - -// consume the data with asynchronous iteration -async function consume() { - for await (let chunk of mp) { - console.log(chunk) - } - return 'ok' -} - -consume().then(res => console.log(res)) -// logs `foo\n` 5 times, and then `ok` -``` - -### subclass that `console.log()`s everything written into it - -```js -class Logger extends Minipass { - write(chunk, encoding, callback) { - console.log('WRITE', chunk, encoding) - return super.write(chunk, encoding, callback) - } - end(chunk, encoding, callback) { - console.log('END', chunk, encoding) - return super.end(chunk, encoding, callback) - } -} - -someSource.pipe(new Logger()).pipe(someDest) -``` - -### same thing, but using an inline anonymous class - -```js -// js classes are fun -someSource - .pipe( - new (class extends Minipass { - emit(ev, ...data) { - // let's also log events, because debugging some weird thing - console.log('EMIT', ev) - return super.emit(ev, 
...data) - } - write(chunk, encoding, callback) { - console.log('WRITE', chunk, encoding) - return super.write(chunk, encoding, callback) - } - end(chunk, encoding, callback) { - console.log('END', chunk, encoding) - return super.end(chunk, encoding, callback) - } - })() - ) - .pipe(someDest) -``` - -### subclass that defers 'end' for some reason - -```js -class SlowEnd extends Minipass { - emit(ev, ...args) { - if (ev === 'end') { - console.log('going to end, hold on a sec') - setTimeout(() => { - console.log('ok, ready to end now') - super.emit('end', ...args) - }, 100) - } else { - return super.emit(ev, ...args) - } - } -} -``` - -### transform that creates newline-delimited JSON - -```js -class NDJSONEncode extends Minipass { - write(obj, cb) { - try { - // JSON.stringify can throw, emit an error on that - return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) - } catch (er) { - this.emit('error', er) - } - } - end(obj, cb) { - if (typeof obj === 'function') { - cb = obj - obj = undefined - } - if (obj !== undefined) { - this.write(obj) - } - return super.end(cb) - } -} -``` - -### transform that parses newline-delimited JSON - -```js -class NDJSONDecode extends Minipass { - constructor (options) { - // always be in object mode, as far as Minipass is concerned - super({ objectMode: true }) - this._jsonBuffer = '' - } - write (chunk, encoding, cb) { - if (typeof chunk === 'string' && - typeof encoding === 'string' && - encoding !== 'utf8') { - chunk = Buffer.from(chunk, encoding).toString() - } else if (Buffer.isBuffer(chunk)) { - chunk = chunk.toString() - } - if (typeof encoding === 'function') { - cb = encoding - } - const jsonData = (this._jsonBuffer + chunk).split('\n') - this._jsonBuffer = jsonData.pop() - for (let i = 0; i < jsonData.length; i++) { - try { - // JSON.parse can throw, emit an error on that - super.write(JSON.parse(jsonData[i])) - } catch (er) { - this.emit('error', er) - continue - } - } - if (cb) - cb() - } -} -``` diff --git 
a/node_modules/minipass/index.d.ts b/node_modules/minipass/index.d.ts deleted file mode 100644 index 86851f96..00000000 --- a/node_modules/minipass/index.d.ts +++ /dev/null @@ -1,152 +0,0 @@ -/// - -// Note: marking anything protected or private in the exported -// class will limit Minipass's ability to be used as the base -// for mixin classes. -import { EventEmitter } from 'events' -import { Stream } from 'stream' - -export namespace Minipass { - export type Encoding = BufferEncoding | 'buffer' | null - - export interface Writable extends EventEmitter { - end(): any - write(chunk: any, ...args: any[]): any - } - - export interface Readable extends EventEmitter { - pause(): any - resume(): any - pipe(): any - } - - export type DualIterable = Iterable & AsyncIterable - - export type ContiguousData = - | Buffer - | ArrayBufferLike - | ArrayBufferView - | string - - export type BufferOrString = Buffer | string - - export interface SharedOptions { - async?: boolean - signal?: AbortSignal - } - - export interface StringOptions extends SharedOptions { - encoding: BufferEncoding - objectMode?: boolean - } - - export interface BufferOptions extends SharedOptions { - encoding?: null | 'buffer' - objectMode?: boolean - } - - export interface ObjectModeOptions extends SharedOptions { - objectMode: true - } - - export interface PipeOptions { - end?: boolean - proxyErrors?: boolean - } - - export type Options = T extends string - ? StringOptions - : T extends Buffer - ? BufferOptions - : ObjectModeOptions -} - -export class Minipass< - RType extends any = Buffer, - WType extends any = RType extends Minipass.BufferOrString - ? 
Minipass.ContiguousData - : RType - > - extends Stream - implements Minipass.DualIterable -{ - static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable - - readonly bufferLength: number - readonly flowing: boolean - readonly writable: boolean - readonly readable: boolean - readonly aborted: boolean - readonly paused: boolean - readonly emittedEnd: boolean - readonly destroyed: boolean - - /** - * Technically writable, but mutating it can change the type, - * so is not safe to do in TypeScript. - */ - readonly objectMode: boolean - async: boolean - - /** - * Note: encoding is not actually read-only, and setEncoding(enc) - * exists. However, this type definition will insist that TypeScript - * programs declare the type of a Minipass stream up front, and if - * that type is string, then an encoding MUST be set in the ctor. If - * the type is Buffer, then the encoding must be missing, or set to - * 'buffer' or null. If the type is anything else, then objectMode - * must be set in the constructor options. So there is effectively - * no allowed way that a TS program can set the encoding after - * construction, as doing so will destroy any hope of type safety. - * TypeScript does not provide many options for changing the type of - * an object at run-time, which is what changing the encoding does. - */ - readonly encoding: Minipass.Encoding - // setEncoding(encoding: Encoding): void - - // Options required if not reading buffers - constructor( - ...args: RType extends Buffer - ? [] | [Minipass.Options] - : [Minipass.Options] - ) - - write(chunk: WType, cb?: () => void): boolean - write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean - read(size?: number): RType - end(cb?: () => void): this - end(chunk: any, cb?: () => void): this - end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this - pause(): void - resume(): void - promise(): Promise - collect(): Promise - - concat(): RType extends Minipass.BufferOrString ? 
Promise : never - destroy(er?: any): void - pipe(dest: W, opts?: Minipass.PipeOptions): W - unpipe(dest: W): void - - /** - * alias for on() - */ - addEventHandler(event: string, listener: (...args: any[]) => any): this - - on(event: string, listener: (...args: any[]) => any): this - on(event: 'data', listener: (chunk: RType) => any): this - on(event: 'error', listener: (error: any) => any): this - on( - event: - | 'readable' - | 'drain' - | 'resume' - | 'end' - | 'prefinish' - | 'finish' - | 'close', - listener: () => any - ): this - - [Symbol.iterator](): Generator - [Symbol.asyncIterator](): AsyncGenerator -} diff --git a/node_modules/minipass/index.js b/node_modules/minipass/index.js deleted file mode 100644 index ed07c17a..00000000 --- a/node_modules/minipass/index.js +++ /dev/null @@ -1,702 +0,0 @@ -'use strict' -const proc = - typeof process === 'object' && process - ? process - : { - stdout: null, - stderr: null, - } -const EE = require('events') -const Stream = require('stream') -const stringdecoder = require('string_decoder') -const SD = stringdecoder.StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFER = Symbol('buffer') -const PIPES = Symbol('pipes') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -// internal event when stream is destroyed -const DESTROYED = Symbol('destroyed') -// internal event when stream has an error -const ERROR = Symbol('error') 
-const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') -const ABORT = Symbol('abort') -const ABORTED = Symbol('aborted') -const SIGNAL = Symbol('signal') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = - (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') -const ITERATOR = - (doIter && Symbol.iterator) || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' - -const isArrayBuffer = b => - b instanceof ArrayBuffer || - (typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0) - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor(src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe() { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors() {} - end() { - this.unpipe() - if (this.opts.end) this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe() { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor(src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -class Minipass extends Stream { - constructor(options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this[PIPES] = [] - this[BUFFER] = [] - this[OBJECTMODE] = (options && options.objectMode) || false - if (this[OBJECTMODE]) 
this[ENCODING] = null - else this[ENCODING] = (options && options.encoding) || null - if (this[ENCODING] === 'buffer') this[ENCODING] = null - this[ASYNC] = (options && !!options.async) || false - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - if (options && options.debugExposeBuffer === true) { - Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) - } - if (options && options.debugExposePipes === true) { - Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) - } - this[SIGNAL] = options && options.signal - this[ABORTED] = false - if (this[SIGNAL]) { - this[SIGNAL].addEventListener('abort', () => this[ABORT]()) - if (this[SIGNAL].aborted) { - this[ABORT]() - } - } - } - - get bufferLength() { - return this[BUFFERLENGTH] - } - - get encoding() { - return this[ENCODING] - } - set encoding(enc) { - if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') - - if ( - this[ENCODING] && - enc !== this[ENCODING] && - ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) - ) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? 
new SD(enc) : null - if (this[BUFFER].length) - this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding(enc) { - this.encoding = enc - } - - get objectMode() { - return this[OBJECTMODE] - } - set objectMode(om) { - this[OBJECTMODE] = this[OBJECTMODE] || !!om - } - - get ['async']() { - return this[ASYNC] - } - set ['async'](a) { - this[ASYNC] = this[ASYNC] || !!a - } - - // drop everything and get out of the flow completely - [ABORT]() { - this[ABORTED] = true - this.emit('abort', this[SIGNAL].reason) - this.destroy(this[SIGNAL].reason) - } - - get aborted() { - return this[ABORTED] - } - set aborted(_) {} - - write(chunk, encoding, cb) { - if (this[ABORTED]) return false - if (this[EOF]) throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit( - 'error', - Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - ) - ) - return true - } - - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - - if (!encoding) encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - if (cb) fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if ( - typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed) - ) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) - - if (this.flowing) this.emit('data', chunk) - else this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) this.emit('readable') - - if (cb) fn(cb) - - return this.flowing - } - - read(n) { - if (this[DESTROYED]) return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) n = null - - if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { - if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] - else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this[BUFFER][0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ](n, chunk) { - if (n === chunk.length || n === null) this[BUFFERSHIFT]() - else { - this[BUFFER][0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this[BUFFER].length && !this[EOF]) this.emit('drain') - - return chunk - } - - 
end(chunk, encoding, cb) { - if (typeof chunk === 'function') (cb = chunk), (chunk = null) - if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') - if (chunk) this.write(chunk, encoding) - if (cb) this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. - if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME]() { - if (this[DESTROYED]) return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this[BUFFER].length) this[FLUSH]() - else if (this[EOF]) this[MAYBE_EMIT_END]() - else this.emit('drain') - } - - resume() { - return this[RESUME]() - } - - pause() { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed() { - return this[DESTROYED] - } - - get flowing() { - return this[FLOWING] - } - - get paused() { - return this[PAUSED] - } - - [BUFFERPUSH](chunk) { - if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 - else this[BUFFERLENGTH] += chunk.length - this[BUFFER].push(chunk) - } - - [BUFFERSHIFT]() { - if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 - else this[BUFFERLENGTH] -= this[BUFFER][0].length - return this[BUFFER].shift() - } - - [FLUSH](noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) - - if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') - } - - [FLUSHCHUNK](chunk) { - this.emit('data', chunk) - return this.flowing - } - - pipe(dest, opts) { - if (this[DESTROYED]) return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) opts.end = false - else opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) 
dest.end() - } else { - this[PIPES].push( - !opts.proxyErrors - ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts) - ) - if (this[ASYNC]) defer(() => this[RESUME]()) - else this[RESUME]() - } - - return dest - } - - unpipe(dest) { - const p = this[PIPES].find(p => p.dest === dest) - if (p) { - this[PIPES].splice(this[PIPES].indexOf(p), 1) - p.unpipe() - } - } - - addListener(ev, fn) { - return this.on(ev, fn) - } - - on(ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) - else fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd() { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END]() { - if ( - !this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this[BUFFER].length === 0 && - this[EOF] - ) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) this.emit('close') - this[EMITTING_END] = false - } - } - - emit(ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !this[OBJECTMODE] && !data - ? false - : this[ASYNC] - ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - super.emit(ERROR, data) - const ret = - !this[SIGNAL] || this.listeners('error').length - ? super.emit('error', data) - : false - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA](data) { - for (const p of this[PIPES]) { - if (p.dest.write(data) === false) this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND]() { - if (this[EMITTED_END]) return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) defer(() => this[EMITEND2]()) - else this[EMITEND2]() - } - - [EMITEND2]() { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this[PIPES]) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this[PIPES]) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect() { - const buf = [] - if (!this[OBJECTMODE]) buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. 
- const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat() { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] - ? buf.join('') - : Buffer.concat(buf, buf.dataLength) - ) - } - - // stream.promise().then(() => done, er => emitted error) - promise() { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - stopped = true - return Promise.resolve({ done: true }) - } - const next = () => { - if (stopped) return stop() - const res = this.read() - if (res !== null) return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) return stop() - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - stop() - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.removeListener(DESTROYED, ondestroy) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - this.removeListener(DESTROYED, ondestroy) - stop() - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { - 
next, - throw: stop, - return: stop, - [ASYNCITERATOR]() { - return this - }, - } - } - - // for (let chunk of stream) - [ITERATOR]() { - let stopped = false - const stop = () => { - this.pause() - this.removeListener(ERROR, stop) - this.removeListener(DESTROYED, stop) - this.removeListener('end', stop) - stopped = true - return { done: true } - } - - const next = () => { - if (stopped) return stop() - const value = this.read() - return value === null ? stop() : { value } - } - this.once('end', stop) - this.once(ERROR, stop) - this.once(DESTROYED, stop) - - return { - next, - throw: stop, - return: stop, - [ITERATOR]() { - return this - }, - } - } - - destroy(er) { - if (this[DESTROYED]) { - if (er) this.emit('error', er) - else this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this[BUFFER].length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) this.close() - - if (er) this.emit('error', er) - // if no error to emit, still reject pending promises - else this.emit(DESTROYED) - - return this - } - - static isStream(s) { - return ( - !!s && - (s instanceof Minipass || - s instanceof Stream || - (s instanceof EE && - // readable - (typeof s.pipe === 'function' || - // writable - (typeof s.write === 'function' && typeof s.end === 'function')))) - ) - } -} - -exports.Minipass = Minipass diff --git a/node_modules/minipass/package.json b/node_modules/minipass/package.json deleted file mode 100644 index 0e20e988..00000000 --- a/node_modules/minipass/package.json +++ /dev/null @@ -1,76 +0,0 @@ -{ - "name": "minipass", - "version": "5.0.0", - "description": "minimal implementation of a PassThrough stream", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", - "exports": { - ".": { - "import": { - "types": "./index.d.ts", - "default": "./index.mjs" - }, - "require": { - "types": "./index.d.ts", - "default": "./index.js" - } - }, - 
"./package.json": "./package.json" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "node-abort-controller": "^3.1.1", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typedoc": "^0.23.24", - "typescript": "^4.7.3" - }, - "scripts": { - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "snap": "tap", - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "typedoc": "typedoc ./index.d.ts", - "format": "prettier --write . --loglevel warn" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "files": [ - "index.d.ts", - "index.js", - "index.mjs" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">=8" - }, - "prettier": { - "semi": false, - "printWidth": 80, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - } -} diff --git a/node_modules/node-addon-api/README.md b/node_modules/node-addon-api/README.md deleted file mode 100644 index 6a79c917..00000000 --- a/node_modules/node-addon-api/README.md +++ /dev/null @@ -1,317 +0,0 @@ -NOTE: The default branch has been renamed! -master is now named main - -If you have a local clone, you can update it by running: - -```shell -git branch -m master main -git fetch origin -git branch -u origin/main main -``` - -# **node-addon-api module** -This module contains **header-only C++ wrapper classes** which simplify -the use of the C based [Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) -provided by Node.js when using C++. 
It provides a C++ object model -and exception handling semantics with low overhead. - -There are three options for implementing addons: Node-API, nan, or direct -use of internal V8, libuv, and Node.js libraries. Unless there is a need for -direct access to functionality that is not exposed by Node-API as outlined -in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html) -in Node.js core, use Node-API. Refer to -[C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) -for more information on Node-API. - -Node-API is an ABI stable C interface provided by Node.js for building native -addons. It is independent of the underlying JavaScript runtime (e.g. V8 or ChakraCore) -and is maintained as part of Node.js itself. It is intended to insulate -native addons from changes in the underlying JavaScript engine and allow -modules compiled for one version to run on later versions of Node.js without -recompilation. - -The `node-addon-api` module, which is not part of Node.js, preserves the benefits -of the Node-API as it consists only of inline code that depends only on the stable API -provided by Node-API. As such, modules built against one version of Node.js -using node-addon-api should run without having to be rebuilt with newer versions -of Node.js. - -It is important to remember that *other* Node.js interfaces such as -`libuv` (included in a project via `#include `) are not ABI-stable across -Node.js major versions. Thus, an addon must use Node-API and/or `node-addon-api` -exclusively and build against a version of Node.js that includes an -implementation of Node-API (meaning an active LTS version of Node.js) in -order to benefit from ABI stability across Node.js major versions. Node.js -provides an [ABI stability guide][] containing a detailed explanation of ABI -stability in general, and the Node-API ABI stability guarantee in particular. 
- -As new APIs are added to Node-API, node-addon-api must be updated to provide -wrappers for those new APIs. For this reason, node-addon-api provides -methods that allow callers to obtain the underlying Node-API handles so -direct calls to Node-API and the use of the objects/methods provided by -node-addon-api can be used together. For example, in order to be able -to use an API for which the node-addon-api does not yet provide a wrapper. - -APIs exposed by node-addon-api are generally used to create and -manipulate JavaScript values. Concepts and operations generally map -to ideas specified in the **ECMA262 Language Specification**. - -The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an -excellent orientation and tips for developers just getting started with Node-API -and node-addon-api. - -- **[Setup](#setup)** -- **[API Documentation](#api)** -- **[Examples](#examples)** -- **[Tests](#tests)** -- **[More resource and info about native Addons](#resources)** -- **[Badges](#badges)** -- **[Code of Conduct](CODE_OF_CONDUCT.md)** -- **[Contributors](#contributors)** -- **[License](#license)** - -## **Current version: 5.1.0** - -(See [CHANGELOG.md](CHANGELOG.md) for complete Changelog) - -[![NPM](https://nodei.co/npm/node-addon-api.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-addon-api/) [![NPM](https://nodei.co/npm-dl/node-addon-api.png?months=6&height=1)](https://nodei.co/npm/node-addon-api/) - - - -node-addon-api is based on [Node-API](https://nodejs.org/api/n-api.html) and supports using different Node-API versions. -This allows addons built with it to run with Node.js versions which support the targeted Node-API version. -**However** the node-addon-api support model is to support only the active LTS Node.js versions. This means that -every year there will be a new major which drops support for the Node.js LTS version which has gone out of service. 
- -The oldest Node.js version supported by the current version of node-addon-api is Node.js 14.x. - -## Setup - - [Installation and usage](doc/setup.md) - - [node-gyp](doc/node-gyp.md) - - [cmake-js](doc/cmake-js.md) - - [Conversion tool](doc/conversion-tool.md) - - [Checker tool](doc/checker-tool.md) - - [Generator](doc/generator.md) - - [Prebuild tools](doc/prebuild_tools.md) - - - -### **API Documentation** - -The following is the documentation for node-addon-api. - - - [Full Class Hierarchy](doc/hierarchy.md) - - [Addon Structure](doc/addon.md) - - Data Types: - - [Env](doc/env.md) - - [CallbackInfo](doc/callbackinfo.md) - - [Reference](doc/reference.md) - - [Value](doc/value.md) - - [Name](doc/name.md) - - [Symbol](doc/symbol.md) - - [String](doc/string.md) - - [Number](doc/number.md) - - [Date](doc/date.md) - - [BigInt](doc/bigint.md) - - [Boolean](doc/boolean.md) - - [External](doc/external.md) - - [Object](doc/object.md) - - [Array](doc/array.md) - - [ObjectReference](doc/object_reference.md) - - [PropertyDescriptor](doc/property_descriptor.md) - - [Function](doc/function.md) - - [FunctionReference](doc/function_reference.md) - - [ObjectWrap](doc/object_wrap.md) - - [ClassPropertyDescriptor](doc/class_property_descriptor.md) - - [Buffer](doc/buffer.md) - - [ArrayBuffer](doc/array_buffer.md) - - [TypedArray](doc/typed_array.md) - - [TypedArrayOf](doc/typed_array_of.md) - - [DataView](doc/dataview.md) - - [Error Handling](doc/error_handling.md) - - [Error](doc/error.md) - - [TypeError](doc/type_error.md) - - [RangeError](doc/range_error.md) - - [Object Lifetime Management](doc/object_lifetime_management.md) - - [HandleScope](doc/handle_scope.md) - - [EscapableHandleScope](doc/escapable_handle_scope.md) - - [Memory Management](doc/memory_management.md) - - [Async Operations](doc/async_operations.md) - - [AsyncWorker](doc/async_worker.md) - - [AsyncContext](doc/async_context.md) - - [AsyncWorker Variants](doc/async_worker_variants.md) - - [Thread-safe 
Functions](doc/threadsafe.md) - - [ThreadSafeFunction](doc/threadsafe_function.md) - - [TypedThreadSafeFunction](doc/typed_threadsafe_function.md) - - [Promises](doc/promises.md) - - [Version management](doc/version_management.md) - - - -### **Examples** - -Are you new to **node-addon-api**? Take a look at our **[examples](https://github.com/nodejs/node-addon-examples)** - -- **[Hello World](https://github.com/nodejs/node-addon-examples/tree/HEAD/1_hello_world/node-addon-api)** -- **[Pass arguments to a function](https://github.com/nodejs/node-addon-examples/tree/HEAD/2_function_arguments/node-addon-api)** -- **[Callbacks](https://github.com/nodejs/node-addon-examples/tree/HEAD/3_callbacks/node-addon-api)** -- **[Object factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/4_object_factory/node-addon-api)** -- **[Function factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/5_function_factory/node-addon-api)** -- **[Wrapping C++ Object](https://github.com/nodejs/node-addon-examples/tree/HEAD/6_object_wrap/node-addon-api)** -- **[Factory of wrapped object](https://github.com/nodejs/node-addon-examples/tree/HEAD/7_factory_wrap/node-addon-api)** -- **[Passing wrapped object around](https://github.com/nodejs/node-addon-examples/tree/HEAD/8_passing_wrapped/node-addon-api)** - - - -### **Tests** - -To run the **node-addon-api** tests do: - -``` -npm install -npm test -``` - -To avoid testing the deprecated portions of the API run -``` -npm install -npm test --disable-deprecated -``` - -To run the tests targeting a specific version of Node-API run -``` -npm install -export NAPI_VERSION=X -npm test --NAPI_VERSION=X -``` - -where X is the version of Node-API you want to target. 
- -To run a specific unit test, filter conditions are available - -**Example:** - compile and run only tests on objectwrap.cc and objectwrap.js - ``` - npm run unit --filter=objectwrap - ``` - -Multiple unit tests cane be selected with wildcards - -**Example:** -compile and run all test files ending with "reference" -> function_reference.cc, object_reference.cc, reference.cc - ``` - npm run unit --filter=*reference - ``` - -Multiple filter conditions can be joined to broaden the test selection - -**Example:** - compile and run all tests under folders threadsafe_function and typed_threadsafe_function and also the objectwrap.cc file - npm run unit --filter='*function objectwrap' - -### **Debug** - -To run the **node-addon-api** tests with `--debug` option: - -``` -npm run-script dev -``` - -If you want a faster build, you might use the following option: - -``` -npm run-script dev:incremental -``` - -Take a look and get inspired by our **[test suite](https://github.com/nodejs/node-addon-api/tree/HEAD/test)** - -### **Benchmarks** - -You can run the available benchmarks using the following command: - -``` -npm run-script benchmark -``` - -See [benchmark/README.md](benchmark/README.md) for more details about running and adding benchmarks. - - - -### **More resource and info about native Addons** -- **[C++ Addons](https://nodejs.org/dist/latest/docs/api/addons.html)** -- **[Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)** -- **[Node-API - Next Generation Node API for Native Modules](https://youtu.be/-Oniup60Afs)** -- **[How We Migrated Realm JavaScript From NAN to Node-API](https://developer.mongodb.com/article/realm-javascript-nan-to-n-api)** - -As node-addon-api's core mission is to expose the plain C Node-API as C++ -wrappers, tools that facilitate n-api/node-addon-api providing more -convenient patterns for developing a Node.js add-on with n-api/node-addon-api -can be published to NPM as standalone packages. 
It is also recommended to tag -such packages with `node-addon-api` to provide more visibility to the community. - -Quick links to NPM searches: [keywords:node-addon-api](https://www.npmjs.com/search?q=keywords%3Anode-addon-api). - - - -### **Other bindings** - -- **[napi-rs](https://napi.rs)** - (`Rust`) - - - -### **Badges** - -The use of badges is recommended to indicate the minimum version of Node-API -required for the module. This helps to determine which Node.js major versions are -supported. Addon maintainers can consult the [Node-API support matrix][] to determine -which Node.js versions provide a given Node-API version. The following badges are -available: - -![Node-API v1 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v1%20Badge.svg) -![Node-API v2 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v2%20Badge.svg) -![Node-API v3 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v3%20Badge.svg) -![Node-API v4 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v4%20Badge.svg) -![Node-API v5 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v5%20Badge.svg) -![Node-API v6 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v6%20Badge.svg) -![Node-API v7 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v7%20Badge.svg) -![Node-API v8 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v8%20Badge.svg) -![Node-API Experimental Version Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20Experimental%20Version%20Badge.svg) - -## **Contributing** - -We love contributions from the community to **node-addon-api**! -See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on our philosophy around extending this module. 
- - - -## Team members - -### Active -| Name | GitHub Link | -| ------------------- | ----------------------------------------------------- | -| Anna Henningsen | [addaleax](https://github.com/addaleax) | -| Chengzhong Wu | [legendecas](https://github.com/legendecas) | -| Jack Xia | [JckXia](https://github.com/JckXia) | -| Kevin Eady | [KevinEady](https://github.com/KevinEady) | -| Michael Dawson | [mhdawson](https://github.com/mhdawson) | -| Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) | -| Vladimir Morozov | [vmoroz](https://github.com/vmoroz) | - -### Emeritus -| Name | GitHub Link | -| ------------------- | ----------------------------------------------------- | -| Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) | -| Benjamin Byholm | [kkoopa](https://github.com/kkoopa) | -| Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) | -| Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) | -| Jason Ginchereau | [jasongin](https://github.com/jasongin) | -| Jim Schlight | [jschlight](https://github.com/jschlight) | -| Sampson Gao | [sampsongao](https://github.com/sampsongao) | -| Taylor Woll | [boingoing](https://github.com/boingoing) | - - - -Licensed under [MIT](./LICENSE.md) - -[ABI stability guide]: https://nodejs.org/en/docs/guides/abi-stability/ -[Node-API support matrix]: https://nodejs.org/dist/latest/docs/api/n-api.html#n_api_n_api_version_matrix diff --git a/node_modules/node-addon-api/index.js b/node_modules/node-addon-api/index.js deleted file mode 100644 index 52f53e3c..00000000 --- a/node_modules/node-addon-api/index.js +++ /dev/null @@ -1,11 +0,0 @@ -const path = require('path'); - -const includeDir = path.relative('.', __dirname); - -module.exports = { - include: `"${__dirname}"`, // deprecated, can be removed as part of 4.0.0 - include_dir: includeDir, - gyp: path.join(includeDir, 'node_api.gyp:nothing'), - isNodeApiBuiltin: true, - needsFlag: false -}; diff --git 
a/node_modules/node-addon-api/napi-inl.deprecated.h b/node_modules/node-addon-api/napi-inl.deprecated.h deleted file mode 100644 index 3ddbb2ef..00000000 --- a/node_modules/node-addon-api/napi-inl.deprecated.h +++ /dev/null @@ -1,186 +0,0 @@ -#ifndef SRC_NAPI_INL_DEPRECATED_H_ -#define SRC_NAPI_INL_DEPRECATED_H_ - -//////////////////////////////////////////////////////////////////////////////// -// PropertyDescriptor class -//////////////////////////////////////////////////////////////////////////////// - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - const char* utf8name, - Getter getter, - napi_property_attributes attributes, - void* /*data*/) { - using CbData = details::CallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({getter, nullptr}); - - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - CbData::Wrapper, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - const std::string& utf8name, - Getter getter, - napi_property_attributes attributes, - void* data) { - return Accessor(utf8name.c_str(), getter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - napi_value name, - Getter getter, - napi_property_attributes attributes, - void* /*data*/) { - using CbData = details::CallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({getter, nullptr}); - - return PropertyDescriptor({nullptr, - name, - nullptr, - CbData::Wrapper, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Name name, Getter getter, napi_property_attributes attributes, void* data) { - napi_value nameValue = name; - return PropertyDescriptor::Accessor(nameValue, getter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - const char* utf8name, - Getter 
getter, - Setter setter, - napi_property_attributes attributes, - void* /*data*/) { - using CbData = details::AccessorCallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({getter, setter, nullptr}); - - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - CbData::GetterWrapper, - CbData::SetterWrapper, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - const std::string& utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* data) { - return Accessor(utf8name.c_str(), getter, setter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - napi_value name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* /*data*/) { - using CbData = details::AccessorCallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({getter, setter, nullptr}); - - return PropertyDescriptor({nullptr, - name, - nullptr, - CbData::GetterWrapper, - CbData::SetterWrapper, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Name name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* data) { - napi_value nameValue = name; - return PropertyDescriptor::Accessor( - nameValue, getter, setter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - const char* utf8name, - Callable cb, - napi_property_attributes attributes, - void* /*data*/) { - using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); - using CbData = details::CallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({cb, nullptr}); - - return PropertyDescriptor({utf8name, - nullptr, - CbData::Wrapper, - nullptr, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor 
PropertyDescriptor::Function( - const std::string& utf8name, - Callable cb, - napi_property_attributes attributes, - void* data) { - return Function(utf8name.c_str(), cb, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - napi_value name, - Callable cb, - napi_property_attributes attributes, - void* /*data*/) { - using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); - using CbData = details::CallbackData; - // TODO: Delete when the function is destroyed - auto callbackData = new CbData({cb, nullptr}); - - return PropertyDescriptor({nullptr, - name, - CbData::Wrapper, - nullptr, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - Name name, Callable cb, napi_property_attributes attributes, void* data) { - napi_value nameValue = name; - return PropertyDescriptor::Function(nameValue, cb, attributes, data); -} - -#endif // !SRC_NAPI_INL_DEPRECATED_H_ diff --git a/node_modules/node-addon-api/napi-inl.h b/node_modules/node-addon-api/napi-inl.h deleted file mode 100644 index 3ddc1baa..00000000 --- a/node_modules/node-addon-api/napi-inl.h +++ /dev/null @@ -1,6303 +0,0 @@ -#ifndef SRC_NAPI_INL_H_ -#define SRC_NAPI_INL_H_ - -//////////////////////////////////////////////////////////////////////////////// -// Node-API C++ Wrapper Classes -// -// Inline header-only implementations for "Node-API" ABI-stable C APIs for -// Node.js. -//////////////////////////////////////////////////////////////////////////////// - -// Note: Do not include this file directly! Include "napi.h" instead. - -#include -#include -#include -#include -#include - -namespace Napi { - -#ifdef NAPI_CPP_CUSTOM_NAMESPACE -namespace NAPI_CPP_CUSTOM_NAMESPACE { -#endif - -// Helpers to handle functions exposed from C++. -namespace details { - -// Attach a data item to an object and delete it when the object gets -// garbage-collected. 
-// TODO: Replace this code with `napi_add_finalizer()` whenever it becomes -// available on all supported versions of Node.js. -template -inline napi_status AttachData(napi_env env, - napi_value obj, - FreeType* data, - napi_finalize finalizer = nullptr, - void* hint = nullptr) { - napi_status status; - if (finalizer == nullptr) { - finalizer = [](napi_env /*env*/, void* data, void* /*hint*/) { - delete static_cast(data); - }; - } -#if (NAPI_VERSION < 5) - napi_value symbol, external; - status = napi_create_symbol(env, nullptr, &symbol); - if (status == napi_ok) { - status = napi_create_external(env, data, finalizer, hint, &external); - if (status == napi_ok) { - napi_property_descriptor desc = {nullptr, - symbol, - nullptr, - nullptr, - nullptr, - external, - napi_default, - nullptr}; - status = napi_define_properties(env, obj, 1, &desc); - } - } -#else // NAPI_VERSION >= 5 - status = napi_add_finalizer(env, obj, data, finalizer, hint, nullptr); -#endif - return status; -} - -// For use in JS to C++ callback wrappers to catch any Napi::Error exceptions -// and rethrow them as JavaScript exceptions before returning from the callback. -template -inline napi_value WrapCallback(Callable callback) { -#ifdef NAPI_CPP_EXCEPTIONS - try { - return callback(); - } catch (const Error& e) { - e.ThrowAsJavaScriptException(); - return nullptr; - } -#else // NAPI_CPP_EXCEPTIONS - // When C++ exceptions are disabled, errors are immediately thrown as JS - // exceptions, so there is no need to catch and rethrow them here. - return callback(); -#endif // NAPI_CPP_EXCEPTIONS -} - -// For use in JS to C++ void callback wrappers to catch any Napi::Error -// exceptions and rethrow them as JavaScript exceptions before returning from -// the callback. 
-template -inline void WrapVoidCallback(Callable callback) { -#ifdef NAPI_CPP_EXCEPTIONS - try { - callback(); - } catch (const Error& e) { - e.ThrowAsJavaScriptException(); - } -#else // NAPI_CPP_EXCEPTIONS - // When C++ exceptions are disabled, errors are immediately thrown as JS - // exceptions, so there is no need to catch and rethrow them here. - callback(); -#endif // NAPI_CPP_EXCEPTIONS -} - -template -struct CallbackData { - static inline napi_value Wrapper(napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - CallbackData* callbackData = - static_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - return callbackData->callback(callbackInfo); - }); - } - - Callable callback; - void* data; -}; - -template -struct CallbackData { - static inline napi_value Wrapper(napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - CallbackData* callbackData = - static_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - callbackData->callback(callbackInfo); - return nullptr; - }); - } - - Callable callback; - void* data; -}; - -template -napi_value TemplatedVoidCallback(napi_env env, - napi_callback_info info) NAPI_NOEXCEPT { - return details::WrapCallback([&] { - CallbackInfo cbInfo(env, info); - Callback(cbInfo); - return nullptr; - }); -} - -template -napi_value TemplatedCallback(napi_env env, - napi_callback_info info) NAPI_NOEXCEPT { - return details::WrapCallback([&] { - CallbackInfo cbInfo(env, info); - return Callback(cbInfo); - }); -} - -template -napi_value TemplatedInstanceCallback(napi_env env, - napi_callback_info info) NAPI_NOEXCEPT { - return details::WrapCallback([&] { - CallbackInfo cbInfo(env, info); - T* instance = T::Unwrap(cbInfo.This().As()); - return (instance->*UnwrapCallback)(cbInfo); - }); -} - -template -napi_value TemplatedInstanceVoidCallback(napi_env env, napi_callback_info 
info) - NAPI_NOEXCEPT { - return details::WrapCallback([&] { - CallbackInfo cbInfo(env, info); - T* instance = T::Unwrap(cbInfo.This().As()); - (instance->*UnwrapCallback)(cbInfo); - return nullptr; - }); -} - -template -struct FinalizeData { - static inline void Wrapper(napi_env env, - void* data, - void* finalizeHint) NAPI_NOEXCEPT { - WrapVoidCallback([&] { - FinalizeData* finalizeData = static_cast(finalizeHint); - finalizeData->callback(Env(env), static_cast(data)); - delete finalizeData; - }); - } - - static inline void WrapperWithHint(napi_env env, - void* data, - void* finalizeHint) NAPI_NOEXCEPT { - WrapVoidCallback([&] { - FinalizeData* finalizeData = static_cast(finalizeHint); - finalizeData->callback( - Env(env), static_cast(data), finalizeData->hint); - delete finalizeData; - }); - } - - Finalizer callback; - Hint* hint; -}; - -#if (NAPI_VERSION > 3 && !defined(__wasm32__)) -template , - typename FinalizerDataType = void> -struct ThreadSafeFinalize { - static inline void Wrapper(napi_env env, - void* rawFinalizeData, - void* /* rawContext */) { - if (rawFinalizeData == nullptr) return; - - ThreadSafeFinalize* finalizeData = - static_cast(rawFinalizeData); - finalizeData->callback(Env(env)); - delete finalizeData; - } - - static inline void FinalizeWrapperWithData(napi_env env, - void* rawFinalizeData, - void* /* rawContext */) { - if (rawFinalizeData == nullptr) return; - - ThreadSafeFinalize* finalizeData = - static_cast(rawFinalizeData); - finalizeData->callback(Env(env), finalizeData->data); - delete finalizeData; - } - - static inline void FinalizeWrapperWithContext(napi_env env, - void* rawFinalizeData, - void* rawContext) { - if (rawFinalizeData == nullptr) return; - - ThreadSafeFinalize* finalizeData = - static_cast(rawFinalizeData); - finalizeData->callback(Env(env), static_cast(rawContext)); - delete finalizeData; - } - - static inline void FinalizeFinalizeWrapperWithDataAndContext( - napi_env env, void* rawFinalizeData, void* rawContext) { - 
if (rawFinalizeData == nullptr) return; - - ThreadSafeFinalize* finalizeData = - static_cast(rawFinalizeData); - finalizeData->callback( - Env(env), finalizeData->data, static_cast(rawContext)); - delete finalizeData; - } - - FinalizerDataType* data; - Finalizer callback; -}; - -template -inline typename std::enable_if(nullptr)>::type -CallJsWrapper(napi_env env, napi_value jsCallback, void* context, void* data) { - call(env, - Function(env, jsCallback), - static_cast(context), - static_cast(data)); -} - -template -inline typename std::enable_if(nullptr)>::type -CallJsWrapper(napi_env env, - napi_value jsCallback, - void* /*context*/, - void* /*data*/) { - if (jsCallback != nullptr) { - Function(env, jsCallback).Call(0, nullptr); - } -} - -#if NAPI_VERSION > 4 - -template -napi_value DefaultCallbackWrapper(napi_env /*env*/, std::nullptr_t /*cb*/) { - return nullptr; -} - -template -napi_value DefaultCallbackWrapper(napi_env /*env*/, Napi::Function cb) { - return cb; -} - -#else -template -napi_value DefaultCallbackWrapper(napi_env env, Napi::Function cb) { - if (cb.IsEmpty()) { - return TSFN::EmptyFunctionFactory(env); - } - return cb; -} -#endif // NAPI_VERSION > 4 -#endif // NAPI_VERSION > 3 && !defined(__wasm32__) - -template -struct AccessorCallbackData { - static inline napi_value GetterWrapper(napi_env env, - napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - AccessorCallbackData* callbackData = - static_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - return callbackData->getterCallback(callbackInfo); - }); - } - - static inline napi_value SetterWrapper(napi_env env, - napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - AccessorCallbackData* callbackData = - static_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - callbackData->setterCallback(callbackInfo); - return nullptr; - }); - } - - Getter 
getterCallback; - Setter setterCallback; - void* data; -}; - -} // namespace details - -#ifndef NODE_ADDON_API_DISABLE_DEPRECATED -#include "napi-inl.deprecated.h" -#endif // !NODE_ADDON_API_DISABLE_DEPRECATED - -//////////////////////////////////////////////////////////////////////////////// -// Module registration -//////////////////////////////////////////////////////////////////////////////// - -// Register an add-on based on an initializer function. -#define NODE_API_MODULE(modname, regfunc) \ - static napi_value __napi_##regfunc(napi_env env, napi_value exports) { \ - return Napi::RegisterModule(env, exports, regfunc); \ - } \ - NAPI_MODULE(modname, __napi_##regfunc) - -// Register an add-on based on a subclass of `Addon` with a custom Node.js -// module name. -#define NODE_API_NAMED_ADDON(modname, classname) \ - static napi_value __napi_##classname(napi_env env, napi_value exports) { \ - return Napi::RegisterModule(env, exports, &classname::Init); \ - } \ - NAPI_MODULE(modname, __napi_##classname) - -// Register an add-on based on a subclass of `Addon` with the Node.js module -// name given by node-gyp from the `target_name` in binding.gyp. -#define NODE_API_ADDON(classname) \ - NODE_API_NAMED_ADDON(NODE_GYP_MODULE_NAME, classname) - -// Adapt the NAPI_MODULE registration function: -// - Wrap the arguments in NAPI wrappers. -// - Catch any NAPI errors and rethrow as JS exceptions. 
-inline napi_value RegisterModule(napi_env env, - napi_value exports, - ModuleRegisterCallback registerCallback) { - return details::WrapCallback([&] { - return napi_value( - registerCallback(Napi::Env(env), Napi::Object(env, exports))); - }); -} - -//////////////////////////////////////////////////////////////////////////////// -// Maybe class -//////////////////////////////////////////////////////////////////////////////// - -template -bool Maybe::IsNothing() const { - return !_has_value; -} - -template -bool Maybe::IsJust() const { - return _has_value; -} - -template -void Maybe::Check() const { - NAPI_CHECK(IsJust(), "Napi::Maybe::Check", "Maybe value is Nothing."); -} - -template -T Maybe::Unwrap() const { - NAPI_CHECK(IsJust(), "Napi::Maybe::Unwrap", "Maybe value is Nothing."); - return _value; -} - -template -T Maybe::UnwrapOr(const T& default_value) const { - return _has_value ? _value : default_value; -} - -template -bool Maybe::UnwrapTo(T* out) const { - if (IsJust()) { - *out = _value; - return true; - }; - return false; -} - -template -bool Maybe::operator==(const Maybe& other) const { - return (IsJust() == other.IsJust()) && - (!IsJust() || Unwrap() == other.Unwrap()); -} - -template -bool Maybe::operator!=(const Maybe& other) const { - return !operator==(other); -} - -template -Maybe::Maybe() : _has_value(false) {} - -template -Maybe::Maybe(const T& t) : _has_value(true), _value(t) {} - -template -inline Maybe Nothing() { - return Maybe(); -} - -template -inline Maybe Just(const T& t) { - return Maybe(t); -} - -//////////////////////////////////////////////////////////////////////////////// -// Env class -//////////////////////////////////////////////////////////////////////////////// - -inline Env::Env(napi_env env) : _env(env) {} - -inline Env::operator napi_env() const { - return _env; -} - -inline Object Env::Global() const { - napi_value value; - napi_status status = napi_get_global(*this, &value); - NAPI_THROW_IF_FAILED(*this, status, Object()); 
- return Object(*this, value); -} - -inline Value Env::Undefined() const { - napi_value value; - napi_status status = napi_get_undefined(*this, &value); - NAPI_THROW_IF_FAILED(*this, status, Value()); - return Value(*this, value); -} - -inline Value Env::Null() const { - napi_value value; - napi_status status = napi_get_null(*this, &value); - NAPI_THROW_IF_FAILED(*this, status, Value()); - return Value(*this, value); -} - -inline bool Env::IsExceptionPending() const { - bool result; - napi_status status = napi_is_exception_pending(_env, &result); - if (status != napi_ok) - result = false; // Checking for a pending exception shouldn't throw. - return result; -} - -inline Error Env::GetAndClearPendingException() const { - napi_value value; - napi_status status = napi_get_and_clear_last_exception(_env, &value); - if (status != napi_ok) { - // Don't throw another exception when failing to get the exception! - return Error(); - } - return Error(_env, value); -} - -inline MaybeOrValue Env::RunScript(const char* utf8script) const { - String script = String::New(_env, utf8script); - return RunScript(script); -} - -inline MaybeOrValue Env::RunScript(const std::string& utf8script) const { - return RunScript(utf8script.c_str()); -} - -inline MaybeOrValue Env::RunScript(String script) const { - napi_value result; - napi_status status = napi_run_script(_env, script, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Value(_env, result), Napi::Value); -} - -#if NAPI_VERSION > 2 -template -void Env::CleanupHook::Wrapper(void* data) NAPI_NOEXCEPT { - auto* cleanupData = - static_cast::CleanupData*>( - data); - cleanupData->hook(); - delete cleanupData; -} - -template -void Env::CleanupHook::WrapperWithArg(void* data) NAPI_NOEXCEPT { - auto* cleanupData = - static_cast::CleanupData*>( - data); - cleanupData->hook(static_cast(cleanupData->arg)); - delete cleanupData; -} -#endif // NAPI_VERSION > 2 - -#if NAPI_VERSION > 5 -template fini> -inline void 
Env::SetInstanceData(T* data) const { - napi_status status = napi_set_instance_data( - _env, - data, - [](napi_env env, void* data, void*) { fini(env, static_cast(data)); }, - nullptr); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -template fini> -inline void Env::SetInstanceData(DataType* data, HintType* hint) const { - napi_status status = napi_set_instance_data( - _env, - data, - [](napi_env env, void* data, void* hint) { - fini(env, static_cast(data), static_cast(hint)); - }, - hint); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -template -inline T* Env::GetInstanceData() const { - void* data = nullptr; - - napi_status status = napi_get_instance_data(_env, &data); - NAPI_THROW_IF_FAILED(_env, status, nullptr); - - return static_cast(data); -} - -template -void Env::DefaultFini(Env, T* data) { - delete data; -} - -template -void Env::DefaultFiniWithHint(Env, DataType* data, HintType*) { - delete data; -} -#endif // NAPI_VERSION > 5 - -//////////////////////////////////////////////////////////////////////////////// -// Value class -//////////////////////////////////////////////////////////////////////////////// - -inline Value::Value() : _env(nullptr), _value(nullptr) {} - -inline Value::Value(napi_env env, napi_value value) - : _env(env), _value(value) {} - -inline Value::operator napi_value() const { - return _value; -} - -inline bool Value::operator==(const Value& other) const { - return StrictEquals(other); -} - -inline bool Value::operator!=(const Value& other) const { - return !this->operator==(other); -} - -inline bool Value::StrictEquals(const Value& other) const { - bool result; - napi_status status = napi_strict_equals(_env, *this, other, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline Napi::Env Value::Env() const { - return Napi::Env(_env); -} - -inline bool Value::IsEmpty() const { - return _value == nullptr; -} - -inline napi_valuetype Value::Type() const { - if (IsEmpty()) { - return napi_undefined; - } - - 
napi_valuetype type; - napi_status status = napi_typeof(_env, _value, &type); - NAPI_THROW_IF_FAILED(_env, status, napi_undefined); - return type; -} - -inline bool Value::IsUndefined() const { - return Type() == napi_undefined; -} - -inline bool Value::IsNull() const { - return Type() == napi_null; -} - -inline bool Value::IsBoolean() const { - return Type() == napi_boolean; -} - -inline bool Value::IsNumber() const { - return Type() == napi_number; -} - -#if NAPI_VERSION > 5 -inline bool Value::IsBigInt() const { - return Type() == napi_bigint; -} -#endif // NAPI_VERSION > 5 - -#if (NAPI_VERSION > 4) -inline bool Value::IsDate() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_date(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} -#endif - -inline bool Value::IsString() const { - return Type() == napi_string; -} - -inline bool Value::IsSymbol() const { - return Type() == napi_symbol; -} - -inline bool Value::IsArray() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_array(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsArrayBuffer() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_arraybuffer(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsTypedArray() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_typedarray(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsObject() const { - return Type() == napi_object || IsFunction(); -} - -inline bool Value::IsFunction() const { - return Type() == napi_function; -} - -inline bool Value::IsPromise() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_promise(_env, _value, 
&result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsDataView() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_dataview(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsBuffer() const { - if (IsEmpty()) { - return false; - } - - bool result; - napi_status status = napi_is_buffer(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -inline bool Value::IsExternal() const { - return Type() == napi_external; -} - -template -inline T Value::As() const { - return T(_env, _value); -} - -inline MaybeOrValue Value::ToBoolean() const { - napi_value result; - napi_status status = napi_coerce_to_bool(_env, _value, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Boolean(_env, result), Napi::Boolean); -} - -inline MaybeOrValue Value::ToNumber() const { - napi_value result; - napi_status status = napi_coerce_to_number(_env, _value, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Number(_env, result), Napi::Number); -} - -inline MaybeOrValue Value::ToString() const { - napi_value result; - napi_status status = napi_coerce_to_string(_env, _value, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::String(_env, result), Napi::String); -} - -inline MaybeOrValue Value::ToObject() const { - napi_value result; - napi_status status = napi_coerce_to_object(_env, _value, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Object(_env, result), Napi::Object); -} - -//////////////////////////////////////////////////////////////////////////////// -// Boolean class -//////////////////////////////////////////////////////////////////////////////// - -inline Boolean Boolean::New(napi_env env, bool val) { - napi_value value; - napi_status status = napi_get_boolean(env, val, &value); - NAPI_THROW_IF_FAILED(env, status, Boolean()); - return 
Boolean(env, value); -} - -inline Boolean::Boolean() : Napi::Value() {} - -inline Boolean::Boolean(napi_env env, napi_value value) - : Napi::Value(env, value) {} - -inline Boolean::operator bool() const { - return Value(); -} - -inline bool Boolean::Value() const { - bool result; - napi_status status = napi_get_value_bool(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, false); - return result; -} - -//////////////////////////////////////////////////////////////////////////////// -// Number class -//////////////////////////////////////////////////////////////////////////////// - -inline Number Number::New(napi_env env, double val) { - napi_value value; - napi_status status = napi_create_double(env, val, &value); - NAPI_THROW_IF_FAILED(env, status, Number()); - return Number(env, value); -} - -inline Number::Number() : Value() {} - -inline Number::Number(napi_env env, napi_value value) : Value(env, value) {} - -inline Number::operator int32_t() const { - return Int32Value(); -} - -inline Number::operator uint32_t() const { - return Uint32Value(); -} - -inline Number::operator int64_t() const { - return Int64Value(); -} - -inline Number::operator float() const { - return FloatValue(); -} - -inline Number::operator double() const { - return DoubleValue(); -} - -inline int32_t Number::Int32Value() const { - int32_t result; - napi_status status = napi_get_value_int32(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -inline uint32_t Number::Uint32Value() const { - uint32_t result; - napi_status status = napi_get_value_uint32(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -inline int64_t Number::Int64Value() const { - int64_t result; - napi_status status = napi_get_value_int64(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -inline float Number::FloatValue() const { - return static_cast(DoubleValue()); -} - -inline double Number::DoubleValue() 
const { - double result; - napi_status status = napi_get_value_double(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -#if NAPI_VERSION > 5 -//////////////////////////////////////////////////////////////////////////////// -// BigInt Class -//////////////////////////////////////////////////////////////////////////////// - -inline BigInt BigInt::New(napi_env env, int64_t val) { - napi_value value; - napi_status status = napi_create_bigint_int64(env, val, &value); - NAPI_THROW_IF_FAILED(env, status, BigInt()); - return BigInt(env, value); -} - -inline BigInt BigInt::New(napi_env env, uint64_t val) { - napi_value value; - napi_status status = napi_create_bigint_uint64(env, val, &value); - NAPI_THROW_IF_FAILED(env, status, BigInt()); - return BigInt(env, value); -} - -inline BigInt BigInt::New(napi_env env, - int sign_bit, - size_t word_count, - const uint64_t* words) { - napi_value value; - napi_status status = - napi_create_bigint_words(env, sign_bit, word_count, words, &value); - NAPI_THROW_IF_FAILED(env, status, BigInt()); - return BigInt(env, value); -} - -inline BigInt::BigInt() : Value() {} - -inline BigInt::BigInt(napi_env env, napi_value value) : Value(env, value) {} - -inline int64_t BigInt::Int64Value(bool* lossless) const { - int64_t result; - napi_status status = - napi_get_value_bigint_int64(_env, _value, &result, lossless); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -inline uint64_t BigInt::Uint64Value(bool* lossless) const { - uint64_t result; - napi_status status = - napi_get_value_bigint_uint64(_env, _value, &result, lossless); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -inline size_t BigInt::WordCount() const { - size_t word_count; - napi_status status = - napi_get_value_bigint_words(_env, _value, nullptr, &word_count, nullptr); - NAPI_THROW_IF_FAILED(_env, status, 0); - return word_count; -} - -inline void BigInt::ToWords(int* sign_bit, - size_t* word_count, - uint64_t* 
words) { - napi_status status = - napi_get_value_bigint_words(_env, _value, sign_bit, word_count, words); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} -#endif // NAPI_VERSION > 5 - -#if (NAPI_VERSION > 4) -//////////////////////////////////////////////////////////////////////////////// -// Date Class -//////////////////////////////////////////////////////////////////////////////// - -inline Date Date::New(napi_env env, double val) { - napi_value value; - napi_status status = napi_create_date(env, val, &value); - NAPI_THROW_IF_FAILED(env, status, Date()); - return Date(env, value); -} - -inline Date::Date() : Value() {} - -inline Date::Date(napi_env env, napi_value value) : Value(env, value) {} - -inline Date::operator double() const { - return ValueOf(); -} - -inline double Date::ValueOf() const { - double result; - napi_status status = napi_get_date_value(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} -#endif - -//////////////////////////////////////////////////////////////////////////////// -// Name class -//////////////////////////////////////////////////////////////////////////////// - -inline Name::Name() : Value() {} - -inline Name::Name(napi_env env, napi_value value) : Value(env, value) {} - -//////////////////////////////////////////////////////////////////////////////// -// String class -//////////////////////////////////////////////////////////////////////////////// - -inline String String::New(napi_env env, const std::string& val) { - return String::New(env, val.c_str(), val.size()); -} - -inline String String::New(napi_env env, const std::u16string& val) { - return String::New(env, val.c_str(), val.size()); -} - -inline String String::New(napi_env env, const char* val) { - // TODO(@gabrielschulhof) Remove if-statement when core's error handling is - // available in all supported versions. - if (val == nullptr) { - // Throw an error that looks like it came from core. 
- NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); - } - napi_value value; - napi_status status = - napi_create_string_utf8(env, val, std::strlen(val), &value); - NAPI_THROW_IF_FAILED(env, status, String()); - return String(env, value); -} - -inline String String::New(napi_env env, const char16_t* val) { - napi_value value; - // TODO(@gabrielschulhof) Remove if-statement when core's error handling is - // available in all supported versions. - if (val == nullptr) { - // Throw an error that looks like it came from core. - NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); - } - napi_status status = - napi_create_string_utf16(env, val, std::u16string(val).size(), &value); - NAPI_THROW_IF_FAILED(env, status, String()); - return String(env, value); -} - -inline String String::New(napi_env env, const char* val, size_t length) { - napi_value value; - napi_status status = napi_create_string_utf8(env, val, length, &value); - NAPI_THROW_IF_FAILED(env, status, String()); - return String(env, value); -} - -inline String String::New(napi_env env, const char16_t* val, size_t length) { - napi_value value; - napi_status status = napi_create_string_utf16(env, val, length, &value); - NAPI_THROW_IF_FAILED(env, status, String()); - return String(env, value); -} - -inline String::String() : Name() {} - -inline String::String(napi_env env, napi_value value) : Name(env, value) {} - -inline String::operator std::string() const { - return Utf8Value(); -} - -inline String::operator std::u16string() const { - return Utf16Value(); -} - -inline std::string String::Utf8Value() const { - size_t length; - napi_status status = - napi_get_value_string_utf8(_env, _value, nullptr, 0, &length); - NAPI_THROW_IF_FAILED(_env, status, ""); - - std::string value; - value.reserve(length + 1); - value.resize(length); - status = napi_get_value_string_utf8( - _env, _value, &value[0], value.capacity(), nullptr); - NAPI_THROW_IF_FAILED(_env, status, ""); - return value; -} - -inline std::u16string 
String::Utf16Value() const { - size_t length; - napi_status status = - napi_get_value_string_utf16(_env, _value, nullptr, 0, &length); - NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); - - std::u16string value; - value.reserve(length + 1); - value.resize(length); - status = napi_get_value_string_utf16( - _env, _value, &value[0], value.capacity(), nullptr); - NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); - return value; -} - -//////////////////////////////////////////////////////////////////////////////// -// Symbol class -//////////////////////////////////////////////////////////////////////////////// - -inline Symbol Symbol::New(napi_env env, const char* description) { - napi_value descriptionValue = description != nullptr - ? String::New(env, description) - : static_cast(nullptr); - return Symbol::New(env, descriptionValue); -} - -inline Symbol Symbol::New(napi_env env, const std::string& description) { - napi_value descriptionValue = String::New(env, description); - return Symbol::New(env, descriptionValue); -} - -inline Symbol Symbol::New(napi_env env, String description) { - napi_value descriptionValue = description; - return Symbol::New(env, descriptionValue); -} - -inline Symbol Symbol::New(napi_env env, napi_value description) { - napi_value value; - napi_status status = napi_create_symbol(env, description, &value); - NAPI_THROW_IF_FAILED(env, status, Symbol()); - return Symbol(env, value); -} - -inline MaybeOrValue Symbol::WellKnown(napi_env env, - const std::string& name) { -#if defined(NODE_ADDON_API_ENABLE_MAYBE) - Value symbol_obj; - Value symbol_value; - if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && - symbol_obj.As().Get(name).UnwrapTo(&symbol_value)) { - return Just(symbol_value.As()); - } - return Nothing(); -#else - return Napi::Env(env) - .Global() - .Get("Symbol") - .As() - .Get(name) - .As(); -#endif -} - -inline MaybeOrValue Symbol::For(napi_env env, - const std::string& description) { - napi_value 
descriptionValue = String::New(env, description); - return Symbol::For(env, descriptionValue); -} - -inline MaybeOrValue Symbol::For(napi_env env, const char* description) { - napi_value descriptionValue = String::New(env, description); - return Symbol::For(env, descriptionValue); -} - -inline MaybeOrValue Symbol::For(napi_env env, String description) { - return Symbol::For(env, static_cast(description)); -} - -inline MaybeOrValue Symbol::For(napi_env env, napi_value description) { -#if defined(NODE_ADDON_API_ENABLE_MAYBE) - Value symbol_obj; - Value symbol_for_value; - Value symbol_value; - if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && - symbol_obj.As().Get("for").UnwrapTo(&symbol_for_value) && - symbol_for_value.As() - .Call(symbol_obj, {description}) - .UnwrapTo(&symbol_value)) { - return Just(symbol_value.As()); - } - return Nothing(); -#else - Object symbol_obj = Napi::Env(env).Global().Get("Symbol").As(); - return symbol_obj.Get("for") - .As() - .Call(symbol_obj, {description}) - .As(); -#endif -} - -inline Symbol::Symbol() : Name() {} - -inline Symbol::Symbol(napi_env env, napi_value value) : Name(env, value) {} - -//////////////////////////////////////////////////////////////////////////////// -// Automagic value creation -//////////////////////////////////////////////////////////////////////////////// - -namespace details { -template -struct vf_number { - static Number From(napi_env env, T value) { - return Number::New(env, static_cast(value)); - } -}; - -template <> -struct vf_number { - static Boolean From(napi_env env, bool value) { - return Boolean::New(env, value); - } -}; - -struct vf_utf8_charp { - static String From(napi_env env, const char* value) { - return String::New(env, value); - } -}; - -struct vf_utf16_charp { - static String From(napi_env env, const char16_t* value) { - return String::New(env, value); - } -}; -struct vf_utf8_string { - static String From(napi_env env, const std::string& value) { - return 
String::New(env, value); - } -}; - -struct vf_utf16_string { - static String From(napi_env env, const std::u16string& value) { - return String::New(env, value); - } -}; - -template -struct vf_fallback { - static Value From(napi_env env, const T& value) { return Value(env, value); } -}; - -template -struct disjunction : std::false_type {}; -template -struct disjunction : B {}; -template -struct disjunction - : std::conditional>::type {}; - -template -struct can_make_string - : disjunction::type, - typename std::is_convertible::type, - typename std::is_convertible::type, - typename std::is_convertible::type> {}; -} // namespace details - -template -Value Value::From(napi_env env, const T& value) { - using Helper = typename std::conditional< - std::is_integral::value || std::is_floating_point::value, - details::vf_number, - typename std::conditional::value, - String, - details::vf_fallback>::type>::type; - return Helper::From(env, value); -} - -template -String String::From(napi_env env, const T& value) { - struct Dummy {}; - using Helper = typename std::conditional< - std::is_convertible::value, - details::vf_utf8_charp, - typename std::conditional< - std::is_convertible::value, - details::vf_utf16_charp, - typename std::conditional< - std::is_convertible::value, - details::vf_utf8_string, - typename std::conditional< - std::is_convertible::value, - details::vf_utf16_string, - Dummy>::type>::type>::type>::type; - return Helper::From(env, value); -} - -//////////////////////////////////////////////////////////////////////////////// -// Object class -//////////////////////////////////////////////////////////////////////////////// - -template -inline Object::PropertyLValue::operator Value() const { - MaybeOrValue val = Object(_env, _object).Get(_key); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - return val.Unwrap(); -#else - return val; -#endif -} - -template -template -inline Object::PropertyLValue& Object::PropertyLValue::operator=( - ValueType value) { -#ifdef 
NODE_ADDON_API_ENABLE_MAYBE - MaybeOrValue result = -#endif - Object(_env, _object).Set(_key, value); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - result.Unwrap(); -#endif - return *this; -} - -template -inline Object::PropertyLValue::PropertyLValue(Object object, Key key) - : _env(object.Env()), _object(object), _key(key) {} - -inline Object Object::New(napi_env env) { - napi_value value; - napi_status status = napi_create_object(env, &value); - NAPI_THROW_IF_FAILED(env, status, Object()); - return Object(env, value); -} - -inline Object::Object() : Value() {} - -inline Object::Object(napi_env env, napi_value value) : Value(env, value) {} - -inline Object::PropertyLValue Object::operator[]( - const char* utf8name) { - return PropertyLValue(*this, utf8name); -} - -inline Object::PropertyLValue Object::operator[]( - const std::string& utf8name) { - return PropertyLValue(*this, utf8name); -} - -inline Object::PropertyLValue Object::operator[](uint32_t index) { - return PropertyLValue(*this, index); -} - -inline Object::PropertyLValue Object::operator[](Value index) const { - return PropertyLValue(*this, index); -} - -inline MaybeOrValue Object::operator[](const char* utf8name) const { - return Get(utf8name); -} - -inline MaybeOrValue Object::operator[]( - const std::string& utf8name) const { - return Get(utf8name); -} - -inline MaybeOrValue Object::operator[](uint32_t index) const { - return Get(index); -} - -inline MaybeOrValue Object::Has(napi_value key) const { - bool result; - napi_status status = napi_has_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Has(Value key) const { - bool result; - napi_status status = napi_has_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Has(const char* utf8name) const { - bool result; - napi_status status = napi_has_named_property(_env, _value, utf8name, &result); - 
NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Has(const std::string& utf8name) const { - return Has(utf8name.c_str()); -} - -inline MaybeOrValue Object::HasOwnProperty(napi_value key) const { - bool result; - napi_status status = napi_has_own_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::HasOwnProperty(Value key) const { - bool result; - napi_status status = napi_has_own_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::HasOwnProperty(const char* utf8name) const { - napi_value key; - napi_status status = - napi_create_string_utf8(_env, utf8name, std::strlen(utf8name), &key); - NAPI_MAYBE_THROW_IF_FAILED(_env, status, bool); - return HasOwnProperty(key); -} - -inline MaybeOrValue Object::HasOwnProperty( - const std::string& utf8name) const { - return HasOwnProperty(utf8name.c_str()); -} - -inline MaybeOrValue Object::Get(napi_value key) const { - napi_value result; - napi_status status = napi_get_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); -} - -inline MaybeOrValue Object::Get(Value key) const { - napi_value result; - napi_status status = napi_get_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); -} - -inline MaybeOrValue Object::Get(const char* utf8name) const { - napi_value result; - napi_status status = napi_get_named_property(_env, _value, utf8name, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); -} - -inline MaybeOrValue Object::Get(const std::string& utf8name) const { - return Get(utf8name.c_str()); -} - -template -inline MaybeOrValue Object::Set(napi_value key, - const ValueType& value) const { - napi_status status = - napi_set_property(_env, _value, key, Value::From(_env, 
value)); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -template -inline MaybeOrValue Object::Set(Value key, const ValueType& value) const { - napi_status status = - napi_set_property(_env, _value, key, Value::From(_env, value)); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -template -inline MaybeOrValue Object::Set(const char* utf8name, - const ValueType& value) const { - napi_status status = - napi_set_named_property(_env, _value, utf8name, Value::From(_env, value)); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -template -inline MaybeOrValue Object::Set(const std::string& utf8name, - const ValueType& value) const { - return Set(utf8name.c_str(), value); -} - -inline MaybeOrValue Object::Delete(napi_value key) const { - bool result; - napi_status status = napi_delete_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Delete(Value key) const { - bool result; - napi_status status = napi_delete_property(_env, _value, key, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Delete(const char* utf8name) const { - return Delete(String::New(_env, utf8name)); -} - -inline MaybeOrValue Object::Delete(const std::string& utf8name) const { - return Delete(String::New(_env, utf8name)); -} - -inline MaybeOrValue Object::Has(uint32_t index) const { - bool result; - napi_status status = napi_has_element(_env, _value, index, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::Get(uint32_t index) const { - napi_value value; - napi_status status = napi_get_element(_env, _value, index, &value); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, value), Value); -} - -template -inline MaybeOrValue Object::Set(uint32_t index, - const ValueType& value) const { - napi_status status = - 
napi_set_element(_env, _value, index, Value::From(_env, value)); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -inline MaybeOrValue Object::Delete(uint32_t index) const { - bool result; - napi_status status = napi_delete_element(_env, _value, index, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -inline MaybeOrValue Object::GetPropertyNames() const { - napi_value result; - napi_status status = napi_get_property_names(_env, _value, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Array(_env, result), Array); -} - -inline MaybeOrValue Object::DefineProperty( - const PropertyDescriptor& property) const { - napi_status status = napi_define_properties( - _env, - _value, - 1, - reinterpret_cast(&property)); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -inline MaybeOrValue Object::DefineProperties( - const std::initializer_list& properties) const { - napi_status status = napi_define_properties( - _env, - _value, - properties.size(), - reinterpret_cast(properties.begin())); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -inline MaybeOrValue Object::DefineProperties( - const std::vector& properties) const { - napi_status status = napi_define_properties( - _env, - _value, - properties.size(), - reinterpret_cast(properties.data())); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -inline MaybeOrValue Object::InstanceOf( - const Function& constructor) const { - bool result; - napi_status status = napi_instanceof(_env, _value, constructor, &result); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); -} - -template -inline void Object::AddFinalizer(Finalizer finalizeCallback, T* data) const { - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), nullptr}); - napi_status status = - details::AttachData(_env, - *this, - data, - details::FinalizeData::Wrapper, - 
finalizeData); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED_VOID(_env, status); - } -} - -template -inline void Object::AddFinalizer(Finalizer finalizeCallback, - T* data, - Hint* finalizeHint) const { - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), finalizeHint}); - napi_status status = details::AttachData( - _env, - *this, - data, - details::FinalizeData::WrapperWithHint, - finalizeData); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED_VOID(_env, status); - } -} - -#ifdef NAPI_CPP_EXCEPTIONS -inline Object::const_iterator::const_iterator(const Object* object, - const Type type) { - _object = object; - _keys = object->GetPropertyNames(); - _index = type == Type::BEGIN ? 0 : _keys.Length(); -} - -inline Object::const_iterator Napi::Object::begin() const { - const_iterator it(this, Object::const_iterator::Type::BEGIN); - return it; -} - -inline Object::const_iterator Napi::Object::end() const { - const_iterator it(this, Object::const_iterator::Type::END); - return it; -} - -inline Object::const_iterator& Object::const_iterator::operator++() { - ++_index; - return *this; -} - -inline bool Object::const_iterator::operator==( - const const_iterator& other) const { - return _index == other._index; -} - -inline bool Object::const_iterator::operator!=( - const const_iterator& other) const { - return _index != other._index; -} - -inline const std::pair> -Object::const_iterator::operator*() const { - const Value key = _keys[_index]; - const PropertyLValue value = (*_object)[key]; - return {key, value}; -} - -inline Object::iterator::iterator(Object* object, const Type type) { - _object = object; - _keys = object->GetPropertyNames(); - _index = type == Type::BEGIN ? 
0 : _keys.Length(); -} - -inline Object::iterator Napi::Object::begin() { - iterator it(this, Object::iterator::Type::BEGIN); - return it; -} - -inline Object::iterator Napi::Object::end() { - iterator it(this, Object::iterator::Type::END); - return it; -} - -inline Object::iterator& Object::iterator::operator++() { - ++_index; - return *this; -} - -inline bool Object::iterator::operator==(const iterator& other) const { - return _index == other._index; -} - -inline bool Object::iterator::operator!=(const iterator& other) const { - return _index != other._index; -} - -inline std::pair> -Object::iterator::operator*() { - Value key = _keys[_index]; - PropertyLValue value = (*_object)[key]; - return {key, value}; -} -#endif // NAPI_CPP_EXCEPTIONS - -#if NAPI_VERSION >= 8 -inline MaybeOrValue Object::Freeze() const { - napi_status status = napi_object_freeze(_env, _value); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} - -inline MaybeOrValue Object::Seal() const { - napi_status status = napi_object_seal(_env, _value); - NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); -} -#endif // NAPI_VERSION >= 8 - -//////////////////////////////////////////////////////////////////////////////// -// External class -//////////////////////////////////////////////////////////////////////////////// - -template -inline External External::New(napi_env env, T* data) { - napi_value value; - napi_status status = - napi_create_external(env, data, nullptr, nullptr, &value); - NAPI_THROW_IF_FAILED(env, status, External()); - return External(env, value); -} - -template -template -inline External External::New(napi_env env, - T* data, - Finalizer finalizeCallback) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), nullptr}); - napi_status status = - napi_create_external(env, - data, - details::FinalizeData::Wrapper, - finalizeData, - &value); - if (status != napi_ok) { - 
delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, External()); - } - return External(env, value); -} - -template -template -inline External External::New(napi_env env, - T* data, - Finalizer finalizeCallback, - Hint* finalizeHint) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), finalizeHint}); - napi_status status = napi_create_external( - env, - data, - details::FinalizeData::WrapperWithHint, - finalizeData, - &value); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, External()); - } - return External(env, value); -} - -template -inline External::External() : Value() {} - -template -inline External::External(napi_env env, napi_value value) - : Value(env, value) {} - -template -inline T* External::Data() const { - void* data; - napi_status status = napi_get_value_external(_env, _value, &data); - NAPI_THROW_IF_FAILED(_env, status, nullptr); - return reinterpret_cast(data); -} - -//////////////////////////////////////////////////////////////////////////////// -// Array class -//////////////////////////////////////////////////////////////////////////////// - -inline Array Array::New(napi_env env) { - napi_value value; - napi_status status = napi_create_array(env, &value); - NAPI_THROW_IF_FAILED(env, status, Array()); - return Array(env, value); -} - -inline Array Array::New(napi_env env, size_t length) { - napi_value value; - napi_status status = napi_create_array_with_length(env, length, &value); - NAPI_THROW_IF_FAILED(env, status, Array()); - return Array(env, value); -} - -inline Array::Array() : Object() {} - -inline Array::Array(napi_env env, napi_value value) : Object(env, value) {} - -inline uint32_t Array::Length() const { - uint32_t result; - napi_status status = napi_get_array_length(_env, _value, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - 
-//////////////////////////////////////////////////////////////////////////////// -// ArrayBuffer class -//////////////////////////////////////////////////////////////////////////////// - -inline ArrayBuffer ArrayBuffer::New(napi_env env, size_t byteLength) { - napi_value value; - void* data; - napi_status status = napi_create_arraybuffer(env, byteLength, &data, &value); - NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); - - return ArrayBuffer(env, value); -} - -inline ArrayBuffer ArrayBuffer::New(napi_env env, - void* externalData, - size_t byteLength) { - napi_value value; - napi_status status = napi_create_external_arraybuffer( - env, externalData, byteLength, nullptr, nullptr, &value); - NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); - - return ArrayBuffer(env, value); -} - -template -inline ArrayBuffer ArrayBuffer::New(napi_env env, - void* externalData, - size_t byteLength, - Finalizer finalizeCallback) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), nullptr}); - napi_status status = napi_create_external_arraybuffer( - env, - externalData, - byteLength, - details::FinalizeData::Wrapper, - finalizeData, - &value); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); - } - - return ArrayBuffer(env, value); -} - -template -inline ArrayBuffer ArrayBuffer::New(napi_env env, - void* externalData, - size_t byteLength, - Finalizer finalizeCallback, - Hint* finalizeHint) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), finalizeHint}); - napi_status status = napi_create_external_arraybuffer( - env, - externalData, - byteLength, - details::FinalizeData::WrapperWithHint, - finalizeData, - &value); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); - } - - return ArrayBuffer(env, value); -} - -inline 
ArrayBuffer::ArrayBuffer() : Object() {} - -inline ArrayBuffer::ArrayBuffer(napi_env env, napi_value value) - : Object(env, value) {} - -inline void* ArrayBuffer::Data() { - void* data; - napi_status status = napi_get_arraybuffer_info(_env, _value, &data, nullptr); - NAPI_THROW_IF_FAILED(_env, status, nullptr); - return data; -} - -inline size_t ArrayBuffer::ByteLength() { - size_t length; - napi_status status = - napi_get_arraybuffer_info(_env, _value, nullptr, &length); - NAPI_THROW_IF_FAILED(_env, status, 0); - return length; -} - -#if NAPI_VERSION >= 7 -inline bool ArrayBuffer::IsDetached() const { - bool detached; - napi_status status = napi_is_detached_arraybuffer(_env, _value, &detached); - NAPI_THROW_IF_FAILED(_env, status, false); - return detached; -} - -inline void ArrayBuffer::Detach() { - napi_status status = napi_detach_arraybuffer(_env, _value); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} -#endif // NAPI_VERSION >= 7 - -//////////////////////////////////////////////////////////////////////////////// -// DataView class -//////////////////////////////////////////////////////////////////////////////// -inline DataView DataView::New(napi_env env, Napi::ArrayBuffer arrayBuffer) { - return New(env, arrayBuffer, 0, arrayBuffer.ByteLength()); -} - -inline DataView DataView::New(napi_env env, - Napi::ArrayBuffer arrayBuffer, - size_t byteOffset) { - if (byteOffset > arrayBuffer.ByteLength()) { - NAPI_THROW(RangeError::New( - env, "Start offset is outside the bounds of the buffer"), - DataView()); - } - return New( - env, arrayBuffer, byteOffset, arrayBuffer.ByteLength() - byteOffset); -} - -inline DataView DataView::New(napi_env env, - Napi::ArrayBuffer arrayBuffer, - size_t byteOffset, - size_t byteLength) { - if (byteOffset + byteLength > arrayBuffer.ByteLength()) { - NAPI_THROW(RangeError::New(env, "Invalid DataView length"), DataView()); - } - napi_value value; - napi_status status = - napi_create_dataview(env, byteLength, arrayBuffer, byteOffset, 
&value); - NAPI_THROW_IF_FAILED(env, status, DataView()); - return DataView(env, value); -} - -inline DataView::DataView() : Object() {} - -inline DataView::DataView(napi_env env, napi_value value) : Object(env, value) { - napi_status status = napi_get_dataview_info(_env, - _value /* dataView */, - &_length /* byteLength */, - &_data /* data */, - nullptr /* arrayBuffer */, - nullptr /* byteOffset */); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline Napi::ArrayBuffer DataView::ArrayBuffer() const { - napi_value arrayBuffer; - napi_status status = napi_get_dataview_info(_env, - _value /* dataView */, - nullptr /* byteLength */, - nullptr /* data */, - &arrayBuffer /* arrayBuffer */, - nullptr /* byteOffset */); - NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); - return Napi::ArrayBuffer(_env, arrayBuffer); -} - -inline size_t DataView::ByteOffset() const { - size_t byteOffset; - napi_status status = napi_get_dataview_info(_env, - _value /* dataView */, - nullptr /* byteLength */, - nullptr /* data */, - nullptr /* arrayBuffer */, - &byteOffset /* byteOffset */); - NAPI_THROW_IF_FAILED(_env, status, 0); - return byteOffset; -} - -inline size_t DataView::ByteLength() const { - return _length; -} - -inline void* DataView::Data() const { - return _data; -} - -inline float DataView::GetFloat32(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline double DataView::GetFloat64(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline int8_t DataView::GetInt8(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline int16_t DataView::GetInt16(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline int32_t DataView::GetInt32(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline uint8_t DataView::GetUint8(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline uint16_t DataView::GetUint16(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline uint32_t 
DataView::GetUint32(size_t byteOffset) const { - return ReadData(byteOffset); -} - -inline void DataView::SetFloat32(size_t byteOffset, float value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetFloat64(size_t byteOffset, double value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetInt8(size_t byteOffset, int8_t value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetInt16(size_t byteOffset, int16_t value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetInt32(size_t byteOffset, int32_t value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetUint8(size_t byteOffset, uint8_t value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetUint16(size_t byteOffset, uint16_t value) const { - WriteData(byteOffset, value); -} - -inline void DataView::SetUint32(size_t byteOffset, uint32_t value) const { - WriteData(byteOffset, value); -} - -template -inline T DataView::ReadData(size_t byteOffset) const { - if (byteOffset + sizeof(T) > _length || - byteOffset + sizeof(T) < byteOffset) { // overflow - NAPI_THROW( - RangeError::New(_env, "Offset is outside the bounds of the DataView"), - 0); - } - - return *reinterpret_cast(static_cast(_data) + byteOffset); -} - -template -inline void DataView::WriteData(size_t byteOffset, T value) const { - if (byteOffset + sizeof(T) > _length || - byteOffset + sizeof(T) < byteOffset) { // overflow - NAPI_THROW_VOID( - RangeError::New(_env, "Offset is outside the bounds of the DataView")); - } - - *reinterpret_cast(static_cast(_data) + byteOffset) = value; -} - -//////////////////////////////////////////////////////////////////////////////// -// TypedArray class -//////////////////////////////////////////////////////////////////////////////// - -inline TypedArray::TypedArray() - : Object(), _type(napi_typedarray_type::napi_int8_array), _length(0) {} - -inline TypedArray::TypedArray(napi_env env, napi_value value) 
- : Object(env, value), - _type(napi_typedarray_type::napi_int8_array), - _length(0) { - if (value != nullptr) { - napi_status status = - napi_get_typedarray_info(_env, - _value, - &const_cast(this)->_type, - &const_cast(this)->_length, - nullptr, - nullptr, - nullptr); - NAPI_THROW_IF_FAILED_VOID(_env, status); - } -} - -inline TypedArray::TypedArray(napi_env env, - napi_value value, - napi_typedarray_type type, - size_t length) - : Object(env, value), _type(type), _length(length) {} - -inline napi_typedarray_type TypedArray::TypedArrayType() const { - return _type; -} - -inline uint8_t TypedArray::ElementSize() const { - switch (_type) { - case napi_int8_array: - case napi_uint8_array: - case napi_uint8_clamped_array: - return 1; - case napi_int16_array: - case napi_uint16_array: - return 2; - case napi_int32_array: - case napi_uint32_array: - case napi_float32_array: - return 4; - case napi_float64_array: -#if (NAPI_VERSION > 5) - case napi_bigint64_array: - case napi_biguint64_array: -#endif // (NAPI_VERSION > 5) - return 8; - default: - return 0; - } -} - -inline size_t TypedArray::ElementLength() const { - return _length; -} - -inline size_t TypedArray::ByteOffset() const { - size_t byteOffset; - napi_status status = napi_get_typedarray_info( - _env, _value, nullptr, nullptr, nullptr, nullptr, &byteOffset); - NAPI_THROW_IF_FAILED(_env, status, 0); - return byteOffset; -} - -inline size_t TypedArray::ByteLength() const { - return ElementSize() * ElementLength(); -} - -inline Napi::ArrayBuffer TypedArray::ArrayBuffer() const { - napi_value arrayBuffer; - napi_status status = napi_get_typedarray_info( - _env, _value, nullptr, nullptr, nullptr, &arrayBuffer, nullptr); - NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); - return Napi::ArrayBuffer(_env, arrayBuffer); -} - -//////////////////////////////////////////////////////////////////////////////// -// TypedArrayOf class -//////////////////////////////////////////////////////////////////////////////// - 
-template -inline TypedArrayOf TypedArrayOf::New(napi_env env, - size_t elementLength, - napi_typedarray_type type) { - Napi::ArrayBuffer arrayBuffer = - Napi::ArrayBuffer::New(env, elementLength * sizeof(T)); - return New(env, elementLength, arrayBuffer, 0, type); -} - -template -inline TypedArrayOf TypedArrayOf::New(napi_env env, - size_t elementLength, - Napi::ArrayBuffer arrayBuffer, - size_t bufferOffset, - napi_typedarray_type type) { - napi_value value; - napi_status status = napi_create_typedarray( - env, type, elementLength, arrayBuffer, bufferOffset, &value); - NAPI_THROW_IF_FAILED(env, status, TypedArrayOf()); - - return TypedArrayOf( - env, - value, - type, - elementLength, - reinterpret_cast(reinterpret_cast(arrayBuffer.Data()) + - bufferOffset)); -} - -template -inline TypedArrayOf::TypedArrayOf() : TypedArray(), _data(nullptr) {} - -template -inline TypedArrayOf::TypedArrayOf(napi_env env, napi_value value) - : TypedArray(env, value), _data(nullptr) { - napi_status status = napi_ok; - if (value != nullptr) { - void* data = nullptr; - status = napi_get_typedarray_info( - _env, _value, &_type, &_length, &data, nullptr, nullptr); - _data = static_cast(data); - } else { - _type = TypedArrayTypeForPrimitiveType(); - _length = 0; - } - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -template -inline TypedArrayOf::TypedArrayOf(napi_env env, - napi_value value, - napi_typedarray_type type, - size_t length, - T* data) - : TypedArray(env, value, type, length), _data(data) { - if (!(type == TypedArrayTypeForPrimitiveType() || - (type == napi_uint8_clamped_array && - std::is_same::value))) { - NAPI_THROW_VOID(TypeError::New( - env, - "Array type must match the template parameter. 
" - "(Uint8 arrays may optionally have the \"clamped\" array type.)")); - } -} - -template -inline T& TypedArrayOf::operator[](size_t index) { - return _data[index]; -} - -template -inline const T& TypedArrayOf::operator[](size_t index) const { - return _data[index]; -} - -template -inline T* TypedArrayOf::Data() { - return _data; -} - -template -inline const T* TypedArrayOf::Data() const { - return _data; -} - -//////////////////////////////////////////////////////////////////////////////// -// Function class -//////////////////////////////////////////////////////////////////////////////// - -template -inline napi_status CreateFunction(napi_env env, - const char* utf8name, - napi_callback cb, - CbData* data, - napi_value* result) { - napi_status status = - napi_create_function(env, utf8name, NAPI_AUTO_LENGTH, cb, data, result); - if (status == napi_ok) { - status = Napi::details::AttachData(env, *result, data); - } - - return status; -} - -template -inline Function Function::New(napi_env env, const char* utf8name, void* data) { - napi_value result = nullptr; - napi_status status = napi_create_function(env, - utf8name, - NAPI_AUTO_LENGTH, - details::TemplatedVoidCallback, - data, - &result); - NAPI_THROW_IF_FAILED(env, status, Function()); - return Function(env, result); -} - -template -inline Function Function::New(napi_env env, const char* utf8name, void* data) { - napi_value result = nullptr; - napi_status status = napi_create_function(env, - utf8name, - NAPI_AUTO_LENGTH, - details::TemplatedCallback, - data, - &result); - NAPI_THROW_IF_FAILED(env, status, Function()); - return Function(env, result); -} - -template -inline Function Function::New(napi_env env, - const std::string& utf8name, - void* data) { - return Function::New(env, utf8name.c_str(), data); -} - -template -inline Function Function::New(napi_env env, - const std::string& utf8name, - void* data) { - return Function::New(env, utf8name.c_str(), data); -} - -template -inline Function 
Function::New(napi_env env, - Callable cb, - const char* utf8name, - void* data) { - using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); - using CbData = details::CallbackData; - auto callbackData = new CbData{std::move(cb), data}; - - napi_value value; - napi_status status = - CreateFunction(env, utf8name, CbData::Wrapper, callbackData, &value); - if (status != napi_ok) { - delete callbackData; - NAPI_THROW_IF_FAILED(env, status, Function()); - } - - return Function(env, value); -} - -template -inline Function Function::New(napi_env env, - Callable cb, - const std::string& utf8name, - void* data) { - return New(env, cb, utf8name.c_str(), data); -} - -inline Function::Function() : Object() {} - -inline Function::Function(napi_env env, napi_value value) - : Object(env, value) {} - -inline MaybeOrValue Function::operator()( - const std::initializer_list& args) const { - return Call(Env().Undefined(), args); -} - -inline MaybeOrValue Function::Call( - const std::initializer_list& args) const { - return Call(Env().Undefined(), args); -} - -inline MaybeOrValue Function::Call( - const std::vector& args) const { - return Call(Env().Undefined(), args); -} - -inline MaybeOrValue Function::Call( - const std::vector& args) const { - return Call(Env().Undefined(), args); -} - -inline MaybeOrValue Function::Call(size_t argc, - const napi_value* args) const { - return Call(Env().Undefined(), argc, args); -} - -inline MaybeOrValue Function::Call( - napi_value recv, const std::initializer_list& args) const { - return Call(recv, args.size(), args.begin()); -} - -inline MaybeOrValue Function::Call( - napi_value recv, const std::vector& args) const { - return Call(recv, args.size(), args.data()); -} - -inline MaybeOrValue Function::Call( - napi_value recv, const std::vector& args) const { - const size_t argc = args.size(); - const size_t stackArgsCount = 6; - napi_value stackArgs[stackArgsCount]; - std::vector heapArgs; - napi_value* argv; - if (argc <= stackArgsCount) { 
- argv = stackArgs; - } else { - heapArgs.resize(argc); - argv = heapArgs.data(); - } - - for (size_t index = 0; index < argc; index++) { - argv[index] = static_cast(args[index]); - } - - return Call(recv, argc, argv); -} - -inline MaybeOrValue Function::Call(napi_value recv, - size_t argc, - const napi_value* args) const { - napi_value result; - napi_status status = - napi_call_function(_env, recv, _value, argc, args, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Value(_env, result), Napi::Value); -} - -inline MaybeOrValue Function::MakeCallback( - napi_value recv, - const std::initializer_list& args, - napi_async_context context) const { - return MakeCallback(recv, args.size(), args.begin(), context); -} - -inline MaybeOrValue Function::MakeCallback( - napi_value recv, - const std::vector& args, - napi_async_context context) const { - return MakeCallback(recv, args.size(), args.data(), context); -} - -inline MaybeOrValue Function::MakeCallback( - napi_value recv, - size_t argc, - const napi_value* args, - napi_async_context context) const { - napi_value result; - napi_status status = - napi_make_callback(_env, context, recv, _value, argc, args, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Value(_env, result), Napi::Value); -} - -inline MaybeOrValue Function::New( - const std::initializer_list& args) const { - return New(args.size(), args.begin()); -} - -inline MaybeOrValue Function::New( - const std::vector& args) const { - return New(args.size(), args.data()); -} - -inline MaybeOrValue Function::New(size_t argc, - const napi_value* args) const { - napi_value result; - napi_status status = napi_new_instance(_env, _value, argc, args, &result); - NAPI_RETURN_OR_THROW_IF_FAILED( - _env, status, Napi::Object(_env, result), Napi::Object); -} - -//////////////////////////////////////////////////////////////////////////////// -// Promise class -//////////////////////////////////////////////////////////////////////////////// - 
-inline Promise::Deferred Promise::Deferred::New(napi_env env) { - return Promise::Deferred(env); -} - -inline Promise::Deferred::Deferred(napi_env env) : _env(env) { - napi_status status = napi_create_promise(_env, &_deferred, &_promise); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline Promise Promise::Deferred::Promise() const { - return Napi::Promise(_env, _promise); -} - -inline Napi::Env Promise::Deferred::Env() const { - return Napi::Env(_env); -} - -inline void Promise::Deferred::Resolve(napi_value value) const { - napi_status status = napi_resolve_deferred(_env, _deferred, value); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline void Promise::Deferred::Reject(napi_value value) const { - napi_status status = napi_reject_deferred(_env, _deferred, value); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline Promise::Promise(napi_env env, napi_value value) : Object(env, value) {} - -//////////////////////////////////////////////////////////////////////////////// -// Buffer class -//////////////////////////////////////////////////////////////////////////////// - -template -inline Buffer Buffer::New(napi_env env, size_t length) { - napi_value value; - void* data; - napi_status status = - napi_create_buffer(env, length * sizeof(T), &data, &value); - NAPI_THROW_IF_FAILED(env, status, Buffer()); - return Buffer(env, value, length, static_cast(data)); -} - -template -inline Buffer Buffer::New(napi_env env, T* data, size_t length) { - napi_value value; - napi_status status = napi_create_external_buffer( - env, length * sizeof(T), data, nullptr, nullptr, &value); - NAPI_THROW_IF_FAILED(env, status, Buffer()); - return Buffer(env, value, length, data); -} - -template -template -inline Buffer Buffer::New(napi_env env, - T* data, - size_t length, - Finalizer finalizeCallback) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), nullptr}); - napi_status status = - 
napi_create_external_buffer(env, - length * sizeof(T), - data, - details::FinalizeData::Wrapper, - finalizeData, - &value); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, Buffer()); - } - return Buffer(env, value, length, data); -} - -template -template -inline Buffer Buffer::New(napi_env env, - T* data, - size_t length, - Finalizer finalizeCallback, - Hint* finalizeHint) { - napi_value value; - details::FinalizeData* finalizeData = - new details::FinalizeData( - {std::move(finalizeCallback), finalizeHint}); - napi_status status = napi_create_external_buffer( - env, - length * sizeof(T), - data, - details::FinalizeData::WrapperWithHint, - finalizeData, - &value); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, Buffer()); - } - return Buffer(env, value, length, data); -} - -template -inline Buffer Buffer::Copy(napi_env env, const T* data, size_t length) { - napi_value value; - napi_status status = - napi_create_buffer_copy(env, length * sizeof(T), data, nullptr, &value); - NAPI_THROW_IF_FAILED(env, status, Buffer()); - return Buffer(env, value); -} - -template -inline Buffer::Buffer() : Uint8Array(), _length(0), _data(nullptr) {} - -template -inline Buffer::Buffer(napi_env env, napi_value value) - : Uint8Array(env, value), _length(0), _data(nullptr) {} - -template -inline Buffer::Buffer(napi_env env, napi_value value, size_t length, T* data) - : Uint8Array(env, value), _length(length), _data(data) {} - -template -inline size_t Buffer::Length() const { - EnsureInfo(); - return _length; -} - -template -inline T* Buffer::Data() const { - EnsureInfo(); - return _data; -} - -template -inline void Buffer::EnsureInfo() const { - // The Buffer instance may have been constructed from a napi_value whose - // length/data are not yet known. Fetch and cache these values just once, - // since they can never change during the lifetime of the Buffer. 
- if (_data == nullptr) { - size_t byteLength; - void* voidData; - napi_status status = - napi_get_buffer_info(_env, _value, &voidData, &byteLength); - NAPI_THROW_IF_FAILED_VOID(_env, status); - _length = byteLength / sizeof(T); - _data = static_cast(voidData); - } -} - -//////////////////////////////////////////////////////////////////////////////// -// Error class -//////////////////////////////////////////////////////////////////////////////// - -inline Error Error::New(napi_env env) { - napi_status status; - napi_value error = nullptr; - bool is_exception_pending; - napi_extended_error_info last_error_info_copy; - - { - // We must retrieve the last error info before doing anything else because - // doing anything else will replace the last error info. - const napi_extended_error_info* last_error_info; - status = napi_get_last_error_info(env, &last_error_info); - NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_get_last_error_info"); - - // All fields of the `napi_extended_error_info` structure gets reset in - // subsequent Node-API function calls on the same `env`. This includes a - // call to `napi_is_exception_pending()`. So here it is necessary to make a - // copy of the information as the `error_code` field is used later on. - memcpy(&last_error_info_copy, - last_error_info, - sizeof(napi_extended_error_info)); - } - - status = napi_is_exception_pending(env, &is_exception_pending); - NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_is_exception_pending"); - - // A pending exception takes precedence over any internal error status. - if (is_exception_pending) { - status = napi_get_and_clear_last_exception(env, &error); - NAPI_FATAL_IF_FAILED( - status, "Error::New", "napi_get_and_clear_last_exception"); - } else { - const char* error_message = last_error_info_copy.error_message != nullptr - ? 
last_error_info_copy.error_message - : "Error in native callback"; - - napi_value message; - status = napi_create_string_utf8( - env, error_message, std::strlen(error_message), &message); - NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_string_utf8"); - - switch (last_error_info_copy.error_code) { - case napi_object_expected: - case napi_string_expected: - case napi_boolean_expected: - case napi_number_expected: - status = napi_create_type_error(env, nullptr, message, &error); - break; - default: - status = napi_create_error(env, nullptr, message, &error); - break; - } - NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_error"); - } - - return Error(env, error); -} - -inline Error Error::New(napi_env env, const char* message) { - return Error::New( - env, message, std::strlen(message), napi_create_error); -} - -inline Error Error::New(napi_env env, const std::string& message) { - return Error::New( - env, message.c_str(), message.size(), napi_create_error); -} - -inline NAPI_NO_RETURN void Error::Fatal(const char* location, - const char* message) { - napi_fatal_error(location, NAPI_AUTO_LENGTH, message, NAPI_AUTO_LENGTH); -} - -inline Error::Error() : ObjectReference() {} - -inline Error::Error(napi_env env, napi_value value) - : ObjectReference(env, nullptr) { - if (value != nullptr) { - // Attempting to create a reference on the error object. - // If it's not a Object/Function/Symbol, this call will return an error - // status. 
- napi_status status = napi_create_reference(env, value, 1, &_ref); - - if (status != napi_ok) { - napi_value wrappedErrorObj; - - // Create an error object - status = napi_create_object(env, &wrappedErrorObj); - NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_object"); - - // property flag that we attach to show the error object is wrapped - napi_property_descriptor wrapObjFlag = { - ERROR_WRAP_VALUE(), // Unique GUID identifier since Symbol isn't a - // viable option - nullptr, - nullptr, - nullptr, - nullptr, - Value::From(env, value), - napi_enumerable, - nullptr}; - - status = napi_define_properties(env, wrappedErrorObj, 1, &wrapObjFlag); - NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_define_properties"); - - // Create a reference on the newly wrapped object - status = napi_create_reference(env, wrappedErrorObj, 1, &_ref); - } - - // Avoid infinite recursion in the failure case. - NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_reference"); - } -} - -inline Object Error::Value() const { - if (_ref == nullptr) { - return Object(_env, nullptr); - } - - napi_value refValue; - napi_status status = napi_get_reference_value(_env, _ref, &refValue); - NAPI_THROW_IF_FAILED(_env, status, Object()); - - napi_valuetype type; - status = napi_typeof(_env, refValue, &type); - NAPI_THROW_IF_FAILED(_env, status, Object()); - - // If refValue isn't a symbol, then we proceed to whether the refValue has the - // wrapped error flag - if (type != napi_symbol) { - // We are checking if the object is wrapped - bool isWrappedObject = false; - - status = napi_has_property(_env, - refValue, - String::From(_env, ERROR_WRAP_VALUE()), - &isWrappedObject); - - // Don't care about status - if (isWrappedObject) { - napi_value unwrappedValue; - status = napi_get_property(_env, - refValue, - String::From(_env, ERROR_WRAP_VALUE()), - &unwrappedValue); - NAPI_THROW_IF_FAILED(_env, status, Object()); - - return Object(_env, unwrappedValue); - } - } - - return Object(_env, 
refValue); -} - -inline Error::Error(Error&& other) : ObjectReference(std::move(other)) {} - -inline Error& Error::operator=(Error&& other) { - static_cast*>(this)->operator=(std::move(other)); - return *this; -} - -inline Error::Error(const Error& other) : ObjectReference(other) {} - -inline Error& Error::operator=(const Error& other) { - Reset(); - - _env = other.Env(); - HandleScope scope(_env); - - napi_value value = other.Value(); - if (value != nullptr) { - napi_status status = napi_create_reference(_env, value, 1, &_ref); - NAPI_THROW_IF_FAILED(_env, status, *this); - } - - return *this; -} - -inline const std::string& Error::Message() const NAPI_NOEXCEPT { - if (_message.size() == 0 && _env != nullptr) { -#ifdef NAPI_CPP_EXCEPTIONS - try { - _message = Get("message").As(); - } catch (...) { - // Catch all errors here, to include e.g. a std::bad_alloc from - // the std::string::operator=, because this method may not throw. - } -#else // NAPI_CPP_EXCEPTIONS -#if defined(NODE_ADDON_API_ENABLE_MAYBE) - Napi::Value message_val; - if (Get("message").UnwrapTo(&message_val)) { - _message = message_val.As(); - } -#else - _message = Get("message").As(); -#endif -#endif // NAPI_CPP_EXCEPTIONS - } - return _message; -} - -// we created an object on the &_ref -inline void Error::ThrowAsJavaScriptException() const { - HandleScope scope(_env); - if (!IsEmpty()) { -#ifdef NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS - bool pendingException = false; - - // check if there is already a pending exception. If so don't try to throw a - // new one as that is not allowed/possible - napi_status status = napi_is_exception_pending(_env, &pendingException); - - if ((status != napi_ok) || - ((status == napi_ok) && (pendingException == false))) { - // We intentionally don't use `NAPI_THROW_*` macros here to ensure - // that there is no possible recursion as `ThrowAsJavaScriptException` - // is part of `NAPI_THROW_*` macro definition for noexcept. 
- - status = napi_throw(_env, Value()); - - if (status == napi_pending_exception) { - // The environment must be terminating as we checked earlier and there - // was no pending exception. In this case continuing will result - // in a fatal error and there is nothing the author has done incorrectly - // in their code that is worth flagging through a fatal error - return; - } - } else { - status = napi_pending_exception; - } -#else - // We intentionally don't use `NAPI_THROW_*` macros here to ensure - // that there is no possible recursion as `ThrowAsJavaScriptException` - // is part of `NAPI_THROW_*` macro definition for noexcept. - - napi_status status = napi_throw(_env, Value()); -#endif - -#ifdef NAPI_CPP_EXCEPTIONS - if (status != napi_ok) { - throw Error::New(_env); - } -#else // NAPI_CPP_EXCEPTIONS - NAPI_FATAL_IF_FAILED( - status, "Error::ThrowAsJavaScriptException", "napi_throw"); -#endif // NAPI_CPP_EXCEPTIONS - } -} - -#ifdef NAPI_CPP_EXCEPTIONS - -inline const char* Error::what() const NAPI_NOEXCEPT { - return Message().c_str(); -} - -#endif // NAPI_CPP_EXCEPTIONS - -inline const char* Error::ERROR_WRAP_VALUE() NAPI_NOEXCEPT { - return "4bda9e7e-4913-4dbc-95de-891cbf66598e-errorVal"; -} - -template -inline TError Error::New(napi_env env, - const char* message, - size_t length, - create_error_fn create_error) { - napi_value str; - napi_status status = napi_create_string_utf8(env, message, length, &str); - NAPI_THROW_IF_FAILED(env, status, TError()); - - napi_value error; - status = create_error(env, nullptr, str, &error); - NAPI_THROW_IF_FAILED(env, status, TError()); - - return TError(env, error); -} - -inline TypeError TypeError::New(napi_env env, const char* message) { - return Error::New( - env, message, std::strlen(message), napi_create_type_error); -} - -inline TypeError TypeError::New(napi_env env, const std::string& message) { - return Error::New( - env, message.c_str(), message.size(), napi_create_type_error); -} - -inline TypeError::TypeError() : 
Error() {} - -inline TypeError::TypeError(napi_env env, napi_value value) - : Error(env, value) {} - -inline RangeError RangeError::New(napi_env env, const char* message) { - return Error::New( - env, message, std::strlen(message), napi_create_range_error); -} - -inline RangeError RangeError::New(napi_env env, const std::string& message) { - return Error::New( - env, message.c_str(), message.size(), napi_create_range_error); -} - -inline RangeError::RangeError() : Error() {} - -inline RangeError::RangeError(napi_env env, napi_value value) - : Error(env, value) {} - -//////////////////////////////////////////////////////////////////////////////// -// Reference class -//////////////////////////////////////////////////////////////////////////////// - -template -inline Reference Reference::New(const T& value, - uint32_t initialRefcount) { - napi_env env = value.Env(); - napi_value val = value; - - if (val == nullptr) { - return Reference(env, nullptr); - } - - napi_ref ref; - napi_status status = napi_create_reference(env, value, initialRefcount, &ref); - NAPI_THROW_IF_FAILED(env, status, Reference()); - - return Reference(env, ref); -} - -template -inline Reference::Reference() - : _env(nullptr), _ref(nullptr), _suppressDestruct(false) {} - -template -inline Reference::Reference(napi_env env, napi_ref ref) - : _env(env), _ref(ref), _suppressDestruct(false) {} - -template -inline Reference::~Reference() { - if (_ref != nullptr) { - if (!_suppressDestruct) { - napi_delete_reference(_env, _ref); - } - - _ref = nullptr; - } -} - -template -inline Reference::Reference(Reference&& other) - : _env(other._env), - _ref(other._ref), - _suppressDestruct(other._suppressDestruct) { - other._env = nullptr; - other._ref = nullptr; - other._suppressDestruct = false; -} - -template -inline Reference& Reference::operator=(Reference&& other) { - Reset(); - _env = other._env; - _ref = other._ref; - _suppressDestruct = other._suppressDestruct; - other._env = nullptr; - other._ref = 
nullptr; - other._suppressDestruct = false; - return *this; -} - -template -inline Reference::Reference(const Reference& other) - : _env(other._env), _ref(nullptr), _suppressDestruct(false) { - HandleScope scope(_env); - - napi_value value = other.Value(); - if (value != nullptr) { - // Copying is a limited scenario (currently only used for Error object) and - // always creates a strong reference to the given value even if the incoming - // reference is weak. - napi_status status = napi_create_reference(_env, value, 1, &_ref); - NAPI_FATAL_IF_FAILED( - status, "Reference::Reference", "napi_create_reference"); - } -} - -template -inline Reference::operator napi_ref() const { - return _ref; -} - -template -inline bool Reference::operator==(const Reference& other) const { - HandleScope scope(_env); - return this->Value().StrictEquals(other.Value()); -} - -template -inline bool Reference::operator!=(const Reference& other) const { - return !this->operator==(other); -} - -template -inline Napi::Env Reference::Env() const { - return Napi::Env(_env); -} - -template -inline bool Reference::IsEmpty() const { - return _ref == nullptr; -} - -template -inline T Reference::Value() const { - if (_ref == nullptr) { - return T(_env, nullptr); - } - - napi_value value; - napi_status status = napi_get_reference_value(_env, _ref, &value); - NAPI_THROW_IF_FAILED(_env, status, T()); - return T(_env, value); -} - -template -inline uint32_t Reference::Ref() const { - uint32_t result; - napi_status status = napi_reference_ref(_env, _ref, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -template -inline uint32_t Reference::Unref() const { - uint32_t result; - napi_status status = napi_reference_unref(_env, _ref, &result); - NAPI_THROW_IF_FAILED(_env, status, 0); - return result; -} - -template -inline void Reference::Reset() { - if (_ref != nullptr) { - napi_status status = napi_delete_reference(_env, _ref); - NAPI_THROW_IF_FAILED_VOID(_env, status); - _ref = 
nullptr; - } -} - -template -inline void Reference::Reset(const T& value, uint32_t refcount) { - Reset(); - _env = value.Env(); - - napi_value val = value; - if (val != nullptr) { - napi_status status = napi_create_reference(_env, value, refcount, &_ref); - NAPI_THROW_IF_FAILED_VOID(_env, status); - } -} - -template -inline void Reference::SuppressDestruct() { - _suppressDestruct = true; -} - -template -inline Reference Weak(T value) { - return Reference::New(value, 0); -} - -inline ObjectReference Weak(Object value) { - return Reference::New(value, 0); -} - -inline FunctionReference Weak(Function value) { - return Reference::New(value, 0); -} - -template -inline Reference Persistent(T value) { - return Reference::New(value, 1); -} - -inline ObjectReference Persistent(Object value) { - return Reference::New(value, 1); -} - -inline FunctionReference Persistent(Function value) { - return Reference::New(value, 1); -} - -//////////////////////////////////////////////////////////////////////////////// -// ObjectReference class -//////////////////////////////////////////////////////////////////////////////// - -inline ObjectReference::ObjectReference() : Reference() {} - -inline ObjectReference::ObjectReference(napi_env env, napi_ref ref) - : Reference(env, ref) {} - -inline ObjectReference::ObjectReference(Reference&& other) - : Reference(std::move(other)) {} - -inline ObjectReference& ObjectReference::operator=(Reference&& other) { - static_cast*>(this)->operator=(std::move(other)); - return *this; -} - -inline ObjectReference::ObjectReference(ObjectReference&& other) - : Reference(std::move(other)) {} - -inline ObjectReference& ObjectReference::operator=(ObjectReference&& other) { - static_cast*>(this)->operator=(std::move(other)); - return *this; -} - -inline ObjectReference::ObjectReference(const ObjectReference& other) - : Reference(other) {} - -inline MaybeOrValue ObjectReference::Get( - const char* utf8name) const { - EscapableHandleScope scope(_env); - 
MaybeOrValue result = Value().Get(utf8name); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue ObjectReference::Get( - const std::string& utf8name) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Get(utf8name); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue ObjectReference::Set(const char* utf8name, - napi_value value) const { - HandleScope scope(_env); - return Value().Set(utf8name, value); -} - -inline MaybeOrValue ObjectReference::Set(const char* utf8name, - Napi::Value value) const { - HandleScope scope(_env); - return Value().Set(utf8name, value); -} - -inline MaybeOrValue ObjectReference::Set(const char* utf8name, - const char* utf8value) const { - HandleScope scope(_env); - return Value().Set(utf8name, utf8value); -} - -inline MaybeOrValue ObjectReference::Set(const char* utf8name, - bool boolValue) const { - HandleScope scope(_env); - return Value().Set(utf8name, boolValue); -} - -inline MaybeOrValue ObjectReference::Set(const char* utf8name, - double numberValue) const { - HandleScope scope(_env); - return Value().Set(utf8name, numberValue); -} - -inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, - napi_value value) const { - HandleScope scope(_env); - return Value().Set(utf8name, value); -} - -inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, - Napi::Value value) const { - HandleScope scope(_env); - return Value().Set(utf8name, value); -} - -inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, - std::string& utf8value) const { - HandleScope 
scope(_env); - return Value().Set(utf8name, utf8value); -} - -inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, - bool boolValue) const { - HandleScope scope(_env); - return Value().Set(utf8name, boolValue); -} - -inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, - double numberValue) const { - HandleScope scope(_env); - return Value().Set(utf8name, numberValue); -} - -inline MaybeOrValue ObjectReference::Get(uint32_t index) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Get(index); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue ObjectReference::Set(uint32_t index, - napi_value value) const { - HandleScope scope(_env); - return Value().Set(index, value); -} - -inline MaybeOrValue ObjectReference::Set(uint32_t index, - Napi::Value value) const { - HandleScope scope(_env); - return Value().Set(index, value); -} - -inline MaybeOrValue ObjectReference::Set(uint32_t index, - const char* utf8value) const { - HandleScope scope(_env); - return Value().Set(index, utf8value); -} - -inline MaybeOrValue ObjectReference::Set( - uint32_t index, const std::string& utf8value) const { - HandleScope scope(_env); - return Value().Set(index, utf8value); -} - -inline MaybeOrValue ObjectReference::Set(uint32_t index, - bool boolValue) const { - HandleScope scope(_env); - return Value().Set(index, boolValue); -} - -inline MaybeOrValue ObjectReference::Set(uint32_t index, - double numberValue) const { - HandleScope scope(_env); - return Value().Set(index, numberValue); -} - -//////////////////////////////////////////////////////////////////////////////// -// FunctionReference class -//////////////////////////////////////////////////////////////////////////////// - -inline 
FunctionReference::FunctionReference() : Reference() {} - -inline FunctionReference::FunctionReference(napi_env env, napi_ref ref) - : Reference(env, ref) {} - -inline FunctionReference::FunctionReference(Reference&& other) - : Reference(std::move(other)) {} - -inline FunctionReference& FunctionReference::operator=( - Reference&& other) { - static_cast*>(this)->operator=(std::move(other)); - return *this; -} - -inline FunctionReference::FunctionReference(FunctionReference&& other) - : Reference(std::move(other)) {} - -inline FunctionReference& FunctionReference::operator=( - FunctionReference&& other) { - static_cast*>(this)->operator=(std::move(other)); - return *this; -} - -inline MaybeOrValue FunctionReference::operator()( - const std::initializer_list& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value()(args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::Call( - const std::initializer_list& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Call(args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::Call( - const std::vector& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Call(args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::Call( - napi_value recv, const 
std::initializer_list& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Call(recv, args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::Call( - napi_value recv, const std::vector& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Call(recv, args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::Call( - napi_value recv, size_t argc, const napi_value* args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().Call(recv, argc, args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::MakeCallback( - napi_value recv, - const std::initializer_list& args, - napi_async_context context) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().MakeCallback(recv, args, context); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::MakeCallback( - napi_value recv, - const std::vector& args, - napi_async_context context) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().MakeCallback(recv, args, context); -#ifdef 
NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::MakeCallback( - napi_value recv, - size_t argc, - const napi_value* args, - napi_async_context context) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = - Value().MakeCallback(recv, argc, args, context); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap())); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Value(); - } - return scope.Escape(result); -#endif -} - -inline MaybeOrValue FunctionReference::New( - const std::initializer_list& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().New(args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap()).As()); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Object(); - } - return scope.Escape(result).As(); -#endif -} - -inline MaybeOrValue FunctionReference::New( - const std::vector& args) const { - EscapableHandleScope scope(_env); - MaybeOrValue result = Value().New(args); -#ifdef NODE_ADDON_API_ENABLE_MAYBE - if (result.IsJust()) { - return Just(scope.Escape(result.Unwrap()).As()); - } - return result; -#else - if (scope.Env().IsExceptionPending()) { - return Object(); - } - return scope.Escape(result).As(); -#endif -} - -//////////////////////////////////////////////////////////////////////////////// -// CallbackInfo class -//////////////////////////////////////////////////////////////////////////////// - -inline CallbackInfo::CallbackInfo(napi_env env, napi_callback_info info) - : _env(env), - _info(info), - _this(nullptr), - _dynamicArgs(nullptr), - _data(nullptr) { - _argc = _staticArgCount; - _argv = _staticArgs; - napi_status 
status = - napi_get_cb_info(env, info, &_argc, _argv, &_this, &_data); - NAPI_THROW_IF_FAILED_VOID(_env, status); - - if (_argc > _staticArgCount) { - // Use either a fixed-size array (on the stack) or a dynamically-allocated - // array (on the heap) depending on the number of args. - _dynamicArgs = new napi_value[_argc]; - _argv = _dynamicArgs; - - status = napi_get_cb_info(env, info, &_argc, _argv, nullptr, nullptr); - NAPI_THROW_IF_FAILED_VOID(_env, status); - } -} - -inline CallbackInfo::~CallbackInfo() { - if (_dynamicArgs != nullptr) { - delete[] _dynamicArgs; - } -} - -inline CallbackInfo::operator napi_callback_info() const { - return _info; -} - -inline Value CallbackInfo::NewTarget() const { - napi_value newTarget; - napi_status status = napi_get_new_target(_env, _info, &newTarget); - NAPI_THROW_IF_FAILED(_env, status, Value()); - return Value(_env, newTarget); -} - -inline bool CallbackInfo::IsConstructCall() const { - return !NewTarget().IsEmpty(); -} - -inline Napi::Env CallbackInfo::Env() const { - return Napi::Env(_env); -} - -inline size_t CallbackInfo::Length() const { - return _argc; -} - -inline const Value CallbackInfo::operator[](size_t index) const { - return index < _argc ? 
Value(_env, _argv[index]) : Env().Undefined(); -} - -inline Value CallbackInfo::This() const { - if (_this == nullptr) { - return Env().Undefined(); - } - return Object(_env, _this); -} - -inline void* CallbackInfo::Data() const { - return _data; -} - -inline void CallbackInfo::SetData(void* data) { - _data = data; -} - -//////////////////////////////////////////////////////////////////////////////// -// PropertyDescriptor class -//////////////////////////////////////////////////////////////////////////////// - -template -PropertyDescriptor PropertyDescriptor::Accessor( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - - desc.utf8name = utf8name; - desc.getter = details::TemplatedCallback; - desc.attributes = attributes; - desc.data = data; - - return desc; -} - -template -PropertyDescriptor PropertyDescriptor::Accessor( - const std::string& utf8name, - napi_property_attributes attributes, - void* data) { - return Accessor(utf8name.c_str(), attributes, data); -} - -template -PropertyDescriptor PropertyDescriptor::Accessor( - Name name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - - desc.name = name; - desc.getter = details::TemplatedCallback; - desc.attributes = attributes; - desc.data = data; - - return desc; -} - -template -PropertyDescriptor PropertyDescriptor::Accessor( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - - desc.utf8name = utf8name; - desc.getter = details::TemplatedCallback; - desc.setter = details::TemplatedVoidCallback; - desc.attributes = attributes; - desc.data = data; - - return desc; -} - -template -PropertyDescriptor PropertyDescriptor::Accessor( - const std::string& utf8name, - napi_property_attributes attributes, - void* data) { - return Accessor(utf8name.c_str(), attributes, data); -} - 
-template -PropertyDescriptor PropertyDescriptor::Accessor( - Name name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - - desc.name = name; - desc.getter = details::TemplatedCallback; - desc.setter = details::TemplatedVoidCallback; - desc.attributes = attributes; - desc.data = data; - - return desc; -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - const char* utf8name, - Getter getter, - napi_property_attributes attributes, - void* data) { - using CbData = details::CallbackData; - auto callbackData = new CbData({getter, data}); - - napi_status status = AttachData(env, object, callbackData); - if (status != napi_ok) { - delete callbackData; - NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); - } - - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - CbData::Wrapper, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - const std::string& utf8name, - Getter getter, - napi_property_attributes attributes, - void* data) { - return Accessor(env, object, utf8name.c_str(), getter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - Name name, - Getter getter, - napi_property_attributes attributes, - void* data) { - using CbData = details::CallbackData; - auto callbackData = new CbData({getter, data}); - - napi_status status = AttachData(env, object, callbackData); - if (status != napi_ok) { - delete callbackData; - NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); - } - - return PropertyDescriptor({nullptr, - name, - nullptr, - CbData::Wrapper, - nullptr, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - const 
char* utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* data) { - using CbData = details::AccessorCallbackData; - auto callbackData = new CbData({getter, setter, data}); - - napi_status status = AttachData(env, object, callbackData); - if (status != napi_ok) { - delete callbackData; - NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); - } - - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - CbData::GetterWrapper, - CbData::SetterWrapper, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - const std::string& utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* data) { - return Accessor( - env, object, utf8name.c_str(), getter, setter, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Accessor( - Napi::Env env, - Napi::Object object, - Name name, - Getter getter, - Setter setter, - napi_property_attributes attributes, - void* data) { - using CbData = details::AccessorCallbackData; - auto callbackData = new CbData({getter, setter, data}); - - napi_status status = AttachData(env, object, callbackData); - if (status != napi_ok) { - delete callbackData; - NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); - } - - return PropertyDescriptor({nullptr, - name, - nullptr, - CbData::GetterWrapper, - CbData::SetterWrapper, - nullptr, - attributes, - callbackData}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - Napi::Env env, - Napi::Object /*object*/, - const char* utf8name, - Callable cb, - napi_property_attributes attributes, - void* data) { - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - nullptr, - nullptr, - Napi::Function::New(env, cb, utf8name, data), - attributes, - nullptr}); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - Napi::Env env, - Napi::Object 
object, - const std::string& utf8name, - Callable cb, - napi_property_attributes attributes, - void* data) { - return Function(env, object, utf8name.c_str(), cb, attributes, data); -} - -template -inline PropertyDescriptor PropertyDescriptor::Function( - Napi::Env env, - Napi::Object /*object*/, - Name name, - Callable cb, - napi_property_attributes attributes, - void* data) { - return PropertyDescriptor({nullptr, - name, - nullptr, - nullptr, - nullptr, - Napi::Function::New(env, cb, nullptr, data), - attributes, - nullptr}); -} - -inline PropertyDescriptor PropertyDescriptor::Value( - const char* utf8name, - napi_value value, - napi_property_attributes attributes) { - return PropertyDescriptor({utf8name, - nullptr, - nullptr, - nullptr, - nullptr, - value, - attributes, - nullptr}); -} - -inline PropertyDescriptor PropertyDescriptor::Value( - const std::string& utf8name, - napi_value value, - napi_property_attributes attributes) { - return Value(utf8name.c_str(), value, attributes); -} - -inline PropertyDescriptor PropertyDescriptor::Value( - napi_value name, napi_value value, napi_property_attributes attributes) { - return PropertyDescriptor( - {nullptr, name, nullptr, nullptr, nullptr, value, attributes, nullptr}); -} - -inline PropertyDescriptor PropertyDescriptor::Value( - Name name, Napi::Value value, napi_property_attributes attributes) { - napi_value nameValue = name; - napi_value valueValue = value; - return PropertyDescriptor::Value(nameValue, valueValue, attributes); -} - -inline PropertyDescriptor::PropertyDescriptor(napi_property_descriptor desc) - : _desc(desc) {} - -inline PropertyDescriptor::operator napi_property_descriptor&() { - return _desc; -} - -inline PropertyDescriptor::operator const napi_property_descriptor&() const { - return _desc; -} - -//////////////////////////////////////////////////////////////////////////////// -// InstanceWrap class -//////////////////////////////////////////////////////////////////////////////// - -template 
-inline void InstanceWrap::AttachPropData( - napi_env env, napi_value value, const napi_property_descriptor* prop) { - napi_status status; - if (!(prop->attributes & napi_static)) { - if (prop->method == T::InstanceVoidMethodCallbackWrapper) { - status = Napi::details::AttachData( - env, value, static_cast(prop->data)); - NAPI_THROW_IF_FAILED_VOID(env, status); - } else if (prop->method == T::InstanceMethodCallbackWrapper) { - status = Napi::details::AttachData( - env, value, static_cast(prop->data)); - NAPI_THROW_IF_FAILED_VOID(env, status); - } else if (prop->getter == T::InstanceGetterCallbackWrapper || - prop->setter == T::InstanceSetterCallbackWrapper) { - status = Napi::details::AttachData( - env, value, static_cast(prop->data)); - NAPI_THROW_IF_FAILED_VOID(env, status); - } - } -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - const char* utf8name, - InstanceVoidMethodCallback method, - napi_property_attributes attributes, - void* data) { - InstanceVoidMethodCallbackData* callbackData = - new InstanceVoidMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = T::InstanceVoidMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - const char* utf8name, - InstanceMethodCallback method, - napi_property_attributes attributes, - void* data) { - InstanceMethodCallbackData* callbackData = - new InstanceMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = T::InstanceMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - Symbol name, - InstanceVoidMethodCallback method, - napi_property_attributes attributes, - void* data) { - 
InstanceVoidMethodCallbackData* callbackData = - new InstanceVoidMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = T::InstanceVoidMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - Symbol name, - InstanceMethodCallback method, - napi_property_attributes attributes, - void* data) { - InstanceMethodCallbackData* callbackData = - new InstanceMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = T::InstanceMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -template ::InstanceVoidMethodCallback method> -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = details::TemplatedInstanceVoidCallback; - desc.data = data; - desc.attributes = attributes; - return desc; -} - -template -template ::InstanceMethodCallback method> -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = details::TemplatedInstanceCallback; - desc.data = data; - desc.attributes = attributes; - return desc; -} - -template -template ::InstanceVoidMethodCallback method> -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = details::TemplatedInstanceVoidCallback; - desc.data = data; - desc.attributes = attributes; - 
return desc; -} - -template -template ::InstanceMethodCallback method> -inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = details::TemplatedInstanceCallback; - desc.data = data; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( - const char* utf8name, - InstanceGetterCallback getter, - InstanceSetterCallback setter, - napi_property_attributes attributes, - void* data) { - InstanceAccessorCallbackData* callbackData = - new InstanceAccessorCallbackData({getter, setter, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; - desc.setter = setter != nullptr ? T::InstanceSetterCallbackWrapper : nullptr; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( - Symbol name, - InstanceGetterCallback getter, - InstanceSetterCallback setter, - napi_property_attributes attributes, - void* data) { - InstanceAccessorCallbackData* callbackData = - new InstanceAccessorCallbackData({getter, setter, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; - desc.setter = setter != nullptr ? 
T::InstanceSetterCallbackWrapper : nullptr; - desc.data = callbackData; - desc.attributes = attributes; - return desc; -} - -template -template ::InstanceGetterCallback getter, - typename InstanceWrap::InstanceSetterCallback setter> -inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.getter = details::TemplatedInstanceCallback; - desc.setter = This::WrapSetter(This::SetterTag()); - desc.data = data; - desc.attributes = attributes; - return desc; -} - -template -template ::InstanceGetterCallback getter, - typename InstanceWrap::InstanceSetterCallback setter> -inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.getter = details::TemplatedInstanceCallback; - desc.setter = This::WrapSetter(This::SetterTag()); - desc.data = data; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceValue( - const char* utf8name, - Napi::Value value, - napi_property_attributes attributes) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.value = value; - desc.attributes = attributes; - return desc; -} - -template -inline ClassPropertyDescriptor InstanceWrap::InstanceValue( - Symbol name, Napi::Value value, napi_property_attributes attributes) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.value = value; - desc.attributes = attributes; - return desc; -} - -template -inline napi_value InstanceWrap::InstanceVoidMethodCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - InstanceVoidMethodCallbackData* 
callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - T* instance = T::Unwrap(callbackInfo.This().As()); - auto cb = callbackData->callback; - (instance->*cb)(callbackInfo); - return nullptr; - }); -} - -template -inline napi_value InstanceWrap::InstanceMethodCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - InstanceMethodCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - T* instance = T::Unwrap(callbackInfo.This().As()); - auto cb = callbackData->callback; - return (instance->*cb)(callbackInfo); - }); -} - -template -inline napi_value InstanceWrap::InstanceGetterCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - InstanceAccessorCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - T* instance = T::Unwrap(callbackInfo.This().As()); - auto cb = callbackData->getterCallback; - return (instance->*cb)(callbackInfo); - }); -} - -template -inline napi_value InstanceWrap::InstanceSetterCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - InstanceAccessorCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - T* instance = T::Unwrap(callbackInfo.This().As()); - auto cb = callbackData->setterCallback; - (instance->*cb)(callbackInfo, callbackInfo[0]); - return nullptr; - }); -} - -template -template ::InstanceSetterCallback method> -inline napi_value InstanceWrap::WrappedMethod( - napi_env env, napi_callback_info info) NAPI_NOEXCEPT { - return details::WrapCallback([&] { - const CallbackInfo cbInfo(env, info); - T* instance = T::Unwrap(cbInfo.This().As()); - (instance->*method)(cbInfo, 
cbInfo[0]); - return nullptr; - }); -} - -//////////////////////////////////////////////////////////////////////////////// -// ObjectWrap class -//////////////////////////////////////////////////////////////////////////////// - -template -inline ObjectWrap::ObjectWrap(const Napi::CallbackInfo& callbackInfo) { - napi_env env = callbackInfo.Env(); - napi_value wrapper = callbackInfo.This(); - napi_status status; - napi_ref ref; - T* instance = static_cast(this); - status = napi_wrap(env, wrapper, instance, FinalizeCallback, nullptr, &ref); - NAPI_THROW_IF_FAILED_VOID(env, status); - - Reference* instanceRef = instance; - *instanceRef = Reference(env, ref); -} - -template -inline ObjectWrap::~ObjectWrap() { - // If the JS object still exists at this point, remove the finalizer added - // through `napi_wrap()`. - if (!IsEmpty()) { - Object object = Value(); - // It is not valid to call `napi_remove_wrap()` with an empty `object`. - // This happens e.g. during garbage collection. - if (!object.IsEmpty() && _construction_failed) { - napi_remove_wrap(Env(), object, nullptr); - } - } -} - -template -inline T* ObjectWrap::Unwrap(Object wrapper) { - void* unwrapped; - napi_status status = napi_unwrap(wrapper.Env(), wrapper, &unwrapped); - NAPI_THROW_IF_FAILED(wrapper.Env(), status, nullptr); - return static_cast(unwrapped); -} - -template -inline Function ObjectWrap::DefineClass( - Napi::Env env, - const char* utf8name, - const size_t props_count, - const napi_property_descriptor* descriptors, - void* data) { - napi_status status; - std::vector props(props_count); - - // We copy the descriptors to a local array because before defining the class - // we must replace static method property descriptors with value property - // descriptors such that the value is a function-valued `napi_value` created - // with `CreateFunction()`. 
- // - // This replacement could be made for instance methods as well, but V8 aborts - // if we do that, because it expects methods defined on the prototype template - // to have `FunctionTemplate`s. - for (size_t index = 0; index < props_count; index++) { - props[index] = descriptors[index]; - napi_property_descriptor* prop = &props[index]; - if (prop->method == T::StaticMethodCallbackWrapper) { - status = - CreateFunction(env, - utf8name, - prop->method, - static_cast(prop->data), - &(prop->value)); - NAPI_THROW_IF_FAILED(env, status, Function()); - prop->method = nullptr; - prop->data = nullptr; - } else if (prop->method == T::StaticVoidMethodCallbackWrapper) { - status = - CreateFunction(env, - utf8name, - prop->method, - static_cast(prop->data), - &(prop->value)); - NAPI_THROW_IF_FAILED(env, status, Function()); - prop->method = nullptr; - prop->data = nullptr; - } - } - - napi_value value; - status = napi_define_class(env, - utf8name, - NAPI_AUTO_LENGTH, - T::ConstructorCallbackWrapper, - data, - props_count, - props.data(), - &value); - NAPI_THROW_IF_FAILED(env, status, Function()); - - // After defining the class we iterate once more over the property descriptors - // and attach the data associated with accessors and instance methods to the - // newly created JavaScript class. - for (size_t idx = 0; idx < props_count; idx++) { - const napi_property_descriptor* prop = &props[idx]; - - if (prop->getter == T::StaticGetterCallbackWrapper || - prop->setter == T::StaticSetterCallbackWrapper) { - status = Napi::details::AttachData( - env, value, static_cast(prop->data)); - NAPI_THROW_IF_FAILED(env, status, Function()); - } else { - // InstanceWrap::AttachPropData is responsible for attaching the data - // of instance methods and accessors. 
- T::AttachPropData(env, value, prop); - } - } - - return Function(env, value); -} - -template -inline Function ObjectWrap::DefineClass( - Napi::Env env, - const char* utf8name, - const std::initializer_list>& properties, - void* data) { - return DefineClass( - env, - utf8name, - properties.size(), - reinterpret_cast(properties.begin()), - data); -} - -template -inline Function ObjectWrap::DefineClass( - Napi::Env env, - const char* utf8name, - const std::vector>& properties, - void* data) { - return DefineClass( - env, - utf8name, - properties.size(), - reinterpret_cast(properties.data()), - data); -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - const char* utf8name, - StaticVoidMethodCallback method, - napi_property_attributes attributes, - void* data) { - StaticVoidMethodCallbackData* callbackData = - new StaticVoidMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = T::StaticVoidMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - const char* utf8name, - StaticMethodCallback method, - napi_property_attributes attributes, - void* data) { - StaticMethodCallbackData* callbackData = - new StaticMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = T::StaticMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - Symbol name, - StaticVoidMethodCallback method, - napi_property_attributes attributes, - void* data) { - StaticVoidMethodCallbackData* callbackData = - new StaticVoidMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); 
- desc.name = name; - desc.method = T::StaticVoidMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - Symbol name, - StaticMethodCallback method, - napi_property_attributes attributes, - void* data) { - StaticMethodCallbackData* callbackData = - new StaticMethodCallbackData({method, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = T::StaticMethodCallbackWrapper; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticVoidMethodCallback method> -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = details::TemplatedVoidCallback; - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticVoidMethodCallback method> -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = details::TemplatedVoidCallback; - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticMethodCallback method> -inline ClassPropertyDescriptor ObjectWrap::StaticMethod( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.method = details::TemplatedCallback; - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticMethodCallback method> -inline 
ClassPropertyDescriptor ObjectWrap::StaticMethod( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.method = details::TemplatedCallback; - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( - const char* utf8name, - StaticGetterCallback getter, - StaticSetterCallback setter, - napi_property_attributes attributes, - void* data) { - StaticAccessorCallbackData* callbackData = - new StaticAccessorCallbackData({getter, setter, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; - desc.setter = setter != nullptr ? T::StaticSetterCallbackWrapper : nullptr; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( - Symbol name, - StaticGetterCallback getter, - StaticSetterCallback setter, - napi_property_attributes attributes, - void* data) { - StaticAccessorCallbackData* callbackData = - new StaticAccessorCallbackData({getter, setter, data}); - - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; - desc.setter = setter != nullptr ? 
T::StaticSetterCallbackWrapper : nullptr; - desc.data = callbackData; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticGetterCallback getter, - typename ObjectWrap::StaticSetterCallback setter> -inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( - const char* utf8name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.getter = details::TemplatedCallback; - desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -template ::StaticGetterCallback getter, - typename ObjectWrap::StaticSetterCallback setter> -inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( - Symbol name, napi_property_attributes attributes, void* data) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.getter = details::TemplatedCallback; - desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); - desc.data = data; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticValue( - const char* utf8name, - Napi::Value value, - napi_property_attributes attributes) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.utf8name = utf8name; - desc.value = value; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline ClassPropertyDescriptor ObjectWrap::StaticValue( - Symbol name, Napi::Value value, napi_property_attributes attributes) { - napi_property_descriptor desc = napi_property_descriptor(); - desc.name = name; - desc.value = value; - desc.attributes = - static_cast(attributes | napi_static); - return desc; -} - -template -inline Value ObjectWrap::OnCalledAsFunction( - const Napi::CallbackInfo& callbackInfo) { - NAPI_THROW( - 
TypeError::New(callbackInfo.Env(), - "Class constructors cannot be invoked without 'new'"), - Napi::Value()); -} - -template -inline void ObjectWrap::Finalize(Napi::Env /*env*/) {} - -template -inline napi_value ObjectWrap::ConstructorCallbackWrapper( - napi_env env, napi_callback_info info) { - napi_value new_target; - napi_status status = napi_get_new_target(env, info, &new_target); - if (status != napi_ok) return nullptr; - - bool isConstructCall = (new_target != nullptr); - if (!isConstructCall) { - return details::WrapCallback( - [&] { return T::OnCalledAsFunction(CallbackInfo(env, info)); }); - } - - napi_value wrapper = details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - T* instance = new T(callbackInfo); -#ifdef NAPI_CPP_EXCEPTIONS - instance->_construction_failed = false; -#else - if (callbackInfo.Env().IsExceptionPending()) { - // We need to clear the exception so that removing the wrap might work. - Error e = callbackInfo.Env().GetAndClearPendingException(); - delete instance; - e.ThrowAsJavaScriptException(); - } else { - instance->_construction_failed = false; - } -#endif // NAPI_CPP_EXCEPTIONS - return callbackInfo.This(); - }); - - return wrapper; -} - -template -inline napi_value ObjectWrap::StaticVoidMethodCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - StaticVoidMethodCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - callbackData->callback(callbackInfo); - return nullptr; - }); -} - -template -inline napi_value ObjectWrap::StaticMethodCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - StaticMethodCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - return callbackData->callback(callbackInfo); - }); -} - -template -inline 
napi_value ObjectWrap::StaticGetterCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - StaticAccessorCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - return callbackData->getterCallback(callbackInfo); - }); -} - -template -inline napi_value ObjectWrap::StaticSetterCallbackWrapper( - napi_env env, napi_callback_info info) { - return details::WrapCallback([&] { - CallbackInfo callbackInfo(env, info); - StaticAccessorCallbackData* callbackData = - reinterpret_cast(callbackInfo.Data()); - callbackInfo.SetData(callbackData->data); - callbackData->setterCallback(callbackInfo, callbackInfo[0]); - return nullptr; - }); -} - -template -inline void ObjectWrap::FinalizeCallback(napi_env env, - void* data, - void* /*hint*/) { - HandleScope scope(env); - T* instance = static_cast(data); - instance->Finalize(Napi::Env(env)); - delete instance; -} - -template -template ::StaticSetterCallback method> -inline napi_value ObjectWrap::WrappedMethod( - napi_env env, napi_callback_info info) NAPI_NOEXCEPT { - return details::WrapCallback([&] { - const CallbackInfo cbInfo(env, info); - method(cbInfo, cbInfo[0]); - return nullptr; - }); -} - -//////////////////////////////////////////////////////////////////////////////// -// HandleScope class -//////////////////////////////////////////////////////////////////////////////// - -inline HandleScope::HandleScope(napi_env env, napi_handle_scope scope) - : _env(env), _scope(scope) {} - -inline HandleScope::HandleScope(Napi::Env env) : _env(env) { - napi_status status = napi_open_handle_scope(_env, &_scope); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline HandleScope::~HandleScope() { - napi_status status = napi_close_handle_scope(_env, _scope); - NAPI_FATAL_IF_FAILED( - status, "HandleScope::~HandleScope", "napi_close_handle_scope"); -} - -inline HandleScope::operator 
napi_handle_scope() const { - return _scope; -} - -inline Napi::Env HandleScope::Env() const { - return Napi::Env(_env); -} - -//////////////////////////////////////////////////////////////////////////////// -// EscapableHandleScope class -//////////////////////////////////////////////////////////////////////////////// - -inline EscapableHandleScope::EscapableHandleScope( - napi_env env, napi_escapable_handle_scope scope) - : _env(env), _scope(scope) {} - -inline EscapableHandleScope::EscapableHandleScope(Napi::Env env) : _env(env) { - napi_status status = napi_open_escapable_handle_scope(_env, &_scope); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline EscapableHandleScope::~EscapableHandleScope() { - napi_status status = napi_close_escapable_handle_scope(_env, _scope); - NAPI_FATAL_IF_FAILED(status, - "EscapableHandleScope::~EscapableHandleScope", - "napi_close_escapable_handle_scope"); -} - -inline EscapableHandleScope::operator napi_escapable_handle_scope() const { - return _scope; -} - -inline Napi::Env EscapableHandleScope::Env() const { - return Napi::Env(_env); -} - -inline Value EscapableHandleScope::Escape(napi_value escapee) { - napi_value result; - napi_status status = napi_escape_handle(_env, _scope, escapee, &result); - NAPI_THROW_IF_FAILED(_env, status, Value()); - return Value(_env, result); -} - -#if (NAPI_VERSION > 2) -//////////////////////////////////////////////////////////////////////////////// -// CallbackScope class -//////////////////////////////////////////////////////////////////////////////// - -inline CallbackScope::CallbackScope(napi_env env, napi_callback_scope scope) - : _env(env), _scope(scope) {} - -inline CallbackScope::CallbackScope(napi_env env, napi_async_context context) - : _env(env) { - napi_status status = - napi_open_callback_scope(_env, Object::New(env), context, &_scope); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline CallbackScope::~CallbackScope() { - napi_status status = napi_close_callback_scope(_env, 
_scope); - NAPI_FATAL_IF_FAILED( - status, "CallbackScope::~CallbackScope", "napi_close_callback_scope"); -} - -inline CallbackScope::operator napi_callback_scope() const { - return _scope; -} - -inline Napi::Env CallbackScope::Env() const { - return Napi::Env(_env); -} -#endif - -//////////////////////////////////////////////////////////////////////////////// -// AsyncContext class -//////////////////////////////////////////////////////////////////////////////// - -inline AsyncContext::AsyncContext(napi_env env, const char* resource_name) - : AsyncContext(env, resource_name, Object::New(env)) {} - -inline AsyncContext::AsyncContext(napi_env env, - const char* resource_name, - const Object& resource) - : _env(env), _context(nullptr) { - napi_value resource_id; - napi_status status = napi_create_string_utf8( - _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); - NAPI_THROW_IF_FAILED_VOID(_env, status); - - status = napi_async_init(_env, resource, resource_id, &_context); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline AsyncContext::~AsyncContext() { - if (_context != nullptr) { - napi_async_destroy(_env, _context); - _context = nullptr; - } -} - -inline AsyncContext::AsyncContext(AsyncContext&& other) { - _env = other._env; - other._env = nullptr; - _context = other._context; - other._context = nullptr; -} - -inline AsyncContext& AsyncContext::operator=(AsyncContext&& other) { - _env = other._env; - other._env = nullptr; - _context = other._context; - other._context = nullptr; - return *this; -} - -inline AsyncContext::operator napi_async_context() const { - return _context; -} - -inline Napi::Env AsyncContext::Env() const { - return Napi::Env(_env); -} - -//////////////////////////////////////////////////////////////////////////////// -// AsyncWorker class -//////////////////////////////////////////////////////////////////////////////// - -inline AsyncWorker::AsyncWorker(const Function& callback) - : AsyncWorker(callback, "generic") {} - -inline 
AsyncWorker::AsyncWorker(const Function& callback, - const char* resource_name) - : AsyncWorker(callback, resource_name, Object::New(callback.Env())) {} - -inline AsyncWorker::AsyncWorker(const Function& callback, - const char* resource_name, - const Object& resource) - : AsyncWorker( - Object::New(callback.Env()), callback, resource_name, resource) {} - -inline AsyncWorker::AsyncWorker(const Object& receiver, - const Function& callback) - : AsyncWorker(receiver, callback, "generic") {} - -inline AsyncWorker::AsyncWorker(const Object& receiver, - const Function& callback, - const char* resource_name) - : AsyncWorker( - receiver, callback, resource_name, Object::New(callback.Env())) {} - -inline AsyncWorker::AsyncWorker(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource) - : _env(callback.Env()), - _receiver(Napi::Persistent(receiver)), - _callback(Napi::Persistent(callback)), - _suppress_destruct(false) { - napi_value resource_id; - napi_status status = napi_create_string_latin1( - _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); - NAPI_THROW_IF_FAILED_VOID(_env, status); - - status = napi_create_async_work(_env, - resource, - resource_id, - OnAsyncWorkExecute, - OnAsyncWorkComplete, - this, - &_work); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline AsyncWorker::AsyncWorker(Napi::Env env) : AsyncWorker(env, "generic") {} - -inline AsyncWorker::AsyncWorker(Napi::Env env, const char* resource_name) - : AsyncWorker(env, resource_name, Object::New(env)) {} - -inline AsyncWorker::AsyncWorker(Napi::Env env, - const char* resource_name, - const Object& resource) - : _env(env), _receiver(), _callback(), _suppress_destruct(false) { - napi_value resource_id; - napi_status status = napi_create_string_latin1( - _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); - NAPI_THROW_IF_FAILED_VOID(_env, status); - - status = napi_create_async_work(_env, - resource, - resource_id, - OnAsyncWorkExecute, - 
OnAsyncWorkComplete, - this, - &_work); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline AsyncWorker::~AsyncWorker() { - if (_work != nullptr) { - napi_delete_async_work(_env, _work); - _work = nullptr; - } -} - -inline void AsyncWorker::Destroy() { - delete this; -} - -inline AsyncWorker::AsyncWorker(AsyncWorker&& other) { - _env = other._env; - other._env = nullptr; - _work = other._work; - other._work = nullptr; - _receiver = std::move(other._receiver); - _callback = std::move(other._callback); - _error = std::move(other._error); - _suppress_destruct = other._suppress_destruct; -} - -inline AsyncWorker& AsyncWorker::operator=(AsyncWorker&& other) { - _env = other._env; - other._env = nullptr; - _work = other._work; - other._work = nullptr; - _receiver = std::move(other._receiver); - _callback = std::move(other._callback); - _error = std::move(other._error); - _suppress_destruct = other._suppress_destruct; - return *this; -} - -inline AsyncWorker::operator napi_async_work() const { - return _work; -} - -inline Napi::Env AsyncWorker::Env() const { - return Napi::Env(_env); -} - -inline void AsyncWorker::Queue() { - napi_status status = napi_queue_async_work(_env, _work); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline void AsyncWorker::Cancel() { - napi_status status = napi_cancel_async_work(_env, _work); - NAPI_THROW_IF_FAILED_VOID(_env, status); -} - -inline ObjectReference& AsyncWorker::Receiver() { - return _receiver; -} - -inline FunctionReference& AsyncWorker::Callback() { - return _callback; -} - -inline void AsyncWorker::SuppressDestruct() { - _suppress_destruct = true; -} - -inline void AsyncWorker::OnOK() { - if (!_callback.IsEmpty()) { - _callback.Call(_receiver.Value(), GetResult(_callback.Env())); - } -} - -inline void AsyncWorker::OnError(const Error& e) { - if (!_callback.IsEmpty()) { - _callback.Call(_receiver.Value(), - std::initializer_list{e.Value()}); - } -} - -inline void AsyncWorker::SetError(const std::string& error) { - 
_error = error; -} - -inline std::vector AsyncWorker::GetResult(Napi::Env /*env*/) { - return {}; -} -// The OnAsyncWorkExecute method receives an napi_env argument. However, do NOT -// use it within this method, as it does not run on the JavaScript thread and -// must not run any method that would cause JavaScript to run. In practice, -// this means that almost any use of napi_env will be incorrect. -inline void AsyncWorker::OnAsyncWorkExecute(napi_env env, void* asyncworker) { - AsyncWorker* self = static_cast(asyncworker); - self->OnExecute(env); -} -// The OnExecute method receives an napi_env argument. However, do NOT -// use it within this method, as it does not run on the JavaScript thread and -// must not run any method that would cause JavaScript to run. In practice, -// this means that almost any use of napi_env will be incorrect. -inline void AsyncWorker::OnExecute(Napi::Env /*DO_NOT_USE*/) { -#ifdef NAPI_CPP_EXCEPTIONS - try { - Execute(); - } catch (const std::exception& e) { - SetError(e.what()); - } -#else // NAPI_CPP_EXCEPTIONS - Execute(); -#endif // NAPI_CPP_EXCEPTIONS -} - -inline void AsyncWorker::OnAsyncWorkComplete(napi_env env, - napi_status status, - void* asyncworker) { - AsyncWorker* self = static_cast(asyncworker); - self->OnWorkComplete(env, status); -} -inline void AsyncWorker::OnWorkComplete(Napi::Env /*env*/, napi_status status) { - if (status != napi_cancelled) { - HandleScope scope(_env); - details::WrapCallback([&] { - if (_error.size() == 0) { - OnOK(); - } else { - OnError(Error::New(_env, _error)); - } - return nullptr; - }); - } - if (!_suppress_destruct) { - Destroy(); - } -} - -#if (NAPI_VERSION > 3 && !defined(__wasm32__)) -//////////////////////////////////////////////////////////////////////////////// -// TypedThreadSafeFunction class -//////////////////////////////////////////////////////////////////////////////// - -// Starting with NAPI 5, the JavaScript function `func` parameter of -// `napi_create_threadsafe_function` 
is optional. -#if NAPI_VERSION > 4 -// static, with Callback [missing] Resource [missing] Finalizer [missing] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - TypedThreadSafeFunction tsfn; - - napi_status status = - napi_create_threadsafe_function(env, - nullptr, - nullptr, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - nullptr, - nullptr, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with Callback [missing] Resource [passed] Finalizer [missing] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - TypedThreadSafeFunction tsfn; - - napi_status status = - napi_create_threadsafe_function(env, - nullptr, - resource, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - nullptr, - nullptr, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with Callback [missing] Resource [missing] Finalizer [passed] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - TypedThreadSafeFunction tsfn; - - auto* finalizeData = new details:: - ThreadSafeFinalize( - {data, finalizeCallback}); - napi_status status = napi_create_threadsafe_function( - env, - nullptr, - nullptr, - String::From(env, resourceName), - maxQueueSize, - 
initialThreadCount, - finalizeData, - details::ThreadSafeFinalize:: - FinalizeFinalizeWrapperWithDataAndContext, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with Callback [missing] Resource [passed] Finalizer [passed] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - TypedThreadSafeFunction tsfn; - - auto* finalizeData = new details:: - ThreadSafeFinalize( - {data, finalizeCallback}); - napi_status status = napi_create_threadsafe_function( - env, - nullptr, - resource, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - finalizeData, - details::ThreadSafeFinalize:: - FinalizeFinalizeWrapperWithDataAndContext, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} -#endif - -// static, with Callback [passed] Resource [missing] Finalizer [missing] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - TypedThreadSafeFunction tsfn; - - napi_status status = - napi_create_threadsafe_function(env, - callback, - nullptr, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - nullptr, - nullptr, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with Callback [passed] Resource [passed] Finalizer [missing] 
-template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - TypedThreadSafeFunction tsfn; - - napi_status status = - napi_create_threadsafe_function(env, - callback, - resource, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - nullptr, - nullptr, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with Callback [passed] Resource [missing] Finalizer [passed] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - TypedThreadSafeFunction tsfn; - - auto* finalizeData = new details:: - ThreadSafeFinalize( - {data, finalizeCallback}); - napi_status status = napi_create_threadsafe_function( - env, - callback, - nullptr, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - finalizeData, - details::ThreadSafeFinalize:: - FinalizeFinalizeWrapperWithDataAndContext, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -// static, with: Callback [passed] Resource [passed] Finalizer [passed] -template -template -inline TypedThreadSafeFunction -TypedThreadSafeFunction::New( - napi_env env, - CallbackType callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - TypedThreadSafeFunction tsfn; - - 
auto* finalizeData = new details:: - ThreadSafeFinalize( - {data, finalizeCallback}); - napi_status status = napi_create_threadsafe_function( - env, - details::DefaultCallbackWrapper< - CallbackType, - TypedThreadSafeFunction>(env, - callback), - resource, - String::From(env, resourceName), - maxQueueSize, - initialThreadCount, - finalizeData, - details::ThreadSafeFinalize:: - FinalizeFinalizeWrapperWithDataAndContext, - context, - CallJsInternal, - &tsfn._tsfn); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED( - env, status, TypedThreadSafeFunction()); - } - - return tsfn; -} - -template -inline TypedThreadSafeFunction:: - TypedThreadSafeFunction() - : _tsfn() {} - -template -inline TypedThreadSafeFunction:: - TypedThreadSafeFunction(napi_threadsafe_function tsfn) - : _tsfn(tsfn) {} - -template -inline TypedThreadSafeFunction:: -operator napi_threadsafe_function() const { - return _tsfn; -} - -template -inline napi_status -TypedThreadSafeFunction::BlockingCall( - DataType* data) const { - return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); -} - -template -inline napi_status -TypedThreadSafeFunction::NonBlockingCall( - DataType* data) const { - return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); -} - -template -inline void TypedThreadSafeFunction::Ref( - napi_env env) const { - if (_tsfn != nullptr) { - napi_status status = napi_ref_threadsafe_function(env, _tsfn); - NAPI_THROW_IF_FAILED_VOID(env, status); - } -} - -template -inline void TypedThreadSafeFunction::Unref( - napi_env env) const { - if (_tsfn != nullptr) { - napi_status status = napi_unref_threadsafe_function(env, _tsfn); - NAPI_THROW_IF_FAILED_VOID(env, status); - } -} - -template -inline napi_status -TypedThreadSafeFunction::Acquire() const { - return napi_acquire_threadsafe_function(_tsfn); -} - -template -inline napi_status -TypedThreadSafeFunction::Release() const { - return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); 
-} - -template -inline napi_status -TypedThreadSafeFunction::Abort() const { - return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); -} - -template -inline ContextType* -TypedThreadSafeFunction::GetContext() const { - void* context; - napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); - NAPI_FATAL_IF_FAILED(status, - "TypedThreadSafeFunction::GetContext", - "napi_get_threadsafe_function_context"); - return static_cast(context); -} - -// static -template -void TypedThreadSafeFunction::CallJsInternal( - napi_env env, napi_value jsCallback, void* context, void* data) { - details::CallJsWrapper( - env, jsCallback, context, data); -} - -#if NAPI_VERSION == 4 -// static -template -Napi::Function -TypedThreadSafeFunction::EmptyFunctionFactory( - Napi::Env env) { - return Napi::Function::New(env, [](const CallbackInfo& cb) {}); -} - -// static -template -Napi::Function -TypedThreadSafeFunction::FunctionOrEmpty( - Napi::Env env, Napi::Function& callback) { - if (callback.IsEmpty()) { - return EmptyFunctionFactory(env); - } - return callback; -} - -#else -// static -template -std::nullptr_t -TypedThreadSafeFunction::EmptyFunctionFactory( - Napi::Env /*env*/) { - return nullptr; -} - -// static -template -Napi::Function -TypedThreadSafeFunction::FunctionOrEmpty( - Napi::Env /*env*/, Napi::Function& callback) { - return callback; -} - -#endif - -//////////////////////////////////////////////////////////////////////////////// -// ThreadSafeFunction class -//////////////////////////////////////////////////////////////////////////////// - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount) { - return New( - env, callback, Object(), resourceName, maxQueueSize, initialThreadCount); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - 
ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - return New(env, - callback, - Object(), - resourceName, - maxQueueSize, - initialThreadCount, - context); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback) { - return New(env, - callback, - Object(), - resourceName, - maxQueueSize, - initialThreadCount, - finalizeCallback); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback, - FinalizerDataType* data) { - return New(env, - callback, - Object(), - resourceName, - maxQueueSize, - initialThreadCount, - finalizeCallback, - data); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback) { - return New(env, - callback, - Object(), - resourceName, - maxQueueSize, - initialThreadCount, - context, - finalizeCallback); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - return New(env, - callback, - Object(), - resourceName, - maxQueueSize, - initialThreadCount, - context, - finalizeCallback, - data); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount) { - return 
New(env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - static_cast(nullptr) /* context */); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context) { - return New(env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - context, - [](Env, ContextType*) {} /* empty finalizer */); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback) { - return New(env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - static_cast(nullptr) /* context */, - finalizeCallback, - static_cast(nullptr) /* data */, - details::ThreadSafeFinalize::Wrapper); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback, - FinalizerDataType* data) { - return New(env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - static_cast(nullptr) /* context */, - finalizeCallback, - data, - details::ThreadSafeFinalize:: - FinalizeWrapperWithData); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback) { - return New( - env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - context, - finalizeCallback, - static_cast(nullptr) /* data */, - 
details::ThreadSafeFinalize::FinalizeWrapperWithContext); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data) { - return New( - env, - callback, - resource, - resourceName, - maxQueueSize, - initialThreadCount, - context, - finalizeCallback, - data, - details::ThreadSafeFinalize:: - FinalizeFinalizeWrapperWithDataAndContext); -} - -inline ThreadSafeFunction::ThreadSafeFunction() : _tsfn() {} - -inline ThreadSafeFunction::ThreadSafeFunction(napi_threadsafe_function tsfn) - : _tsfn(tsfn) {} - -inline ThreadSafeFunction::operator napi_threadsafe_function() const { - return _tsfn; -} - -inline napi_status ThreadSafeFunction::BlockingCall() const { - return CallInternal(nullptr, napi_tsfn_blocking); -} - -template <> -inline napi_status ThreadSafeFunction::BlockingCall(void* data) const { - return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); -} - -template -inline napi_status ThreadSafeFunction::BlockingCall(Callback callback) const { - return CallInternal(new CallbackWrapper(callback), napi_tsfn_blocking); -} - -template -inline napi_status ThreadSafeFunction::BlockingCall(DataType* data, - Callback callback) const { - auto wrapper = [data, callback](Env env, Function jsCallback) { - callback(env, jsCallback, data); - }; - return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_blocking); -} - -inline napi_status ThreadSafeFunction::NonBlockingCall() const { - return CallInternal(nullptr, napi_tsfn_nonblocking); -} - -template <> -inline napi_status ThreadSafeFunction::NonBlockingCall(void* data) const { - return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); -} - -template -inline napi_status ThreadSafeFunction::NonBlockingCall( - Callback callback) const { - return 
CallInternal(new CallbackWrapper(callback), napi_tsfn_nonblocking); -} - -template -inline napi_status ThreadSafeFunction::NonBlockingCall( - DataType* data, Callback callback) const { - auto wrapper = [data, callback](Env env, Function jsCallback) { - callback(env, jsCallback, data); - }; - return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_nonblocking); -} - -inline void ThreadSafeFunction::Ref(napi_env env) const { - if (_tsfn != nullptr) { - napi_status status = napi_ref_threadsafe_function(env, _tsfn); - NAPI_THROW_IF_FAILED_VOID(env, status); - } -} - -inline void ThreadSafeFunction::Unref(napi_env env) const { - if (_tsfn != nullptr) { - napi_status status = napi_unref_threadsafe_function(env, _tsfn); - NAPI_THROW_IF_FAILED_VOID(env, status); - } -} - -inline napi_status ThreadSafeFunction::Acquire() const { - return napi_acquire_threadsafe_function(_tsfn); -} - -inline napi_status ThreadSafeFunction::Release() const { - return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); -} - -inline napi_status ThreadSafeFunction::Abort() const { - return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); -} - -inline ThreadSafeFunction::ConvertibleContext ThreadSafeFunction::GetContext() - const { - void* context; - napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); - NAPI_FATAL_IF_FAILED(status, - "ThreadSafeFunction::GetContext", - "napi_get_threadsafe_function_context"); - return ConvertibleContext({context}); -} - -// static -template -inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data, - napi_finalize wrapper) { - static_assert(details::can_make_string::value || - std::is_convertible::value, - "Resource name should be convertible to the string type"); - - ThreadSafeFunction tsfn; - 
auto* finalizeData = new details:: - ThreadSafeFinalize( - {data, finalizeCallback}); - napi_status status = - napi_create_threadsafe_function(env, - callback, - resource, - Value::From(env, resourceName), - maxQueueSize, - initialThreadCount, - finalizeData, - wrapper, - context, - CallJS, - &tsfn._tsfn); - if (status != napi_ok) { - delete finalizeData; - NAPI_THROW_IF_FAILED(env, status, ThreadSafeFunction()); - } - - return tsfn; -} - -inline napi_status ThreadSafeFunction::CallInternal( - CallbackWrapper* callbackWrapper, - napi_threadsafe_function_call_mode mode) const { - napi_status status = - napi_call_threadsafe_function(_tsfn, callbackWrapper, mode); - if (status != napi_ok && callbackWrapper != nullptr) { - delete callbackWrapper; - } - - return status; -} - -// static -inline void ThreadSafeFunction::CallJS(napi_env env, - napi_value jsCallback, - void* /* context */, - void* data) { - if (env == nullptr && jsCallback == nullptr) { - return; - } - - if (data != nullptr) { - auto* callbackWrapper = static_cast(data); - (*callbackWrapper)(env, Function(env, jsCallback)); - delete callbackWrapper; - } else if (jsCallback != nullptr) { - Function(env, jsCallback).Call({}); - } -} - -//////////////////////////////////////////////////////////////////////////////// -// Async Progress Worker Base class -//////////////////////////////////////////////////////////////////////////////// -template -inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( - const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource, - size_t queue_size) - : AsyncWorker(receiver, callback, resource_name, resource) { - // Fill all possible arguments to work around ambiguous - // ThreadSafeFunction::New signatures. 
- _tsfn = ThreadSafeFunction::New(callback.Env(), - callback, - resource, - resource_name, - queue_size, - /** initialThreadCount */ 1, - /** context */ this, - OnThreadSafeFunctionFinalize, - /** finalizeData */ this); -} - -#if NAPI_VERSION > 4 -template -inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( - Napi::Env env, - const char* resource_name, - const Object& resource, - size_t queue_size) - : AsyncWorker(env, resource_name, resource) { - // TODO: Once the changes to make the callback optional for threadsafe - // functions are available on all versions we can remove the dummy Function - // here. - Function callback; - // Fill all possible arguments to work around ambiguous - // ThreadSafeFunction::New signatures. - _tsfn = ThreadSafeFunction::New(env, - callback, - resource, - resource_name, - queue_size, - /** initialThreadCount */ 1, - /** context */ this, - OnThreadSafeFunctionFinalize, - /** finalizeData */ this); -} -#endif - -template -inline AsyncProgressWorkerBase::~AsyncProgressWorkerBase() { - // Abort pending tsfn call. - // Don't send progress events after we've already completed. - // It's ok to call ThreadSafeFunction::Abort and ThreadSafeFunction::Release - // duplicated. 
- _tsfn.Abort(); -} - -template -inline void AsyncProgressWorkerBase::OnAsyncWorkProgress( - Napi::Env /* env */, Napi::Function /* jsCallback */, void* data) { - ThreadSafeData* tsd = static_cast(data); - tsd->asyncprogressworker()->OnWorkProgress(tsd->data()); - delete tsd; -} - -template -inline napi_status AsyncProgressWorkerBase::NonBlockingCall( - DataType* data) { - auto tsd = new AsyncProgressWorkerBase::ThreadSafeData(this, data); - auto ret = _tsfn.NonBlockingCall(tsd, OnAsyncWorkProgress); - if (ret != napi_ok) { - delete tsd; - } - return ret; -} - -template -inline void AsyncProgressWorkerBase::OnWorkComplete( - Napi::Env /* env */, napi_status status) { - _work_completed = true; - _complete_status = status; - _tsfn.Release(); -} - -template -inline void AsyncProgressWorkerBase::OnThreadSafeFunctionFinalize( - Napi::Env env, void* /* data */, AsyncProgressWorkerBase* context) { - if (context->_work_completed) { - context->AsyncWorker::OnWorkComplete(env, context->_complete_status); - } -} - -//////////////////////////////////////////////////////////////////////////////// -// Async Progress Worker class -//////////////////////////////////////////////////////////////////////////////// -template -inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback) - : AsyncProgressWorker(callback, "generic") {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, - const char* resource_name) - : AsyncProgressWorker( - callback, resource_name, Object::New(callback.Env())) {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, - const char* resource_name, - const Object& resource) - : AsyncProgressWorker( - Object::New(callback.Env()), callback, resource_name, resource) {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, - const Function& callback) - : AsyncProgressWorker(receiver, callback, "generic") {} - -template -inline 
AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, - const Function& callback, - const char* resource_name) - : AsyncProgressWorker( - receiver, callback, resource_name, Object::New(callback.Env())) {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource) - : AsyncProgressWorkerBase(receiver, callback, resource_name, resource), - _asyncdata(nullptr), - _asyncsize(0), - _signaled(false) {} - -#if NAPI_VERSION > 4 -template -inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env) - : AsyncProgressWorker(env, "generic") {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, - const char* resource_name) - : AsyncProgressWorker(env, resource_name, Object::New(env)) {} - -template -inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, - const char* resource_name, - const Object& resource) - : AsyncProgressWorkerBase(env, resource_name, resource), - _asyncdata(nullptr), - _asyncsize(0) {} -#endif - -template -inline AsyncProgressWorker::~AsyncProgressWorker() { - { - std::lock_guard lock(this->_mutex); - _asyncdata = nullptr; - _asyncsize = 0; - } -} - -template -inline void AsyncProgressWorker::Execute() { - ExecutionProgress progress(this); - Execute(progress); -} - -template -inline void AsyncProgressWorker::OnWorkProgress(void*) { - T* data; - size_t size; - bool signaled; - { - std::lock_guard lock(this->_mutex); - data = this->_asyncdata; - size = this->_asyncsize; - signaled = this->_signaled; - this->_asyncdata = nullptr; - this->_asyncsize = 0; - this->_signaled = false; - } - - /** - * The callback of ThreadSafeFunction is not been invoked immediately on the - * callback of uv_async_t (uv io poll), rather the callback of TSFN is - * invoked on the right next uv idle callback. There are chances that during - * the deferring the signal of uv_async_t is been sent again, i.e. 
potential - * not coalesced two calls of the TSFN callback. - */ - if (data == nullptr && !signaled) { - return; - } - - this->OnProgress(data, size); - delete[] data; -} - -template -inline void AsyncProgressWorker::SendProgress_(const T* data, size_t count) { - T* new_data = new T[count]; - std::copy(data, data + count, new_data); - - T* old_data; - { - std::lock_guard lock(this->_mutex); - old_data = _asyncdata; - _asyncdata = new_data; - _asyncsize = count; - _signaled = false; - } - this->NonBlockingCall(nullptr); - - delete[] old_data; -} - -template -inline void AsyncProgressWorker::Signal() { - { - std::lock_guard lock(this->_mutex); - _signaled = true; - } - this->NonBlockingCall(static_cast(nullptr)); -} - -template -inline void AsyncProgressWorker::ExecutionProgress::Signal() const { - this->_worker->Signal(); -} - -template -inline void AsyncProgressWorker::ExecutionProgress::Send( - const T* data, size_t count) const { - _worker->SendProgress_(data, count); -} - -//////////////////////////////////////////////////////////////////////////////// -// Async Progress Queue Worker class -//////////////////////////////////////////////////////////////////////////////// -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Function& callback) - : AsyncProgressQueueWorker(callback, "generic") {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Function& callback, const char* resource_name) - : AsyncProgressQueueWorker( - callback, resource_name, Object::New(callback.Env())) {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Function& callback, const char* resource_name, const Object& resource) - : AsyncProgressQueueWorker( - Object::New(callback.Env()), callback, resource_name, resource) {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Object& receiver, const Function& callback) - : AsyncProgressQueueWorker(receiver, callback, "generic") {} - 
-template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Object& receiver, const Function& callback, const char* resource_name) - : AsyncProgressQueueWorker( - receiver, callback, resource_name, Object::New(callback.Env())) {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource) - : AsyncProgressWorkerBase>( - receiver, - callback, - resource_name, - resource, - /** unlimited queue size */ 0) {} - -#if NAPI_VERSION > 4 -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker(Napi::Env env) - : AsyncProgressQueueWorker(env, "generic") {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - Napi::Env env, const char* resource_name) - : AsyncProgressQueueWorker(env, resource_name, Object::New(env)) {} - -template -inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( - Napi::Env env, const char* resource_name, const Object& resource) - : AsyncProgressWorkerBase>( - env, resource_name, resource, /** unlimited queue size */ 0) {} -#endif - -template -inline void AsyncProgressQueueWorker::Execute() { - ExecutionProgress progress(this); - Execute(progress); -} - -template -inline void AsyncProgressQueueWorker::OnWorkProgress( - std::pair* datapair) { - if (datapair == nullptr) { - return; - } - - T* data = datapair->first; - size_t size = datapair->second; - - this->OnProgress(data, size); - delete datapair; - delete[] data; -} - -template -inline void AsyncProgressQueueWorker::SendProgress_(const T* data, - size_t count) { - T* new_data = new T[count]; - std::copy(data, data + count, new_data); - - auto pair = new std::pair(new_data, count); - this->NonBlockingCall(pair); -} - -template -inline void AsyncProgressQueueWorker::Signal() const { - this->SendProgress_(static_cast(nullptr), 0); -} - -template -inline void AsyncProgressQueueWorker::OnWorkComplete(Napi::Env env, - napi_status 
status) { - // Draining queued items in TSFN. - AsyncProgressWorkerBase>::OnWorkComplete(env, status); -} - -template -inline void AsyncProgressQueueWorker::ExecutionProgress::Signal() const { - _worker->SendProgress_(static_cast(nullptr), 0); -} - -template -inline void AsyncProgressQueueWorker::ExecutionProgress::Send( - const T* data, size_t count) const { - _worker->SendProgress_(data, count); -} -#endif // NAPI_VERSION > 3 && !defined(__wasm32__) - -//////////////////////////////////////////////////////////////////////////////// -// Memory Management class -//////////////////////////////////////////////////////////////////////////////// - -inline int64_t MemoryManagement::AdjustExternalMemory(Env env, - int64_t change_in_bytes) { - int64_t result; - napi_status status = - napi_adjust_external_memory(env, change_in_bytes, &result); - NAPI_THROW_IF_FAILED(env, status, 0); - return result; -} - -//////////////////////////////////////////////////////////////////////////////// -// Version Management class -//////////////////////////////////////////////////////////////////////////////// - -inline uint32_t VersionManagement::GetNapiVersion(Env env) { - uint32_t result; - napi_status status = napi_get_version(env, &result); - NAPI_THROW_IF_FAILED(env, status, 0); - return result; -} - -inline const napi_node_version* VersionManagement::GetNodeVersion(Env env) { - const napi_node_version* result; - napi_status status = napi_get_node_version(env, &result); - NAPI_THROW_IF_FAILED(env, status, 0); - return result; -} - -#if NAPI_VERSION > 5 -//////////////////////////////////////////////////////////////////////////////// -// Addon class -//////////////////////////////////////////////////////////////////////////////// - -template -inline Object Addon::Init(Env env, Object exports) { - T* addon = new T(env, exports); - env.SetInstanceData(addon); - return addon->entry_point_; -} - -template -inline T* Addon::Unwrap(Object wrapper) { - return wrapper.Env().GetInstanceData(); 
-} - -template -inline void Addon::DefineAddon( - Object exports, const std::initializer_list& props) { - DefineProperties(exports, props); - entry_point_ = exports; -} - -template -inline Napi::Object Addon::DefineProperties( - Object object, const std::initializer_list& props) { - const napi_property_descriptor* properties = - reinterpret_cast(props.begin()); - size_t size = props.size(); - napi_status status = - napi_define_properties(object.Env(), object, size, properties); - NAPI_THROW_IF_FAILED(object.Env(), status, object); - for (size_t idx = 0; idx < size; idx++) - T::AttachPropData(object.Env(), object, &properties[idx]); - return object; -} -#endif // NAPI_VERSION > 5 - -#if NAPI_VERSION > 2 -template -Env::CleanupHook Env::AddCleanupHook(Hook hook, Arg* arg) { - return CleanupHook(*this, hook, arg); -} - -template -Env::CleanupHook Env::AddCleanupHook(Hook hook) { - return CleanupHook(*this, hook); -} - -template -Env::CleanupHook::CleanupHook() { - data = nullptr; -} - -template -Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook) - : wrapper(Env::CleanupHook::Wrapper) { - data = new CleanupData{std::move(hook), nullptr}; - napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); - if (status != napi_ok) { - delete data; - data = nullptr; - } -} - -template -Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook, Arg* arg) - : wrapper(Env::CleanupHook::WrapperWithArg) { - data = new CleanupData{std::move(hook), arg}; - napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); - if (status != napi_ok) { - delete data; - data = nullptr; - } -} - -template -bool Env::CleanupHook::Remove(Env env) { - napi_status status = napi_remove_env_cleanup_hook(env, wrapper, data); - delete data; - data = nullptr; - return status == napi_ok; -} - -template -bool Env::CleanupHook::IsEmpty() const { - return data == nullptr; -} -#endif // NAPI_VERSION > 2 - -#ifdef NAPI_CPP_CUSTOM_NAMESPACE -} // namespace NAPI_CPP_CUSTOM_NAMESPACE -#endif - -} 
// namespace Napi - -#endif // SRC_NAPI_INL_H_ diff --git a/node_modules/node-addon-api/napi.h b/node_modules/node-addon-api/napi.h deleted file mode 100644 index 831b3b6d..00000000 --- a/node_modules/node-addon-api/napi.h +++ /dev/null @@ -1,3114 +0,0 @@ -#ifndef SRC_NAPI_H_ -#define SRC_NAPI_H_ - -#include -#include -#include -#include -#include -#include -#include - -// VS2015 RTM has bugs with constexpr, so require min of VS2015 Update 3 (known -// good version) -#if !defined(_MSC_VER) || _MSC_FULL_VER >= 190024210 -#define NAPI_HAS_CONSTEXPR 1 -#endif - -// VS2013 does not support char16_t literal strings, so we'll work around it -// using wchar_t strings and casting them. This is safe as long as the character -// sizes are the same. -#if defined(_MSC_VER) && _MSC_VER <= 1800 -static_assert(sizeof(char16_t) == sizeof(wchar_t), - "Size mismatch between char16_t and wchar_t"); -#define NAPI_WIDE_TEXT(x) reinterpret_cast(L##x) -#else -#define NAPI_WIDE_TEXT(x) u##x -#endif - -// If C++ exceptions are not explicitly enabled or disabled, enable them -// if exceptions were enabled in the compiler settings. -#if !defined(NAPI_CPP_EXCEPTIONS) && !defined(NAPI_DISABLE_CPP_EXCEPTIONS) -#if defined(_CPPUNWIND) || defined(__EXCEPTIONS) -#define NAPI_CPP_EXCEPTIONS -#else -#error Exception support not detected. \ - Define either NAPI_CPP_EXCEPTIONS or NAPI_DISABLE_CPP_EXCEPTIONS. -#endif -#endif - -// If C++ NAPI_CPP_EXCEPTIONS are enabled, NODE_ADDON_API_ENABLE_MAYBE should -// not be set -#if defined(NAPI_CPP_EXCEPTIONS) && defined(NODE_ADDON_API_ENABLE_MAYBE) -#error NODE_ADDON_API_ENABLE_MAYBE should not be set when \ - NAPI_CPP_EXCEPTIONS is defined. -#endif - -#ifdef _NOEXCEPT -#define NAPI_NOEXCEPT _NOEXCEPT -#else -#define NAPI_NOEXCEPT noexcept -#endif - -#ifdef NAPI_CPP_EXCEPTIONS - -// When C++ exceptions are enabled, Errors are thrown directly. There is no need -// to return anything after the throw statements. 
The variadic parameter is an -// optional return value that is ignored. -// We need _VOID versions of the macros to avoid warnings resulting from -// leaving the NAPI_THROW_* `...` argument empty. - -#define NAPI_THROW(e, ...) throw e -#define NAPI_THROW_VOID(e) throw e - -#define NAPI_THROW_IF_FAILED(env, status, ...) \ - if ((status) != napi_ok) throw Napi::Error::New(env); - -#define NAPI_THROW_IF_FAILED_VOID(env, status) \ - if ((status) != napi_ok) throw Napi::Error::New(env); - -#else // NAPI_CPP_EXCEPTIONS - -// When C++ exceptions are disabled, Errors are thrown as JavaScript exceptions, -// which are pending until the callback returns to JS. The variadic parameter -// is an optional return value; usually it is an empty result. -// We need _VOID versions of the macros to avoid warnings resulting from -// leaving the NAPI_THROW_* `...` argument empty. - -#define NAPI_THROW(e, ...) \ - do { \ - (e).ThrowAsJavaScriptException(); \ - return __VA_ARGS__; \ - } while (0) - -#define NAPI_THROW_VOID(e) \ - do { \ - (e).ThrowAsJavaScriptException(); \ - return; \ - } while (0) - -#define NAPI_THROW_IF_FAILED(env, status, ...) 
\ - if ((status) != napi_ok) { \ - Napi::Error::New(env).ThrowAsJavaScriptException(); \ - return __VA_ARGS__; \ - } - -#define NAPI_THROW_IF_FAILED_VOID(env, status) \ - if ((status) != napi_ok) { \ - Napi::Error::New(env).ThrowAsJavaScriptException(); \ - return; \ - } - -#endif // NAPI_CPP_EXCEPTIONS - -#ifdef NODE_ADDON_API_ENABLE_MAYBE -#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ - NAPI_THROW_IF_FAILED(env, status, Napi::Nothing()) - -#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ - NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ - return Napi::Just(result); -#else -#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ - NAPI_THROW_IF_FAILED(env, status, type()) - -#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ - NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ - return result; -#endif - -#define NAPI_DISALLOW_ASSIGN(CLASS) void operator=(const CLASS&) = delete; -#define NAPI_DISALLOW_COPY(CLASS) CLASS(const CLASS&) = delete; - -#define NAPI_DISALLOW_ASSIGN_COPY(CLASS) \ - NAPI_DISALLOW_ASSIGN(CLASS) \ - NAPI_DISALLOW_COPY(CLASS) - -#define NAPI_CHECK(condition, location, message) \ - do { \ - if (!(condition)) { \ - Napi::Error::Fatal((location), (message)); \ - } \ - } while (0) - -#define NAPI_FATAL_IF_FAILED(status, location, message) \ - NAPI_CHECK((status) == napi_ok, location, message) - -//////////////////////////////////////////////////////////////////////////////// -/// Node-API C++ Wrapper Classes -/// -/// These classes wrap the "Node-API" ABI-stable C APIs for Node.js, providing a -/// C++ object model and C++ exception-handling semantics with low overhead. -/// The wrappers are all header-only so that they do not affect the ABI. 
-//////////////////////////////////////////////////////////////////////////////// -namespace Napi { - -#ifdef NAPI_CPP_CUSTOM_NAMESPACE -// NAPI_CPP_CUSTOM_NAMESPACE can be #define'd per-addon to avoid symbol -// conflicts between different instances of node-addon-api - -// First dummy definition of the namespace to make sure that Napi::(name) still -// refers to the right things inside this file. -namespace NAPI_CPP_CUSTOM_NAMESPACE {} -using namespace NAPI_CPP_CUSTOM_NAMESPACE; - -namespace NAPI_CPP_CUSTOM_NAMESPACE { -#endif - -// Forward declarations -class Env; -class Value; -class Boolean; -class Number; -#if NAPI_VERSION > 5 -class BigInt; -#endif // NAPI_VERSION > 5 -#if (NAPI_VERSION > 4) -class Date; -#endif -class String; -class Object; -class Array; -class ArrayBuffer; -class Function; -class Error; -class PropertyDescriptor; -class CallbackInfo; -class TypedArray; -template -class TypedArrayOf; - -using Int8Array = - TypedArrayOf; ///< Typed-array of signed 8-bit integers -using Uint8Array = - TypedArrayOf; ///< Typed-array of unsigned 8-bit integers -using Int16Array = - TypedArrayOf; ///< Typed-array of signed 16-bit integers -using Uint16Array = - TypedArrayOf; ///< Typed-array of unsigned 16-bit integers -using Int32Array = - TypedArrayOf; ///< Typed-array of signed 32-bit integers -using Uint32Array = - TypedArrayOf; ///< Typed-array of unsigned 32-bit integers -using Float32Array = - TypedArrayOf; ///< Typed-array of 32-bit floating-point values -using Float64Array = - TypedArrayOf; ///< Typed-array of 64-bit floating-point values -#if NAPI_VERSION > 5 -using BigInt64Array = - TypedArrayOf; ///< Typed array of signed 64-bit integers -using BigUint64Array = - TypedArrayOf; ///< Typed array of unsigned 64-bit integers -#endif // NAPI_VERSION > 5 - -/// Defines the signature of a Node-API C++ module's registration callback -/// (init) function. 
-using ModuleRegisterCallback = Object (*)(Env env, Object exports); - -class MemoryManagement; - -/// A simple Maybe type, representing an object which may or may not have a -/// value. -/// -/// If an API method returns a Maybe<>, the API method can potentially fail -/// either because an exception is thrown, or because an exception is pending, -/// e.g. because a previous API call threw an exception that hasn't been -/// caught yet. In that case, a "Nothing" value is returned. -template -class Maybe { - public: - bool IsNothing() const; - bool IsJust() const; - - /// Short-hand for Unwrap(), which doesn't return a value. Could be used - /// where the actual value of the Maybe is not needed like Object::Set. - /// If this Maybe is nothing (empty), node-addon-api will crash the - /// process. - void Check() const; - - /// Return the value of type T contained in the Maybe. If this Maybe is - /// nothing (empty), node-addon-api will crash the process. - T Unwrap() const; - - /// Return the value of type T contained in the Maybe, or using a default - /// value if this Maybe is nothing (empty). - T UnwrapOr(const T& default_value) const; - - /// Converts this Maybe to a value of type T in the out. If this Maybe is - /// nothing (empty), `false` is returned and `out` is left untouched. - bool UnwrapTo(T* out) const; - - bool operator==(const Maybe& other) const; - bool operator!=(const Maybe& other) const; - - private: - Maybe(); - explicit Maybe(const T& t); - - bool _has_value; - T _value; - - template - friend Maybe Nothing(); - template - friend Maybe Just(const U& u); -}; - -template -inline Maybe Nothing(); - -template -inline Maybe Just(const T& t); - -#if defined(NODE_ADDON_API_ENABLE_MAYBE) -template -using MaybeOrValue = Maybe; -#else -template -using MaybeOrValue = T; -#endif - -/// Environment for Node-API values and operations. -/// -/// All Node-API values and operations must be associated with an environment. 
-/// An environment instance is always provided to callback functions; that -/// environment must then be used for any creation of Node-API values or other -/// Node-API operations within the callback. (Many methods infer the -/// environment from the `this` instance that the method is called on.) -/// -/// In the future, multiple environments per process may be supported, -/// although current implementations only support one environment per process. -/// -/// In the V8 JavaScript engine, a Node-API environment approximately -/// corresponds to an Isolate. -class Env { - private: - napi_env _env; -#if NAPI_VERSION > 5 - template - static void DefaultFini(Env, T* data); - template - static void DefaultFiniWithHint(Env, DataType* data, HintType* hint); -#endif // NAPI_VERSION > 5 - public: - Env(napi_env env); - - operator napi_env() const; - - Object Global() const; - Value Undefined() const; - Value Null() const; - - bool IsExceptionPending() const; - Error GetAndClearPendingException() const; - - MaybeOrValue RunScript(const char* utf8script) const; - MaybeOrValue RunScript(const std::string& utf8script) const; - MaybeOrValue RunScript(String script) const; - -#if NAPI_VERSION > 2 - template - class CleanupHook; - - template - CleanupHook AddCleanupHook(Hook hook); - - template - CleanupHook AddCleanupHook(Hook hook, Arg* arg); -#endif // NAPI_VERSION > 2 - -#if NAPI_VERSION > 5 - template - T* GetInstanceData() const; - - template - using Finalizer = void (*)(Env, T*); - template fini = Env::DefaultFini> - void SetInstanceData(T* data) const; - - template - using FinalizerWithHint = void (*)(Env, DataType*, HintType*); - template fini = - Env::DefaultFiniWithHint> - void SetInstanceData(DataType* data, HintType* hint) const; -#endif // NAPI_VERSION > 5 - -#if NAPI_VERSION > 2 - template - class CleanupHook { - public: - CleanupHook(); - CleanupHook(Env env, Hook hook, Arg* arg); - CleanupHook(Env env, Hook hook); - bool Remove(Env env); - bool IsEmpty() const; - 
- private: - static inline void Wrapper(void* data) NAPI_NOEXCEPT; - static inline void WrapperWithArg(void* data) NAPI_NOEXCEPT; - - void (*wrapper)(void* arg); - struct CleanupData { - Hook hook; - Arg* arg; - } * data; - }; -#endif // NAPI_VERSION > 2 -}; - -/// A JavaScript value of unknown type. -/// -/// For type-specific operations, convert to one of the Value subclasses using a -/// `To*` or `As()` method. The `To*` methods do type coercion; the `As()` -/// method does not. -/// -/// Napi::Value value = ... -/// if (!value.IsString()) throw Napi::TypeError::New(env, "Invalid -/// arg..."); Napi::String str = value.As(); // Cast to a -/// string value -/// -/// Napi::Value anotherValue = ... -/// bool isTruthy = anotherValue.ToBoolean(); // Coerce to a boolean value -class Value { - public: - Value(); ///< Creates a new _empty_ Value instance. - Value(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - /// Creates a JS value from a C++ primitive. - /// - /// `value` may be any of: - /// - bool - /// - Any integer type - /// - Any floating point type - /// - const char* (encoded using UTF-8, null-terminated) - /// - const char16_t* (encoded using UTF-16-LE, null-terminated) - /// - std::string (encoded using UTF-8) - /// - std::u16string - /// - napi::Value - /// - napi_value - template - static Value From(napi_env env, const T& value); - - /// Converts to a Node-API value primitive. - /// - /// If the instance is _empty_, this returns `nullptr`. - operator napi_value() const; - - /// Tests if this value strictly equals another value. - bool operator==(const Value& other) const; - - /// Tests if this value does not strictly equal another value. - bool operator!=(const Value& other) const; - - /// Tests if this value strictly equals another value. - bool StrictEquals(const Value& other) const; - - /// Gets the environment the value is associated with. - Napi::Env Env() const; - - /// Checks if the value is empty (uninitialized). 
- /// - /// An empty value is invalid, and most attempts to perform an operation on an - /// empty value will result in an exception. Note an empty value is distinct - /// from JavaScript `null` or `undefined`, which are valid values. - /// - /// When C++ exceptions are disabled at compile time, a method with a `Value` - /// return type may return an empty value to indicate a pending exception. So - /// when not using C++ exceptions, callers should check whether the value is - /// empty before attempting to use it. - bool IsEmpty() const; - - napi_valuetype Type() const; ///< Gets the type of the value. - - bool IsUndefined() - const; ///< Tests if a value is an undefined JavaScript value. - bool IsNull() const; ///< Tests if a value is a null JavaScript value. - bool IsBoolean() const; ///< Tests if a value is a JavaScript boolean. - bool IsNumber() const; ///< Tests if a value is a JavaScript number. -#if NAPI_VERSION > 5 - bool IsBigInt() const; ///< Tests if a value is a JavaScript bigint. -#endif // NAPI_VERSION > 5 -#if (NAPI_VERSION > 4) - bool IsDate() const; ///< Tests if a value is a JavaScript date. -#endif - bool IsString() const; ///< Tests if a value is a JavaScript string. - bool IsSymbol() const; ///< Tests if a value is a JavaScript symbol. - bool IsArray() const; ///< Tests if a value is a JavaScript array. - bool IsArrayBuffer() - const; ///< Tests if a value is a JavaScript array buffer. - bool IsTypedArray() const; ///< Tests if a value is a JavaScript typed array. - bool IsObject() const; ///< Tests if a value is a JavaScript object. - bool IsFunction() const; ///< Tests if a value is a JavaScript function. - bool IsPromise() const; ///< Tests if a value is a JavaScript promise. - bool IsDataView() const; ///< Tests if a value is a JavaScript data view. - bool IsBuffer() const; ///< Tests if a value is a Node buffer. - bool IsExternal() const; ///< Tests if a value is a pointer to external data. 
- - /// Casts to another type of `Napi::Value`, when the actual type is known or - /// assumed. - /// - /// This conversion does NOT coerce the type. Calling any methods - /// inappropriate for the actual value type will throw `Napi::Error`. - template - T As() const; - - MaybeOrValue ToBoolean() - const; ///< Coerces a value to a JavaScript boolean. - MaybeOrValue ToNumber() - const; ///< Coerces a value to a JavaScript number. - MaybeOrValue ToString() - const; ///< Coerces a value to a JavaScript string. - MaybeOrValue ToObject() - const; ///< Coerces a value to a JavaScript object. - - protected: - /// !cond INTERNAL - napi_env _env; - napi_value _value; - /// !endcond -}; - -/// A JavaScript boolean value. -class Boolean : public Value { - public: - static Boolean New(napi_env env, ///< Node-API environment - bool value ///< Boolean value - ); - - Boolean(); ///< Creates a new _empty_ Boolean instance. - Boolean(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - operator bool() const; ///< Converts a Boolean value to a boolean primitive. - bool Value() const; ///< Converts a Boolean value to a boolean primitive. -}; - -/// A JavaScript number value. -class Number : public Value { - public: - static Number New(napi_env env, ///< Node-API environment - double value ///< Number value - ); - - Number(); ///< Creates a new _empty_ Number instance. - Number(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - operator int32_t() - const; ///< Converts a Number value to a 32-bit signed integer value. - operator uint32_t() - const; ///< Converts a Number value to a 32-bit unsigned integer value. - operator int64_t() - const; ///< Converts a Number value to a 64-bit signed integer value. - operator float() - const; ///< Converts a Number value to a 32-bit floating-point value. - operator double() - const; ///< Converts a Number value to a 64-bit floating-point value. 
- - int32_t Int32Value() - const; ///< Converts a Number value to a 32-bit signed integer value. - uint32_t Uint32Value() - const; ///< Converts a Number value to a 32-bit unsigned integer value. - int64_t Int64Value() - const; ///< Converts a Number value to a 64-bit signed integer value. - float FloatValue() - const; ///< Converts a Number value to a 32-bit floating-point value. - double DoubleValue() - const; ///< Converts a Number value to a 64-bit floating-point value. -}; - -#if NAPI_VERSION > 5 -/// A JavaScript bigint value. -class BigInt : public Value { - public: - static BigInt New(napi_env env, ///< Node-API environment - int64_t value ///< Number value - ); - static BigInt New(napi_env env, ///< Node-API environment - uint64_t value ///< Number value - ); - - /// Creates a new BigInt object using a specified sign bit and a - /// specified list of digits/words. - /// The resulting number is calculated as: - /// (-1)^sign_bit * (words[0] * (2^64)^0 + words[1] * (2^64)^1 + ...) - static BigInt New(napi_env env, ///< Node-API environment - int sign_bit, ///< Sign bit. 1 if negative. - size_t word_count, ///< Number of words in array - const uint64_t* words ///< Array of words - ); - - BigInt(); ///< Creates a new _empty_ BigInt instance. - BigInt(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - int64_t Int64Value(bool* lossless) - const; ///< Converts a BigInt value to a 64-bit signed integer value. - uint64_t Uint64Value(bool* lossless) - const; ///< Converts a BigInt value to a 64-bit unsigned integer value. - - size_t WordCount() const; ///< The number of 64-bit words needed to store - ///< the result of ToWords(). - - /// Writes the contents of this BigInt to a specified memory location. - /// `sign_bit` must be provided and will be set to 1 if this BigInt is - /// negative. - /// `*word_count` has to be initialized to the length of the `words` array. 
- /// Upon return, it will be set to the actual number of words that would - /// be needed to store this BigInt (i.e. the return value of `WordCount()`). - void ToWords(int* sign_bit, size_t* word_count, uint64_t* words); -}; -#endif // NAPI_VERSION > 5 - -#if (NAPI_VERSION > 4) -/// A JavaScript date value. -class Date : public Value { - public: - /// Creates a new Date value from a double primitive. - static Date New(napi_env env, ///< Node-API environment - double value ///< Number value - ); - - Date(); ///< Creates a new _empty_ Date instance. - Date(napi_env env, napi_value value); ///< Wraps a Node-API value primitive. - operator double() const; ///< Converts a Date value to double primitive - - double ValueOf() const; ///< Converts a Date value to a double primitive. -}; -#endif - -/// A JavaScript string or symbol value (that can be used as a property name). -class Name : public Value { - public: - Name(); ///< Creates a new _empty_ Name instance. - Name(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. -}; - -/// A JavaScript string value. -class String : public Name { - public: - /// Creates a new String value from a UTF-8 encoded C++ string. - static String New(napi_env env, ///< Node-API environment - const std::string& value ///< UTF-8 encoded C++ string - ); - - /// Creates a new String value from a UTF-16 encoded C++ string. - static String New(napi_env env, ///< Node-API environment - const std::u16string& value ///< UTF-16 encoded C++ string - ); - - /// Creates a new String value from a UTF-8 encoded C string. - static String New( - napi_env env, ///< Node-API environment - const char* value ///< UTF-8 encoded null-terminated C string - ); - - /// Creates a new String value from a UTF-16 encoded C string. 
- static String New( - napi_env env, ///< Node-API environment - const char16_t* value ///< UTF-16 encoded null-terminated C string - ); - - /// Creates a new String value from a UTF-8 encoded C string with specified - /// length. - static String New(napi_env env, ///< Node-API environment - const char* value, ///< UTF-8 encoded C string (not - ///< necessarily null-terminated) - size_t length ///< length of the string in bytes - ); - - /// Creates a new String value from a UTF-16 encoded C string with specified - /// length. - static String New( - napi_env env, ///< Node-API environment - const char16_t* value, ///< UTF-16 encoded C string (not necessarily - ///< null-terminated) - size_t length ///< Length of the string in 2-byte code units - ); - - /// Creates a new String based on the original object's type. - /// - /// `value` may be any of: - /// - const char* (encoded using UTF-8, null-terminated) - /// - const char16_t* (encoded using UTF-16-LE, null-terminated) - /// - std::string (encoded using UTF-8) - /// - std::u16string - template - static String From(napi_env env, const T& value); - - String(); ///< Creates a new _empty_ String instance. - String(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - operator std::string() - const; ///< Converts a String value to a UTF-8 encoded C++ string. - operator std::u16string() - const; ///< Converts a String value to a UTF-16 encoded C++ string. - std::string Utf8Value() - const; ///< Converts a String value to a UTF-8 encoded C++ string. - std::u16string Utf16Value() - const; ///< Converts a String value to a UTF-16 encoded C++ string. -}; - -/// A JavaScript symbol value. -class Symbol : public Name { - public: - /// Creates a new Symbol value with an optional description. 
- static Symbol New( - napi_env env, ///< Node-API environment - const char* description = - nullptr ///< Optional UTF-8 encoded null-terminated C string - /// describing the symbol - ); - - /// Creates a new Symbol value with a description. - static Symbol New( - napi_env env, ///< Node-API environment - const std::string& - description ///< UTF-8 encoded C++ string describing the symbol - ); - - /// Creates a new Symbol value with a description. - static Symbol New(napi_env env, ///< Node-API environment - String description ///< String value describing the symbol - ); - - /// Creates a new Symbol value with a description. - static Symbol New( - napi_env env, ///< Node-API environment - napi_value description ///< String value describing the symbol - ); - - /// Get a public Symbol (e.g. Symbol.iterator). - static MaybeOrValue WellKnown(napi_env, const std::string& name); - - // Create a symbol in the global registry, UTF-8 Encoded cpp string - static MaybeOrValue For(napi_env env, const std::string& description); - - // Create a symbol in the global registry, C style string (null terminated) - static MaybeOrValue For(napi_env env, const char* description); - - // Create a symbol in the global registry, String value describing the symbol - static MaybeOrValue For(napi_env env, String description); - - // Create a symbol in the global registry, napi_value describing the symbol - static MaybeOrValue For(napi_env env, napi_value description); - - Symbol(); ///< Creates a new _empty_ Symbol instance. - Symbol(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. -}; - -/// A JavaScript object value. -class Object : public Value { - public: - /// Enables property and element assignments using indexing syntax. - /// - /// This is a convenient helper to get and set object properties. As - /// getting and setting object properties may throw with JavaScript - /// exceptions, it is notable that these operations may fail. 
- /// When NODE_ADDON_API_ENABLE_MAYBE is defined, the process will abort - /// on JavaScript exceptions. - /// - /// Example: - /// - /// Napi::Value propertyValue = object1['A']; - /// object2['A'] = propertyValue; - /// Napi::Value elementValue = array[0]; - /// array[1] = elementValue; - template - class PropertyLValue { - public: - /// Converts an L-value to a value. - operator Value() const; - - /// Assigns a value to the property. The type of value can be - /// anything supported by `Object::Set`. - template - PropertyLValue& operator=(ValueType value); - - private: - PropertyLValue() = delete; - PropertyLValue(Object object, Key key); - napi_env _env; - napi_value _object; - Key _key; - - friend class Napi::Object; - }; - - /// Creates a new Object value. - static Object New(napi_env env ///< Node-API environment - ); - - Object(); ///< Creates a new _empty_ Object instance. - Object(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - /// Gets or sets a named property. - PropertyLValue operator[]( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ); - - /// Gets or sets a named property. - PropertyLValue operator[]( - const std::string& utf8name ///< UTF-8 encoded property name - ); - - /// Gets or sets an indexed property or array element. - PropertyLValue operator[]( - uint32_t index /// Property / element index - ); - - /// Gets or sets an indexed property or array element. - PropertyLValue operator[](Value index /// Property / element index - ) const; - - /// Gets a named property. - MaybeOrValue operator[]( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ) const; - - /// Gets a named property. - MaybeOrValue operator[]( - const std::string& utf8name ///< UTF-8 encoded property name - ) const; - - /// Gets an indexed property or array element. - MaybeOrValue operator[](uint32_t index ///< Property / element index - ) const; - - /// Checks whether a property is present. 
- MaybeOrValue Has(napi_value key ///< Property key primitive - ) const; - - /// Checks whether a property is present. - MaybeOrValue Has(Value key ///< Property key - ) const; - - /// Checks whether a named property is present. - MaybeOrValue Has( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ) const; - - /// Checks whether a named property is present. - MaybeOrValue Has( - const std::string& utf8name ///< UTF-8 encoded property name - ) const; - - /// Checks whether a own property is present. - MaybeOrValue HasOwnProperty(napi_value key ///< Property key primitive - ) const; - - /// Checks whether a own property is present. - MaybeOrValue HasOwnProperty(Value key ///< Property key - ) const; - - /// Checks whether a own property is present. - MaybeOrValue HasOwnProperty( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ) const; - - /// Checks whether a own property is present. - MaybeOrValue HasOwnProperty( - const std::string& utf8name ///< UTF-8 encoded property name - ) const; - - /// Gets a property. - MaybeOrValue Get(napi_value key ///< Property key primitive - ) const; - - /// Gets a property. - MaybeOrValue Get(Value key ///< Property key - ) const; - - /// Gets a named property. - MaybeOrValue Get( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ) const; - - /// Gets a named property. - MaybeOrValue Get( - const std::string& utf8name ///< UTF-8 encoded property name - ) const; - - /// Sets a property. - template - MaybeOrValue Set(napi_value key, ///< Property key primitive - const ValueType& value ///< Property value primitive - ) const; - - /// Sets a property. - template - MaybeOrValue Set(Value key, ///< Property key - const ValueType& value ///< Property value - ) const; - - /// Sets a named property. - template - MaybeOrValue Set( - const char* utf8name, ///< UTF-8 encoded null-terminated property name - const ValueType& value) const; - - /// Sets a named property. 
- template - MaybeOrValue Set( - const std::string& utf8name, ///< UTF-8 encoded property name - const ValueType& value ///< Property value primitive - ) const; - - /// Delete property. - MaybeOrValue Delete(napi_value key ///< Property key primitive - ) const; - - /// Delete property. - MaybeOrValue Delete(Value key ///< Property key - ) const; - - /// Delete property. - MaybeOrValue Delete( - const char* utf8name ///< UTF-8 encoded null-terminated property name - ) const; - - /// Delete property. - MaybeOrValue Delete( - const std::string& utf8name ///< UTF-8 encoded property name - ) const; - - /// Checks whether an indexed property is present. - MaybeOrValue Has(uint32_t index ///< Property / element index - ) const; - - /// Gets an indexed property or array element. - MaybeOrValue Get(uint32_t index ///< Property / element index - ) const; - - /// Sets an indexed property or array element. - template - MaybeOrValue Set(uint32_t index, ///< Property / element index - const ValueType& value ///< Property value primitive - ) const; - - /// Deletes an indexed property or array element. - MaybeOrValue Delete(uint32_t index ///< Property / element index - ) const; - - /// This operation can fail in case of Proxy.[[OwnPropertyKeys]] and - /// Proxy.[[GetOwnProperty]] calling into JavaScript. See: - /// - - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-ownpropertykeys - /// - - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getownproperty-p - MaybeOrValue GetPropertyNames() const; ///< Get all property names - - /// Defines a property on the object. - /// - /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling - /// into JavaScript. 
See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc - MaybeOrValue DefineProperty( - const PropertyDescriptor& - property ///< Descriptor for the property to be defined - ) const; - - /// Defines properties on the object. - /// - /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling - /// into JavaScript. See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc - MaybeOrValue DefineProperties( - const std::initializer_list& properties - ///< List of descriptors for the properties to be defined - ) const; - - /// Defines properties on the object. - /// - /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling - /// into JavaScript. See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc - MaybeOrValue DefineProperties( - const std::vector& properties - ///< Vector of descriptors for the properties to be defined - ) const; - - /// Checks if an object is an instance created by a constructor function. - /// - /// This is equivalent to the JavaScript `instanceof` operator. - /// - /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into - /// JavaScript. 
- /// See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof - MaybeOrValue InstanceOf( - const Function& constructor ///< Constructor function - ) const; - - template - inline void AddFinalizer(Finalizer finalizeCallback, T* data) const; - - template - inline void AddFinalizer(Finalizer finalizeCallback, - T* data, - Hint* finalizeHint) const; - -#ifdef NAPI_CPP_EXCEPTIONS - class const_iterator; - - inline const_iterator begin() const; - - inline const_iterator end() const; - - class iterator; - - inline iterator begin(); - - inline iterator end(); -#endif // NAPI_CPP_EXCEPTIONS - -#if NAPI_VERSION >= 8 - /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into - /// JavaScript. - /// See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof - MaybeOrValue Freeze() const; - /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into - /// JavaScript. - /// See - /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof - MaybeOrValue Seal() const; -#endif // NAPI_VERSION >= 8 -}; - -template -class External : public Value { - public: - static External New(napi_env env, T* data); - - // Finalizer must implement `void operator()(Env env, T* data)`. - template - static External New(napi_env env, T* data, Finalizer finalizeCallback); - // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. 
- template - static External New(napi_env env, - T* data, - Finalizer finalizeCallback, - Hint* finalizeHint); - - External(); - External(napi_env env, napi_value value); - - T* Data() const; -}; - -class Array : public Object { - public: - static Array New(napi_env env); - static Array New(napi_env env, size_t length); - - Array(); - Array(napi_env env, napi_value value); - - uint32_t Length() const; -}; - -#ifdef NAPI_CPP_EXCEPTIONS -class Object::const_iterator { - private: - enum class Type { BEGIN, END }; - - inline const_iterator(const Object* object, const Type type); - - public: - inline const_iterator& operator++(); - - inline bool operator==(const const_iterator& other) const; - - inline bool operator!=(const const_iterator& other) const; - - inline const std::pair> operator*() - const; - - private: - const Napi::Object* _object; - Array _keys; - uint32_t _index; - - friend class Object; -}; - -class Object::iterator { - private: - enum class Type { BEGIN, END }; - - inline iterator(Object* object, const Type type); - - public: - inline iterator& operator++(); - - inline bool operator==(const iterator& other) const; - - inline bool operator!=(const iterator& other) const; - - inline std::pair> operator*(); - - private: - Napi::Object* _object; - Array _keys; - uint32_t _index; - - friend class Object; -}; -#endif // NAPI_CPP_EXCEPTIONS - -/// A JavaScript array buffer value. -class ArrayBuffer : public Object { - public: - /// Creates a new ArrayBuffer instance over a new automatically-allocated - /// buffer. - static ArrayBuffer New( - napi_env env, ///< Node-API environment - size_t byteLength ///< Length of the buffer to be allocated, in bytes - ); - - /// Creates a new ArrayBuffer instance, using an external buffer with - /// specified byte length. 
- static ArrayBuffer New( - napi_env env, ///< Node-API environment - void* externalData, ///< Pointer to the external buffer to be used by - ///< the array - size_t byteLength ///< Length of the external buffer to be used by the - ///< array, in bytes - ); - - /// Creates a new ArrayBuffer instance, using an external buffer with - /// specified byte length. - template - static ArrayBuffer New( - napi_env env, ///< Node-API environment - void* externalData, ///< Pointer to the external buffer to be used by - ///< the array - size_t byteLength, ///< Length of the external buffer to be used by the - ///< array, - /// in bytes - Finalizer finalizeCallback ///< Function to be called when the array - ///< buffer is destroyed; - /// must implement `void operator()(Env env, - /// void* externalData)` - ); - - /// Creates a new ArrayBuffer instance, using an external buffer with - /// specified byte length. - template - static ArrayBuffer New( - napi_env env, ///< Node-API environment - void* externalData, ///< Pointer to the external buffer to be used by - ///< the array - size_t byteLength, ///< Length of the external buffer to be used by the - ///< array, - /// in bytes - Finalizer finalizeCallback, ///< Function to be called when the array - ///< buffer is destroyed; - /// must implement `void operator()(Env - /// env, void* externalData, Hint* hint)` - Hint* finalizeHint ///< Hint (second parameter) to be passed to the - ///< finalize callback - ); - - ArrayBuffer(); ///< Creates a new _empty_ ArrayBuffer instance. - ArrayBuffer(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - void* Data(); ///< Gets a pointer to the data buffer. - size_t ByteLength(); ///< Gets the length of the array buffer in bytes. - -#if NAPI_VERSION >= 7 - bool IsDetached() const; - void Detach(); -#endif // NAPI_VERSION >= 7 -}; - -/// A JavaScript typed-array value with unknown array type. 
-/// -/// For type-specific operations, cast to a `TypedArrayOf` instance using the -/// `As()` method: -/// -/// Napi::TypedArray array = ... -/// if (t.TypedArrayType() == napi_int32_array) { -/// Napi::Int32Array int32Array = t.As(); -/// } -class TypedArray : public Object { - public: - TypedArray(); ///< Creates a new _empty_ TypedArray instance. - TypedArray(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - napi_typedarray_type TypedArrayType() - const; ///< Gets the type of this typed-array. - Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. - - uint8_t ElementSize() - const; ///< Gets the size in bytes of one element in the array. - size_t ElementLength() const; ///< Gets the number of elements in the array. - size_t ByteOffset() - const; ///< Gets the offset into the buffer where the array starts. - size_t ByteLength() const; ///< Gets the length of the array in bytes. - - protected: - /// !cond INTERNAL - napi_typedarray_type _type; - size_t _length; - - TypedArray(napi_env env, - napi_value value, - napi_typedarray_type type, - size_t length); - - template - static -#if defined(NAPI_HAS_CONSTEXPR) - constexpr -#endif - napi_typedarray_type - TypedArrayTypeForPrimitiveType() { - return std::is_same::value ? napi_int8_array - : std::is_same::value ? napi_uint8_array - : std::is_same::value ? napi_int16_array - : std::is_same::value ? napi_uint16_array - : std::is_same::value ? napi_int32_array - : std::is_same::value ? napi_uint32_array - : std::is_same::value ? napi_float32_array - : std::is_same::value ? napi_float64_array -#if NAPI_VERSION > 5 - : std::is_same::value ? napi_bigint64_array - : std::is_same::value ? napi_biguint64_array -#endif // NAPI_VERSION > 5 - : napi_int8_array; - } - /// !endcond -}; - -/// A JavaScript typed-array value with known array type. 
-/// -/// Note while it is possible to create and access Uint8 "clamped" arrays using -/// this class, the _clamping_ behavior is only applied in JavaScript. -template -class TypedArrayOf : public TypedArray { - public: - /// Creates a new TypedArray instance over a new automatically-allocated array - /// buffer. - /// - /// The array type parameter can normally be omitted (because it is inferred - /// from the template parameter T), except when creating a "clamped" array: - /// - /// Uint8Array::New(env, length, napi_uint8_clamped_array) - static TypedArrayOf New( - napi_env env, ///< Node-API environment - size_t elementLength, ///< Length of the created array, as a number of - ///< elements -#if defined(NAPI_HAS_CONSTEXPR) - napi_typedarray_type type = - TypedArray::TypedArrayTypeForPrimitiveType() -#else - napi_typedarray_type type -#endif - ///< Type of array, if different from the default array type for the - ///< template parameter T. - ); - - /// Creates a new TypedArray instance over a provided array buffer. - /// - /// The array type parameter can normally be omitted (because it is inferred - /// from the template parameter T), except when creating a "clamped" array: - /// - /// Uint8Array::New(env, length, buffer, 0, napi_uint8_clamped_array) - static TypedArrayOf New( - napi_env env, ///< Node-API environment - size_t elementLength, ///< Length of the created array, as a number of - ///< elements - Napi::ArrayBuffer arrayBuffer, ///< Backing array buffer instance to use - size_t bufferOffset, ///< Offset into the array buffer where the - ///< typed-array starts -#if defined(NAPI_HAS_CONSTEXPR) - napi_typedarray_type type = - TypedArray::TypedArrayTypeForPrimitiveType() -#else - napi_typedarray_type type -#endif - ///< Type of array, if different from the default array type for the - ///< template parameter T. - ); - - TypedArrayOf(); ///< Creates a new _empty_ TypedArrayOf instance. 
- TypedArrayOf(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - T& operator[](size_t index); ///< Gets or sets an element in the array. - const T& operator[](size_t index) const; ///< Gets an element in the array. - - /// Gets a pointer to the array's backing buffer. - /// - /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, - /// because the typed-array may have a non-zero `ByteOffset()` into the - /// `ArrayBuffer`. - T* Data(); - - /// Gets a pointer to the array's backing buffer. - /// - /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, - /// because the typed-array may have a non-zero `ByteOffset()` into the - /// `ArrayBuffer`. - const T* Data() const; - - private: - T* _data; - - TypedArrayOf(napi_env env, - napi_value value, - napi_typedarray_type type, - size_t length, - T* data); -}; - -/// The DataView provides a low-level interface for reading/writing multiple -/// number types in an ArrayBuffer irrespective of the platform's endianness. -class DataView : public Object { - public: - static DataView New(napi_env env, Napi::ArrayBuffer arrayBuffer); - static DataView New(napi_env env, - Napi::ArrayBuffer arrayBuffer, - size_t byteOffset); - static DataView New(napi_env env, - Napi::ArrayBuffer arrayBuffer, - size_t byteOffset, - size_t byteLength); - - DataView(); ///< Creates a new _empty_ DataView instance. - DataView(napi_env env, - napi_value value); ///< Wraps a Node-API value primitive. - - Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. - size_t ByteOffset() - const; ///< Gets the offset into the buffer where the array starts. - size_t ByteLength() const; ///< Gets the length of the array in bytes. 
- - void* Data() const; - - float GetFloat32(size_t byteOffset) const; - double GetFloat64(size_t byteOffset) const; - int8_t GetInt8(size_t byteOffset) const; - int16_t GetInt16(size_t byteOffset) const; - int32_t GetInt32(size_t byteOffset) const; - uint8_t GetUint8(size_t byteOffset) const; - uint16_t GetUint16(size_t byteOffset) const; - uint32_t GetUint32(size_t byteOffset) const; - - void SetFloat32(size_t byteOffset, float value) const; - void SetFloat64(size_t byteOffset, double value) const; - void SetInt8(size_t byteOffset, int8_t value) const; - void SetInt16(size_t byteOffset, int16_t value) const; - void SetInt32(size_t byteOffset, int32_t value) const; - void SetUint8(size_t byteOffset, uint8_t value) const; - void SetUint16(size_t byteOffset, uint16_t value) const; - void SetUint32(size_t byteOffset, uint32_t value) const; - - private: - template - T ReadData(size_t byteOffset) const; - - template - void WriteData(size_t byteOffset, T value) const; - - void* _data; - size_t _length; -}; - -class Function : public Object { - public: - using VoidCallback = void (*)(const CallbackInfo& info); - using Callback = Value (*)(const CallbackInfo& info); - - template - static Function New(napi_env env, - const char* utf8name = nullptr, - void* data = nullptr); - - template - static Function New(napi_env env, - const char* utf8name = nullptr, - void* data = nullptr); - - template - static Function New(napi_env env, - const std::string& utf8name, - void* data = nullptr); - - template - static Function New(napi_env env, - const std::string& utf8name, - void* data = nullptr); - - /// Callable must implement operator() accepting a const CallbackInfo& - /// and return either void or Value. - template - static Function New(napi_env env, - Callable cb, - const char* utf8name = nullptr, - void* data = nullptr); - /// Callable must implement operator() accepting a const CallbackInfo& - /// and return either void or Value. 
- template - static Function New(napi_env env, - Callable cb, - const std::string& utf8name, - void* data = nullptr); - - Function(); - Function(napi_env env, napi_value value); - - MaybeOrValue operator()( - const std::initializer_list& args) const; - - MaybeOrValue Call(const std::initializer_list& args) const; - MaybeOrValue Call(const std::vector& args) const; - MaybeOrValue Call(const std::vector& args) const; - MaybeOrValue Call(size_t argc, const napi_value* args) const; - MaybeOrValue Call(napi_value recv, - const std::initializer_list& args) const; - MaybeOrValue Call(napi_value recv, - const std::vector& args) const; - MaybeOrValue Call(napi_value recv, - const std::vector& args) const; - MaybeOrValue Call(napi_value recv, - size_t argc, - const napi_value* args) const; - - MaybeOrValue MakeCallback( - napi_value recv, - const std::initializer_list& args, - napi_async_context context = nullptr) const; - MaybeOrValue MakeCallback(napi_value recv, - const std::vector& args, - napi_async_context context = nullptr) const; - MaybeOrValue MakeCallback(napi_value recv, - size_t argc, - const napi_value* args, - napi_async_context context = nullptr) const; - - MaybeOrValue New(const std::initializer_list& args) const; - MaybeOrValue New(const std::vector& args) const; - MaybeOrValue New(size_t argc, const napi_value* args) const; -}; - -class Promise : public Object { - public: - class Deferred { - public: - static Deferred New(napi_env env); - Deferred(napi_env env); - - Napi::Promise Promise() const; - Napi::Env Env() const; - - void Resolve(napi_value value) const; - void Reject(napi_value value) const; - - private: - napi_env _env; - napi_deferred _deferred; - napi_value _promise; - }; - - Promise(napi_env env, napi_value value); -}; - -template -class Buffer : public Uint8Array { - public: - static Buffer New(napi_env env, size_t length); - static Buffer New(napi_env env, T* data, size_t length); - - // Finalizer must implement `void operator()(Env env, T* 
data)`. - template - static Buffer New(napi_env env, - T* data, - size_t length, - Finalizer finalizeCallback); - // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. - template - static Buffer New(napi_env env, - T* data, - size_t length, - Finalizer finalizeCallback, - Hint* finalizeHint); - - static Buffer Copy(napi_env env, const T* data, size_t length); - - Buffer(); - Buffer(napi_env env, napi_value value); - size_t Length() const; - T* Data() const; - - private: - mutable size_t _length; - mutable T* _data; - - Buffer(napi_env env, napi_value value, size_t length, T* data); - void EnsureInfo() const; -}; - -/// Holds a counted reference to a value; initially a weak reference unless -/// otherwise specified, may be changed to/from a strong reference by adjusting -/// the refcount. -/// -/// The referenced value is not immediately destroyed when the reference count -/// is zero; it is merely then eligible for garbage-collection if there are no -/// other references to the value. -template -class Reference { - public: - static Reference New(const T& value, uint32_t initialRefcount = 0); - - Reference(); - Reference(napi_env env, napi_ref ref); - ~Reference(); - - // A reference can be moved but cannot be copied. - Reference(Reference&& other); - Reference& operator=(Reference&& other); - NAPI_DISALLOW_ASSIGN(Reference) - - operator napi_ref() const; - bool operator==(const Reference& other) const; - bool operator!=(const Reference& other) const; - - Napi::Env Env() const; - bool IsEmpty() const; - - // Note when getting the value of a Reference it is usually correct to do so - // within a HandleScope so that the value handle gets cleaned up efficiently. 
- T Value() const; - - uint32_t Ref() const; - uint32_t Unref() const; - void Reset(); - void Reset(const T& value, uint32_t refcount = 0); - - // Call this on a reference that is declared as static data, to prevent its - // destructor from running at program shutdown time, which would attempt to - // reset the reference when the environment is no longer valid. Avoid using - // this if at all possible. If you do need to use static data, MAKE SURE to - // warn your users that your addon is NOT threadsafe. - void SuppressDestruct(); - - protected: - Reference(const Reference&); - - /// !cond INTERNAL - napi_env _env; - napi_ref _ref; - /// !endcond - - private: - bool _suppressDestruct; -}; - -class ObjectReference : public Reference { - public: - ObjectReference(); - ObjectReference(napi_env env, napi_ref ref); - - // A reference can be moved but cannot be copied. - ObjectReference(Reference&& other); - ObjectReference& operator=(Reference&& other); - ObjectReference(ObjectReference&& other); - ObjectReference& operator=(ObjectReference&& other); - NAPI_DISALLOW_ASSIGN(ObjectReference) - - MaybeOrValue Get(const char* utf8name) const; - MaybeOrValue Get(const std::string& utf8name) const; - MaybeOrValue Set(const char* utf8name, napi_value value) const; - MaybeOrValue Set(const char* utf8name, Napi::Value value) const; - MaybeOrValue Set(const char* utf8name, const char* utf8value) const; - MaybeOrValue Set(const char* utf8name, bool boolValue) const; - MaybeOrValue Set(const char* utf8name, double numberValue) const; - MaybeOrValue Set(const std::string& utf8name, napi_value value) const; - MaybeOrValue Set(const std::string& utf8name, Napi::Value value) const; - MaybeOrValue Set(const std::string& utf8name, - std::string& utf8value) const; - MaybeOrValue Set(const std::string& utf8name, bool boolValue) const; - MaybeOrValue Set(const std::string& utf8name, double numberValue) const; - - MaybeOrValue Get(uint32_t index) const; - MaybeOrValue Set(uint32_t index, 
const napi_value value) const; - MaybeOrValue Set(uint32_t index, const Napi::Value value) const; - MaybeOrValue Set(uint32_t index, const char* utf8value) const; - MaybeOrValue Set(uint32_t index, const std::string& utf8value) const; - MaybeOrValue Set(uint32_t index, bool boolValue) const; - MaybeOrValue Set(uint32_t index, double numberValue) const; - - protected: - ObjectReference(const ObjectReference&); -}; - -class FunctionReference : public Reference { - public: - FunctionReference(); - FunctionReference(napi_env env, napi_ref ref); - - // A reference can be moved but cannot be copied. - FunctionReference(Reference&& other); - FunctionReference& operator=(Reference&& other); - FunctionReference(FunctionReference&& other); - FunctionReference& operator=(FunctionReference&& other); - NAPI_DISALLOW_ASSIGN_COPY(FunctionReference) - - MaybeOrValue operator()( - const std::initializer_list& args) const; - - MaybeOrValue Call( - const std::initializer_list& args) const; - MaybeOrValue Call(const std::vector& args) const; - MaybeOrValue Call( - napi_value recv, const std::initializer_list& args) const; - MaybeOrValue Call(napi_value recv, - const std::vector& args) const; - MaybeOrValue Call(napi_value recv, - size_t argc, - const napi_value* args) const; - - MaybeOrValue MakeCallback( - napi_value recv, - const std::initializer_list& args, - napi_async_context context = nullptr) const; - MaybeOrValue MakeCallback( - napi_value recv, - const std::vector& args, - napi_async_context context = nullptr) const; - MaybeOrValue MakeCallback( - napi_value recv, - size_t argc, - const napi_value* args, - napi_async_context context = nullptr) const; - - MaybeOrValue New(const std::initializer_list& args) const; - MaybeOrValue New(const std::vector& args) const; -}; - -// Shortcuts to creating a new reference with inferred type and refcount = 0. 
-template -Reference Weak(T value); -ObjectReference Weak(Object value); -FunctionReference Weak(Function value); - -// Shortcuts to creating a new reference with inferred type and refcount = 1. -template -Reference Persistent(T value); -ObjectReference Persistent(Object value); -FunctionReference Persistent(Function value); - -/// A persistent reference to a JavaScript error object. Use of this class -/// depends somewhat on whether C++ exceptions are enabled at compile time. -/// -/// ### Handling Errors With C++ Exceptions -/// -/// If C++ exceptions are enabled, then the `Error` class extends -/// `std::exception` and enables integrated error-handling for C++ exceptions -/// and JavaScript exceptions. -/// -/// If a Node-API call fails without executing any JavaScript code (for -/// example due to an invalid argument), then the Node-API wrapper -/// automatically converts and throws the error as a C++ exception of type -/// `Napi::Error`. Or if a JavaScript function called by C++ code via Node-API -/// throws a JavaScript exception, then the Node-API wrapper automatically -/// converts and throws it as a C++ exception of type `Napi::Error`. -/// -/// If a C++ exception of type `Napi::Error` escapes from a Node-API C++ -/// callback, then the Node-API wrapper automatically converts and throws it -/// as a JavaScript exception. Therefore, catching a C++ exception of type -/// `Napi::Error` prevents a JavaScript exception from being thrown. -/// -/// #### Example 1A - Throwing a C++ exception: -/// -/// Napi::Env env = ... -/// throw Napi::Error::New(env, "Example exception"); -/// -/// Following C++ statements will not be executed. The exception will bubble -/// up as a C++ exception of type `Napi::Error`, until it is either caught -/// while still in C++, or else automatically propataged as a JavaScript -/// exception when the callback returns to JavaScript. 
-/// -/// #### Example 2A - Propagating a Node-API C++ exception: -/// -/// Napi::Function jsFunctionThatThrows = someObj.As(); -/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); -/// -/// Following C++ statements will not be executed. The exception will bubble -/// up as a C++ exception of type `Napi::Error`, until it is either caught -/// while still in C++, or else automatically propagated as a JavaScript -/// exception when the callback returns to JavaScript. -/// -/// #### Example 3A - Handling a Node-API C++ exception: -/// -/// Napi::Function jsFunctionThatThrows = someObj.As(); -/// Napi::Value result; -/// try { -/// result = jsFunctionThatThrows({ arg1, arg2 }); -/// } catch (const Napi::Error& e) { -/// cerr << "Caught JavaScript exception: " + e.what(); -/// } -/// -/// Since the exception was caught here, it will not be propagated as a -/// JavaScript exception. -/// -/// ### Handling Errors Without C++ Exceptions -/// -/// If C++ exceptions are disabled (by defining `NAPI_DISABLE_CPP_EXCEPTIONS`) -/// then this class does not extend `std::exception`, and APIs in the `Napi` -/// namespace do not throw C++ exceptions when they fail. Instead, they raise -/// _pending_ JavaScript exceptions and return _empty_ `Value`s. Calling code -/// should check `Value::IsEmpty()` before attempting to use a returned value, -/// and may use methods on the `Env` class to check for, get, and clear a -/// pending JavaScript exception. If the pending exception is not cleared, it -/// will be thrown when the native callback returns to JavaScript. -/// -/// #### Example 1B - Throwing a JS exception -/// -/// Napi::Env env = ... -/// Napi::Error::New(env, "Example -/// exception").ThrowAsJavaScriptException(); return; -/// -/// After throwing a JS exception, the code should generally return -/// immediately from the native callback, after performing any necessary -/// cleanup. 
-/// -/// #### Example 2B - Propagating a Node-API JS exception: -/// -/// Napi::Function jsFunctionThatThrows = someObj.As(); -/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); -/// if (result.IsEmpty()) return; -/// -/// An empty value result from a Node-API call indicates an error occurred, -/// and a JavaScript exception is pending. To let the exception propagate, the -/// code should generally return immediately from the native callback, after -/// performing any necessary cleanup. -/// -/// #### Example 3B - Handling a Node-API JS exception: -/// -/// Napi::Function jsFunctionThatThrows = someObj.As(); -/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); -/// if (result.IsEmpty()) { -/// Napi::Error e = env.GetAndClearPendingException(); -/// cerr << "Caught JavaScript exception: " + e.Message(); -/// } -/// -/// Since the exception was cleared here, it will not be propagated as a -/// JavaScript exception after the native callback returns. -class Error : public ObjectReference -#ifdef NAPI_CPP_EXCEPTIONS - , - public std::exception -#endif // NAPI_CPP_EXCEPTIONS -{ - public: - static Error New(napi_env env); - static Error New(napi_env env, const char* message); - static Error New(napi_env env, const std::string& message); - - static NAPI_NO_RETURN void Fatal(const char* location, const char* message); - - Error(); - Error(napi_env env, napi_value value); - - // An error can be moved or copied. 
- Error(Error&& other); - Error& operator=(Error&& other); - Error(const Error&); - Error& operator=(const Error&); - - const std::string& Message() const NAPI_NOEXCEPT; - void ThrowAsJavaScriptException() const; - - Object Value() const; - -#ifdef NAPI_CPP_EXCEPTIONS - const char* what() const NAPI_NOEXCEPT override; -#endif // NAPI_CPP_EXCEPTIONS - - protected: - /// !cond INTERNAL - using create_error_fn = napi_status (*)(napi_env envb, - napi_value code, - napi_value msg, - napi_value* result); - - template - static TError New(napi_env env, - const char* message, - size_t length, - create_error_fn create_error); - /// !endcond - - private: - static inline const char* ERROR_WRAP_VALUE() NAPI_NOEXCEPT; - mutable std::string _message; -}; - -class TypeError : public Error { - public: - static TypeError New(napi_env env, const char* message); - static TypeError New(napi_env env, const std::string& message); - - TypeError(); - TypeError(napi_env env, napi_value value); -}; - -class RangeError : public Error { - public: - static RangeError New(napi_env env, const char* message); - static RangeError New(napi_env env, const std::string& message); - - RangeError(); - RangeError(napi_env env, napi_value value); -}; - -class CallbackInfo { - public: - CallbackInfo(napi_env env, napi_callback_info info); - ~CallbackInfo(); - - // Disallow copying to prevent multiple free of _dynamicArgs - NAPI_DISALLOW_ASSIGN_COPY(CallbackInfo) - - Napi::Env Env() const; - Value NewTarget() const; - bool IsConstructCall() const; - size_t Length() const; - const Value operator[](size_t index) const; - Value This() const; - void* Data() const; - void SetData(void* data); - operator napi_callback_info() const; - - private: - const size_t _staticArgCount = 6; - napi_env _env; - napi_callback_info _info; - napi_value _this; - size_t _argc; - napi_value* _argv; - napi_value _staticArgs[6]; - napi_value* _dynamicArgs; - void* _data; -}; - -class PropertyDescriptor { - public: - using 
GetterCallback = Napi::Value (*)(const Napi::CallbackInfo& info); - using SetterCallback = void (*)(const Napi::CallbackInfo& info); - -#ifndef NODE_ADDON_API_DISABLE_DEPRECATED - template - static PropertyDescriptor Accessor( - const char* utf8name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - const std::string& utf8name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - napi_value name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Name name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - const char* utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - const std::string& utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - napi_value name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Name name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - const char* utf8name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - const std::string& utf8name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - napi_value name, - Callable 
cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - Name name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); -#endif // !NODE_ADDON_API_DISABLE_DEPRECATED - - template - static PropertyDescriptor Accessor( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - const std::string& utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - Name name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - const std::string& utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - Name name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - const char* utf8name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - const std::string& utf8name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - Name name, - Getter getter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - const char* utf8name, - Getter getter, - Setter setter, - napi_property_attributes 
attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - const std::string& utf8name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Accessor( - Napi::Env env, - Napi::Object object, - Name name, - Getter getter, - Setter setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - Napi::Env env, - Napi::Object object, - const char* utf8name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - Napi::Env env, - Napi::Object object, - const std::string& utf8name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor Function( - Napi::Env env, - Napi::Object object, - Name name, - Callable cb, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor Value( - const char* utf8name, - napi_value value, - napi_property_attributes attributes = napi_default); - static PropertyDescriptor Value( - const std::string& utf8name, - napi_value value, - napi_property_attributes attributes = napi_default); - static PropertyDescriptor Value( - napi_value name, - napi_value value, - napi_property_attributes attributes = napi_default); - static PropertyDescriptor Value( - Name name, - Napi::Value value, - napi_property_attributes attributes = napi_default); - - PropertyDescriptor(napi_property_descriptor desc); - - operator napi_property_descriptor&(); - operator const napi_property_descriptor&() const; - - private: - napi_property_descriptor _desc; -}; - -/// Property descriptor for use with `ObjectWrap::DefineClass()`. 
-/// -/// This is different from the standalone `PropertyDescriptor` because it is -/// specific to each `ObjectWrap` subclass. This prevents using descriptors -/// from a different class when defining a new class (preventing the callbacks -/// from having incorrect `this` pointers). -template -class ClassPropertyDescriptor { - public: - ClassPropertyDescriptor(napi_property_descriptor desc) : _desc(desc) {} - - operator napi_property_descriptor&() { return _desc; } - operator const napi_property_descriptor&() const { return _desc; } - - private: - napi_property_descriptor _desc; -}; - -template -struct MethodCallbackData { - TCallback callback; - void* data; -}; - -template -struct AccessorCallbackData { - TGetterCallback getterCallback; - TSetterCallback setterCallback; - void* data; -}; - -template -class InstanceWrap { - public: - using InstanceVoidMethodCallback = void (T::*)(const CallbackInfo& info); - using InstanceMethodCallback = Napi::Value (T::*)(const CallbackInfo& info); - using InstanceGetterCallback = Napi::Value (T::*)(const CallbackInfo& info); - using InstanceSetterCallback = void (T::*)(const CallbackInfo& info, - const Napi::Value& value); - - using PropertyDescriptor = ClassPropertyDescriptor; - - static PropertyDescriptor InstanceMethod( - const char* utf8name, - InstanceVoidMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceMethod( - const char* utf8name, - InstanceMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceMethod( - Symbol name, - InstanceVoidMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceMethod( - Symbol name, - InstanceMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor 
InstanceMethod( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor InstanceMethod( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor InstanceMethod( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor InstanceMethod( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceAccessor( - const char* utf8name, - InstanceGetterCallback getter, - InstanceSetterCallback setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceAccessor( - Symbol name, - InstanceGetterCallback getter, - InstanceSetterCallback setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor InstanceAccessor( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor InstanceAccessor( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor InstanceValue( - const char* utf8name, - Napi::Value value, - napi_property_attributes attributes = napi_default); - static PropertyDescriptor InstanceValue( - Symbol name, - Napi::Value value, - napi_property_attributes attributes = napi_default); - - protected: - static void AttachPropData(napi_env env, - napi_value value, - const napi_property_descriptor* prop); - - private: - using This = InstanceWrap; - - using InstanceVoidMethodCallbackData = - MethodCallbackData; - using InstanceMethodCallbackData = - MethodCallbackData; - using InstanceAccessorCallbackData = - AccessorCallbackData; - - static napi_value InstanceVoidMethodCallbackWrapper(napi_env 
env, - napi_callback_info info); - static napi_value InstanceMethodCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value InstanceGetterCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value InstanceSetterCallbackWrapper(napi_env env, - napi_callback_info info); - - template - static napi_value WrappedMethod(napi_env env, - napi_callback_info info) NAPI_NOEXCEPT; - - template - struct SetterTag {}; - - template - static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { - return &This::WrappedMethod; - } - static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { - return nullptr; - } -}; - -/// Base class to be extended by C++ classes exposed to JavaScript; each C++ -/// class instance gets "wrapped" by a JavaScript object that is managed by this -/// class. -/// -/// At initialization time, the `DefineClass()` method must be used to -/// hook up the accessor and method callbacks. It takes a list of -/// property descriptors, which can be constructed via the various -/// static methods on the base class. 
-/// -/// #### Example: -/// -/// class Example: public Napi::ObjectWrap { -/// public: -/// static void Initialize(Napi::Env& env, Napi::Object& target) { -/// Napi::Function constructor = DefineClass(env, "Example", { -/// InstanceAccessor<&Example::GetSomething, -/// &Example::SetSomething>("value"), -/// InstanceMethod<&Example::DoSomething>("doSomething"), -/// }); -/// target.Set("Example", constructor); -/// } -/// -/// Example(const Napi::CallbackInfo& info); // Constructor -/// Napi::Value GetSomething(const Napi::CallbackInfo& info); -/// void SetSomething(const Napi::CallbackInfo& info, const Napi::Value& -/// value); Napi::Value DoSomething(const Napi::CallbackInfo& info); -/// } -template -class ObjectWrap : public InstanceWrap, public Reference { - public: - ObjectWrap(const CallbackInfo& callbackInfo); - virtual ~ObjectWrap(); - - static T* Unwrap(Object wrapper); - - // Methods exposed to JavaScript must conform to one of these callback - // signatures. - using StaticVoidMethodCallback = void (*)(const CallbackInfo& info); - using StaticMethodCallback = Napi::Value (*)(const CallbackInfo& info); - using StaticGetterCallback = Napi::Value (*)(const CallbackInfo& info); - using StaticSetterCallback = void (*)(const CallbackInfo& info, - const Napi::Value& value); - - using PropertyDescriptor = ClassPropertyDescriptor; - - static Function DefineClass( - Napi::Env env, - const char* utf8name, - const std::initializer_list& properties, - void* data = nullptr); - static Function DefineClass(Napi::Env env, - const char* utf8name, - const std::vector& properties, - void* data = nullptr); - static PropertyDescriptor StaticMethod( - const char* utf8name, - StaticVoidMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor StaticMethod( - const char* utf8name, - StaticMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static 
PropertyDescriptor StaticMethod( - Symbol name, - StaticVoidMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor StaticMethod( - Symbol name, - StaticMethodCallback method, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticMethod( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticMethod( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticMethod( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticMethod( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor StaticAccessor( - const char* utf8name, - StaticGetterCallback getter, - StaticSetterCallback setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor StaticAccessor( - Symbol name, - StaticGetterCallback getter, - StaticSetterCallback setter, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticAccessor( - const char* utf8name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - template - static PropertyDescriptor StaticAccessor( - Symbol name, - napi_property_attributes attributes = napi_default, - void* data = nullptr); - static PropertyDescriptor StaticValue( - const char* utf8name, - Napi::Value value, - napi_property_attributes attributes = napi_default); - static PropertyDescriptor StaticValue( - Symbol name, - Napi::Value value, - napi_property_attributes attributes = napi_default); - static Napi::Value OnCalledAsFunction(const Napi::CallbackInfo& 
callbackInfo); - virtual void Finalize(Napi::Env env); - - private: - using This = ObjectWrap; - - static napi_value ConstructorCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value StaticVoidMethodCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value StaticMethodCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value StaticGetterCallbackWrapper(napi_env env, - napi_callback_info info); - static napi_value StaticSetterCallbackWrapper(napi_env env, - napi_callback_info info); - static void FinalizeCallback(napi_env env, void* data, void* hint); - static Function DefineClass(Napi::Env env, - const char* utf8name, - const size_t props_count, - const napi_property_descriptor* props, - void* data = nullptr); - - using StaticVoidMethodCallbackData = - MethodCallbackData; - using StaticMethodCallbackData = MethodCallbackData; - - using StaticAccessorCallbackData = - AccessorCallbackData; - - template - static napi_value WrappedMethod(napi_env env, - napi_callback_info info) NAPI_NOEXCEPT; - - template - struct StaticSetterTag {}; - - template - static napi_callback WrapStaticSetter(StaticSetterTag) NAPI_NOEXCEPT { - return &This::WrappedMethod; - } - static napi_callback WrapStaticSetter(StaticSetterTag) - NAPI_NOEXCEPT { - return nullptr; - } - - bool _construction_failed = true; -}; - -class HandleScope { - public: - HandleScope(napi_env env, napi_handle_scope scope); - explicit HandleScope(Napi::Env env); - ~HandleScope(); - - // Disallow copying to prevent double close of napi_handle_scope - NAPI_DISALLOW_ASSIGN_COPY(HandleScope) - - operator napi_handle_scope() const; - - Napi::Env Env() const; - - private: - napi_env _env; - napi_handle_scope _scope; -}; - -class EscapableHandleScope { - public: - EscapableHandleScope(napi_env env, napi_escapable_handle_scope scope); - explicit EscapableHandleScope(Napi::Env env); - ~EscapableHandleScope(); - - // Disallow copying to prevent double close of 
napi_escapable_handle_scope - NAPI_DISALLOW_ASSIGN_COPY(EscapableHandleScope) - - operator napi_escapable_handle_scope() const; - - Napi::Env Env() const; - Value Escape(napi_value escapee); - - private: - napi_env _env; - napi_escapable_handle_scope _scope; -}; - -#if (NAPI_VERSION > 2) -class CallbackScope { - public: - CallbackScope(napi_env env, napi_callback_scope scope); - CallbackScope(napi_env env, napi_async_context context); - virtual ~CallbackScope(); - - // Disallow copying to prevent double close of napi_callback_scope - NAPI_DISALLOW_ASSIGN_COPY(CallbackScope) - - operator napi_callback_scope() const; - - Napi::Env Env() const; - - private: - napi_env _env; - napi_callback_scope _scope; -}; -#endif - -class AsyncContext { - public: - explicit AsyncContext(napi_env env, const char* resource_name); - explicit AsyncContext(napi_env env, - const char* resource_name, - const Object& resource); - virtual ~AsyncContext(); - - AsyncContext(AsyncContext&& other); - AsyncContext& operator=(AsyncContext&& other); - NAPI_DISALLOW_ASSIGN_COPY(AsyncContext) - - operator napi_async_context() const; - - Napi::Env Env() const; - - private: - napi_env _env; - napi_async_context _context; -}; - -class AsyncWorker { - public: - virtual ~AsyncWorker(); - - // An async worker can be moved but cannot be copied. 
- AsyncWorker(AsyncWorker&& other); - AsyncWorker& operator=(AsyncWorker&& other); - NAPI_DISALLOW_ASSIGN_COPY(AsyncWorker) - - operator napi_async_work() const; - - Napi::Env Env() const; - - void Queue(); - void Cancel(); - void SuppressDestruct(); - - ObjectReference& Receiver(); - FunctionReference& Callback(); - - virtual void OnExecute(Napi::Env env); - virtual void OnWorkComplete(Napi::Env env, napi_status status); - - protected: - explicit AsyncWorker(const Function& callback); - explicit AsyncWorker(const Function& callback, const char* resource_name); - explicit AsyncWorker(const Function& callback, - const char* resource_name, - const Object& resource); - explicit AsyncWorker(const Object& receiver, const Function& callback); - explicit AsyncWorker(const Object& receiver, - const Function& callback, - const char* resource_name); - explicit AsyncWorker(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource); - - explicit AsyncWorker(Napi::Env env); - explicit AsyncWorker(Napi::Env env, const char* resource_name); - explicit AsyncWorker(Napi::Env env, - const char* resource_name, - const Object& resource); - - virtual void Execute() = 0; - virtual void OnOK(); - virtual void OnError(const Error& e); - virtual void Destroy(); - virtual std::vector GetResult(Napi::Env env); - - void SetError(const std::string& error); - - private: - static inline void OnAsyncWorkExecute(napi_env env, void* asyncworker); - static inline void OnAsyncWorkComplete(napi_env env, - napi_status status, - void* asyncworker); - - napi_env _env; - napi_async_work _work; - ObjectReference _receiver; - FunctionReference _callback; - std::string _error; - bool _suppress_destruct; -}; - -#if (NAPI_VERSION > 3 && !defined(__wasm32__)) -class ThreadSafeFunction { - public: - // This API may only be called from the main thread. 
- template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback, - FinalizerDataType* data); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount); - - // This API may only be called from the main thread. 
- template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - Finalizer finalizeCallback, - FinalizerDataType* data); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback); - - // This API may only be called from the main thread. - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data); - - ThreadSafeFunction(); - ThreadSafeFunction(napi_threadsafe_function tsFunctionValue); - - operator napi_threadsafe_function() const; - - // This API may be called from any thread. - napi_status BlockingCall() const; - - // This API may be called from any thread. - template - napi_status BlockingCall(Callback callback) const; - - // This API may be called from any thread. - template - napi_status BlockingCall(DataType* data, Callback callback) const; - - // This API may be called from any thread. 
- napi_status NonBlockingCall() const; - - // This API may be called from any thread. - template - napi_status NonBlockingCall(Callback callback) const; - - // This API may be called from any thread. - template - napi_status NonBlockingCall(DataType* data, Callback callback) const; - - // This API may only be called from the main thread. - void Ref(napi_env env) const; - - // This API may only be called from the main thread. - void Unref(napi_env env) const; - - // This API may be called from any thread. - napi_status Acquire() const; - - // This API may be called from any thread. - napi_status Release() const; - - // This API may be called from any thread. - napi_status Abort() const; - - struct ConvertibleContext { - template - operator T*() { - return static_cast(context); - } - void* context; - }; - - // This API may be called from any thread. - ConvertibleContext GetContext() const; - - private: - using CallbackWrapper = std::function; - - template - static ThreadSafeFunction New(napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data, - napi_finalize wrapper); - - napi_status CallInternal(CallbackWrapper* callbackWrapper, - napi_threadsafe_function_call_mode mode) const; - - static void CallJS(napi_env env, - napi_value jsCallback, - void* context, - void* data); - - napi_threadsafe_function _tsfn; -}; - -// A TypedThreadSafeFunction by default has no context (nullptr) and can -// accept any type (void) to its CallJs. -template -class TypedThreadSafeFunction { - public: - // This API may only be called from the main thread. - // Helper function that returns nullptr if running Node-API 5+, otherwise a - // non-empty, no-op Function. This provides the ability to specify at - // compile-time a callback parameter to `New` that safely does no action - // when targeting _any_ Node-API version. 
-#if NAPI_VERSION > 4 - static std::nullptr_t EmptyFunctionFactory(Napi::Env env); -#else - static Napi::Function EmptyFunctionFactory(Napi::Env env); -#endif - static Napi::Function FunctionOrEmpty(Napi::Env env, - Napi::Function& callback); - -#if NAPI_VERSION > 4 - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [missing] Resource [missing] Finalizer [missing] - template - static TypedThreadSafeFunction New( - napi_env env, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [missing] Resource [passed] Finalizer [missing] - template - static TypedThreadSafeFunction New( - napi_env env, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [missing] Resource [missing] Finalizer [passed] - template - static TypedThreadSafeFunction New( - napi_env env, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [missing] Resource [passed] Finalizer [passed] - template - static TypedThreadSafeFunction New( - napi_env env, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data = nullptr); -#endif - - // This API may only be called from the main thread. 
- // Creates a new threadsafe function with: - // Callback [passed] Resource [missing] Finalizer [missing] - template - static TypedThreadSafeFunction New( - napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [passed] Resource [passed] Finalizer [missing] - template - static TypedThreadSafeFunction New( - napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [passed] Resource [missing] Finalizer [passed] - template - static TypedThreadSafeFunction New( - napi_env env, - const Function& callback, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data = nullptr); - - // This API may only be called from the main thread. - // Creates a new threadsafe function with: - // Callback [passed] Resource [passed] Finalizer [passed] - template - static TypedThreadSafeFunction New( - napi_env env, - CallbackType callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data = nullptr); - - TypedThreadSafeFunction(); - TypedThreadSafeFunction(napi_threadsafe_function tsFunctionValue); - - operator napi_threadsafe_function() const; - - // This API may be called from any thread. - napi_status BlockingCall(DataType* data = nullptr) const; - - // This API may be called from any thread. 
- napi_status NonBlockingCall(DataType* data = nullptr) const; - - // This API may only be called from the main thread. - void Ref(napi_env env) const; - - // This API may only be called from the main thread. - void Unref(napi_env env) const; - - // This API may be called from any thread. - napi_status Acquire() const; - - // This API may be called from any thread. - napi_status Release() const; - - // This API may be called from any thread. - napi_status Abort() const; - - // This API may be called from any thread. - ContextType* GetContext() const; - - private: - template - static TypedThreadSafeFunction New( - napi_env env, - const Function& callback, - const Object& resource, - ResourceString resourceName, - size_t maxQueueSize, - size_t initialThreadCount, - ContextType* context, - Finalizer finalizeCallback, - FinalizerDataType* data, - napi_finalize wrapper); - - static void CallJsInternal(napi_env env, - napi_value jsCallback, - void* context, - void* data); - - protected: - napi_threadsafe_function _tsfn; -}; -template -class AsyncProgressWorkerBase : public AsyncWorker { - public: - virtual void OnWorkProgress(DataType* data) = 0; - class ThreadSafeData { - public: - ThreadSafeData(AsyncProgressWorkerBase* asyncprogressworker, DataType* data) - : _asyncprogressworker(asyncprogressworker), _data(data) {} - - AsyncProgressWorkerBase* asyncprogressworker() { - return _asyncprogressworker; - }; - DataType* data() { return _data; }; - - private: - AsyncProgressWorkerBase* _asyncprogressworker; - DataType* _data; - }; - void OnWorkComplete(Napi::Env env, napi_status status) override; - - protected: - explicit AsyncProgressWorkerBase(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource, - size_t queue_size = 1); - virtual ~AsyncProgressWorkerBase(); - -// Optional callback of Napi::ThreadSafeFunction only available after -// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 -#if NAPI_VERSION > 4 - explicit AsyncProgressWorkerBase(Napi::Env env, - const char* resource_name, - const Object& resource, - size_t queue_size = 1); -#endif - - static inline void OnAsyncWorkProgress(Napi::Env env, - Napi::Function jsCallback, - void* data); - - napi_status NonBlockingCall(DataType* data); - - private: - ThreadSafeFunction _tsfn; - bool _work_completed = false; - napi_status _complete_status; - static inline void OnThreadSafeFunctionFinalize( - Napi::Env env, void* data, AsyncProgressWorkerBase* context); -}; - -template -class AsyncProgressWorker : public AsyncProgressWorkerBase { - public: - virtual ~AsyncProgressWorker(); - - class ExecutionProgress { - friend class AsyncProgressWorker; - - public: - void Signal() const; - void Send(const T* data, size_t count) const; - - private: - explicit ExecutionProgress(AsyncProgressWorker* worker) : _worker(worker) {} - AsyncProgressWorker* const _worker; - }; - - void OnWorkProgress(void*) override; - - protected: - explicit AsyncProgressWorker(const Function& callback); - explicit AsyncProgressWorker(const Function& callback, - const char* resource_name); - explicit AsyncProgressWorker(const Function& callback, - const char* resource_name, - const Object& resource); - explicit AsyncProgressWorker(const Object& receiver, - const Function& callback); - explicit AsyncProgressWorker(const Object& receiver, - const Function& callback, - const char* resource_name); - explicit AsyncProgressWorker(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource); - -// Optional callback of Napi::ThreadSafeFunction only available after -// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 -#if NAPI_VERSION > 4 - explicit AsyncProgressWorker(Napi::Env env); - explicit AsyncProgressWorker(Napi::Env env, const char* resource_name); - explicit AsyncProgressWorker(Napi::Env env, - const char* resource_name, - const Object& resource); -#endif - virtual void Execute(const ExecutionProgress& progress) = 0; - virtual void OnProgress(const T* data, size_t count) = 0; - - private: - void Execute() override; - void Signal(); - void SendProgress_(const T* data, size_t count); - - std::mutex _mutex; - T* _asyncdata; - size_t _asyncsize; - bool _signaled; -}; - -template -class AsyncProgressQueueWorker - : public AsyncProgressWorkerBase> { - public: - virtual ~AsyncProgressQueueWorker(){}; - - class ExecutionProgress { - friend class AsyncProgressQueueWorker; - - public: - void Signal() const; - void Send(const T* data, size_t count) const; - - private: - explicit ExecutionProgress(AsyncProgressQueueWorker* worker) - : _worker(worker) {} - AsyncProgressQueueWorker* const _worker; - }; - - void OnWorkComplete(Napi::Env env, napi_status status) override; - void OnWorkProgress(std::pair*) override; - - protected: - explicit AsyncProgressQueueWorker(const Function& callback); - explicit AsyncProgressQueueWorker(const Function& callback, - const char* resource_name); - explicit AsyncProgressQueueWorker(const Function& callback, - const char* resource_name, - const Object& resource); - explicit AsyncProgressQueueWorker(const Object& receiver, - const Function& callback); - explicit AsyncProgressQueueWorker(const Object& receiver, - const Function& callback, - const char* resource_name); - explicit AsyncProgressQueueWorker(const Object& receiver, - const Function& callback, - const char* resource_name, - const Object& resource); - -// Optional callback of Napi::ThreadSafeFunction only available after -// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 -#if NAPI_VERSION > 4 - explicit AsyncProgressQueueWorker(Napi::Env env); - explicit AsyncProgressQueueWorker(Napi::Env env, const char* resource_name); - explicit AsyncProgressQueueWorker(Napi::Env env, - const char* resource_name, - const Object& resource); -#endif - virtual void Execute(const ExecutionProgress& progress) = 0; - virtual void OnProgress(const T* data, size_t count) = 0; - - private: - void Execute() override; - void Signal() const; - void SendProgress_(const T* data, size_t count); -}; -#endif // NAPI_VERSION > 3 && !defined(__wasm32__) - -// Memory management. -class MemoryManagement { - public: - static int64_t AdjustExternalMemory(Env env, int64_t change_in_bytes); -}; - -// Version management -class VersionManagement { - public: - static uint32_t GetNapiVersion(Env env); - static const napi_node_version* GetNodeVersion(Env env); -}; - -#if NAPI_VERSION > 5 -template -class Addon : public InstanceWrap { - public: - static inline Object Init(Env env, Object exports); - static T* Unwrap(Object wrapper); - - protected: - using AddonProp = ClassPropertyDescriptor; - void DefineAddon(Object exports, - const std::initializer_list& props); - Napi::Object DefineProperties(Object object, - const std::initializer_list& props); - - private: - Object entry_point_; -}; -#endif // NAPI_VERSION > 5 - -#ifdef NAPI_CPP_CUSTOM_NAMESPACE -} // namespace NAPI_CPP_CUSTOM_NAMESPACE -#endif - -} // namespace Napi - -// Inline implementations of all the above class methods are included here. 
-#include "napi-inl.h" - -#endif // SRC_NAPI_H_ diff --git a/node_modules/node-addon-api/package.json b/node_modules/node-addon-api/package.json deleted file mode 100644 index 3ec3776e..00000000 --- a/node_modules/node-addon-api/package.json +++ /dev/null @@ -1,456 +0,0 @@ -{ - "bugs": { - "url": "https://github.com/nodejs/node-addon-api/issues" - }, - "contributors": [ - { - "name": "Abhishek Kumar Singh", - "url": "https://github.com/abhi11210646" - }, - { - "name": "Alba Mendez", - "url": "https://github.com/jmendeth" - }, - { - "name": "Alexander Floh", - "url": "https://github.com/alexanderfloh" - }, - { - "name": "Ammar Faizi", - "url": "https://github.com/ammarfaizi2" - }, - { - "name": "András Timár, Dr", - "url": "https://github.com/timarandras" - }, - { - "name": "Andrew Petersen", - "url": "https://github.com/kirbysayshi" - }, - { - "name": "Anisha Rohra", - "url": "https://github.com/anisha-rohra" - }, - { - "name": "Anna Henningsen", - "url": "https://github.com/addaleax" - }, - { - "name": "Arnaud Botella", - "url": "https://github.com/BotellaA" - }, - { - "name": "Arunesh Chandra", - "url": "https://github.com/aruneshchandra" - }, - { - "name": "Azlan Mukhtar", - "url": "https://github.com/azlan" - }, - { - "name": "Ben Berman", - "url": "https://github.com/rivertam" - }, - { - "name": "Benjamin Byholm", - "url": "https://github.com/kkoopa" - }, - { - "name": "Bill Gallafent", - "url": "https://github.com/gallafent" - }, - { - "name": "blagoev", - "url": "https://github.com/blagoev" - }, - { - "name": "Bruce A. 
MacNaughton", - "url": "https://github.com/bmacnaughton" - }, - { - "name": "Cory Mickelson", - "url": "https://github.com/corymickelson" - }, - { - "name": "Daniel Bevenius", - "url": "https://github.com/danbev" - }, - { - "name": "Dante Calderón", - "url": "https://github.com/dantehemerson" - }, - { - "name": "Darshan Sen", - "url": "https://github.com/RaisinTen" - }, - { - "name": "David Halls", - "url": "https://github.com/davedoesdev" - }, - { - "name": "Deepak Rajamohan", - "url": "https://github.com/deepakrkris" - }, - { - "name": "Dmitry Ashkadov", - "url": "https://github.com/dmitryash" - }, - { - "name": "Dongjin Na", - "url": "https://github.com/nadongguri" - }, - { - "name": "Doni Rubiagatra", - "url": "https://github.com/rubiagatra" - }, - { - "name": "Eric Bickle", - "url": "https://github.com/ebickle" - }, - { - "name": "extremeheat", - "url": "https://github.com/extremeheat" - }, - { - "name": "Feng Yu", - "url": "https://github.com/F3n67u" - }, - { - "name": "Ferdinand Holzer", - "url": "https://github.com/fholzer" - }, - { - "name": "Gabriel Schulhof", - "url": "https://github.com/gabrielschulhof" - }, - { - "name": "Guenter Sandner", - "url": "https://github.com/gms1" - }, - { - "name": "Gus Caplan", - "url": "https://github.com/devsnek" - }, - { - "name": "Helio Frota", - "url": "https://github.com/helio-frota" - }, - { - "name": "Hitesh Kanwathirtha", - "url": "https://github.com/digitalinfinity" - }, - { - "name": "ikokostya", - "url": "https://github.com/ikokostya" - }, - { - "name": "Jack Xia", - "url": "https://github.com/JckXia" - }, - { - "name": "Jake Barnes", - "url": "https://github.com/DuBistKomisch" - }, - { - "name": "Jake Yoon", - "url": "https://github.com/yjaeseok" - }, - { - "name": "Jason Ginchereau", - "url": "https://github.com/jasongin" - }, - { - "name": "Jenny", - "url": "https://github.com/egg-bread" - }, - { - "name": "Jeroen Janssen", - "url": "https://github.com/japj" - }, - { - "name": "Jim Schlight", - "url": 
"https://github.com/jschlight" - }, - { - "name": "Jinho Bang", - "url": "https://github.com/romandev" - }, - { - "name": "José Expósito", - "url": "https://github.com/JoseExposito" - }, - { - "name": "joshgarde", - "url": "https://github.com/joshgarde" - }, - { - "name": "Julian Mesa", - "url": "https://github.com/julianmesa-gitkraken" - }, - { - "name": "Kasumi Hanazuki", - "url": "https://github.com/hanazuki" - }, - { - "name": "Kelvin", - "url": "https://github.com/kelvinhammond" - }, - { - "name": "Kevin Eady", - "url": "https://github.com/KevinEady" - }, - { - "name": "Kévin VOYER", - "url": "https://github.com/kecsou" - }, - { - "name": "kidneysolo", - "url": "https://github.com/kidneysolo" - }, - { - "name": "Koki Nishihara", - "url": "https://github.com/Nishikoh" - }, - { - "name": "Konstantin Tarkus", - "url": "https://github.com/koistya" - }, - { - "name": "Kyle Farnung", - "url": "https://github.com/kfarnung" - }, - { - "name": "Kyle Kovacs", - "url": "https://github.com/nullromo" - }, - { - "name": "legendecas", - "url": "https://github.com/legendecas" - }, - { - "name": "LongYinan", - "url": "https://github.com/Brooooooklyn" - }, - { - "name": "Lovell Fuller", - "url": "https://github.com/lovell" - }, - { - "name": "Luciano Martorella", - "url": "https://github.com/lmartorella" - }, - { - "name": "mastergberry", - "url": "https://github.com/mastergberry" - }, - { - "name": "Mathias Küsel", - "url": "https://github.com/mathiask88" - }, - { - "name": "Matteo Collina", - "url": "https://github.com/mcollina" - }, - { - "name": "Michael Dawson", - "url": "https://github.com/mhdawson" - }, - { - "name": "Michael Price", - "url": "https://github.com/mikepricedev" - }, - { - "name": "Michele Campus", - "url": "https://github.com/kYroL01" - }, - { - "name": "Mikhail Cheshkov", - "url": "https://github.com/mcheshkov" - }, - { - "name": "nempoBu4", - "url": "https://github.com/nempoBu4" - }, - { - "name": "Nicola Del Gobbo", - "url": 
"https://github.com/NickNaso" - }, - { - "name": "Nick Soggin", - "url": "https://github.com/iSkore" - }, - { - "name": "Nikolai Vavilov", - "url": "https://github.com/seishun" - }, - { - "name": "Nurbol Alpysbayev", - "url": "https://github.com/anurbol" - }, - { - "name": "pacop", - "url": "https://github.com/pacop" - }, - { - "name": "Peter Šándor", - "url": "https://github.com/petersandor" - }, - { - "name": "Philipp Renoth", - "url": "https://github.com/DaAitch" - }, - { - "name": "rgerd", - "url": "https://github.com/rgerd" - }, - { - "name": "Richard Lau", - "url": "https://github.com/richardlau" - }, - { - "name": "Rolf Timmermans", - "url": "https://github.com/rolftimmermans" - }, - { - "name": "Ross Weir", - "url": "https://github.com/ross-weir" - }, - { - "name": "Ryuichi Okumura", - "url": "https://github.com/okuryu" - }, - { - "name": "Saint Gabriel", - "url": "https://github.com/chineduG" - }, - { - "name": "Sampson Gao", - "url": "https://github.com/sampsongao" - }, - { - "name": "Sam Roberts", - "url": "https://github.com/sam-github" - }, - { - "name": "strager", - "url": "https://github.com/strager" - }, - { - "name": "Taylor Woll", - "url": "https://github.com/boingoing" - }, - { - "name": "Thomas Gentilhomme", - "url": "https://github.com/fraxken" - }, - { - "name": "Tim Rach", - "url": "https://github.com/timrach" - }, - { - "name": "Tobias Nießen", - "url": "https://github.com/tniessen" - }, - { - "name": "todoroff", - "url": "https://github.com/todoroff" - }, - { - "name": "Tux3", - "url": "https://github.com/tux3" - }, - { - "name": "Vlad Velmisov", - "url": "https://github.com/Velmisov" - }, - { - "name": "Vladimir Morozov", - "url": "https://github.com/vmoroz" - - }, - { - "name": "WenheLI", - "url": "https://github.com/WenheLI" - }, - { - "name": "Xuguang Mei", - "url": "https://github.com/meixg" - }, - { - "name": "Yohei Kishimoto", - "url": "https://github.com/morokosi" - }, - { - "name": "Yulong Wang", - "url": 
"https://github.com/fs-eire" - }, - { - "name": "Ziqiu Zhao", - "url": "https://github.com/ZzqiZQute" - }, - { - "name": "Feng Yu", - "url": "https://github.com/F3n67u" - } - ], - "description": "Node.js API (Node-API)", - "devDependencies": { - "benchmark": "^2.1.4", - "bindings": "^1.5.0", - "clang-format": "^1.4.0", - "eslint": "^7.32.0", - "eslint-config-semistandard": "^16.0.0", - "eslint-config-standard": "^16.0.3", - "eslint-plugin-import": "^2.24.2", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", - "fs-extra": "^9.0.1", - "path": "^0.12.7", - "pre-commit": "^1.2.2", - "safe-buffer": "^5.1.1" - }, - "directories": {}, - "gypfile": false, - "homepage": "https://github.com/nodejs/node-addon-api", - "keywords": [ - "n-api", - "napi", - "addon", - "native", - "bindings", - "c", - "c++", - "nan", - "node-addon-api" - ], - "license": "MIT", - "main": "index.js", - "name": "node-addon-api", - "readme": "README.md", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/node-addon-api.git" - }, - "files": [ - "*.{c,h,gyp,gypi}", - "package-support.json", - "tools/" - ], - "scripts": { - "prebenchmark": "node-gyp rebuild -C benchmark", - "benchmark": "node benchmark", - "pretest": "node-gyp rebuild -C test", - "test": "node test", - "test:debug": "node-gyp rebuild -C test --debug && NODE_API_BUILD_CONFIG=Debug node ./test/index.js", - "predev": "node-gyp rebuild -C test --debug", - "dev": "node test", - "predev:incremental": "node-gyp configure build -C test --debug", - "dev:incremental": "node test", - "doc": "doxygen doc/Doxyfile", - "lint": "node tools/eslint-format && node tools/clang-format", - "lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix" - }, - "pre-commit": "lint", - "version": "5.1.0", - "support": true -} diff --git a/node_modules/node-addon-api/tools/check-napi.js b/node_modules/node-addon-api/tools/check-napi.js deleted file mode 100644 index 9199af33..00000000 --- 
a/node_modules/node-addon-api/tools/check-napi.js +++ /dev/null @@ -1,99 +0,0 @@ -'use strict'; -// Descend into a directory structure and, for each file matching *.node, output -// based on the imports found in the file whether it's an N-API module or not. - -const fs = require('fs'); -const path = require('path'); - -// Read the output of the command, break it into lines, and use the reducer to -// decide whether the file is an N-API module or not. -function checkFile (file, command, argv, reducer) { - const child = require('child_process').spawn(command, argv, { - stdio: ['inherit', 'pipe', 'inherit'] - }); - let leftover = ''; - let isNapi; - child.stdout.on('data', (chunk) => { - if (isNapi === undefined) { - chunk = (leftover + chunk.toString()).split(/[\r\n]+/); - leftover = chunk.pop(); - isNapi = chunk.reduce(reducer, isNapi); - if (isNapi !== undefined) { - child.kill(); - } - } - }); - child.on('close', (code, signal) => { - if ((code === null && signal !== null) || (code !== 0)) { - console.log( - command + ' exited with code: ' + code + ' and signal: ' + signal); - } else { - // Green if it's a N-API module, red otherwise. - console.log( - '\x1b[' + (isNapi ? '42' : '41') + 'm' + - (isNapi ? ' N-API' : 'Not N-API') + - '\x1b[0m: ' + file); - } - }); -} - -// Use nm -a to list symbols. -function checkFileUNIX (file) { - checkFile(file, 'nm', ['-a', file], (soFar, line) => { - if (soFar === undefined) { - line = line.match(/([0-9a-f]*)? ([a-zA-Z]) (.*$)/); - if (line[2] === 'U') { - if (/^napi/.test(line[3])) { - soFar = true; - } - } - } - return soFar; - }); -} - -// Use dumpbin /imports to list symbols. -function checkFileWin32 (file) { - checkFile(file, 'dumpbin', ['/imports', file], (soFar, line) => { - if (soFar === undefined) { - line = line.match(/([0-9a-f]*)? 
+([a-zA-Z0-9]) (.*$)/); - if (line && /^napi/.test(line[line.length - 1])) { - soFar = true; - } - } - return soFar; - }); -} - -// Descend into a directory structure and pass each file ending in '.node' to -// one of the above checks, depending on the OS. -function recurse (top) { - fs.readdir(top, (error, items) => { - if (error) { - throw new Error('error reading directory ' + top + ': ' + error); - } - items.forEach((item) => { - item = path.join(top, item); - fs.stat(item, ((item) => (error, stats) => { - if (error) { - throw new Error('error about ' + item + ': ' + error); - } - if (stats.isDirectory()) { - recurse(item); - } else if (/[.]node$/.test(item) && - // Explicitly ignore files called 'nothing.node' because they are - // artefacts of node-addon-api having identified a version of - // Node.js that ships with a correct implementation of N-API. - path.basename(item) !== 'nothing.node') { - process.platform === 'win32' - ? checkFileWin32(item) - : checkFileUNIX(item); - } - })(item)); - }); - }); -} - -// Start with the directory given on the command line or the current directory -// if nothing was given. -recurse(process.argv.length > 3 ? process.argv[2] : '.'); diff --git a/node_modules/node-addon-api/tools/conversion.js b/node_modules/node-addon-api/tools/conversion.js deleted file mode 100755 index f89245ac..00000000 --- a/node_modules/node-addon-api/tools/conversion.js +++ /dev/null @@ -1,301 +0,0 @@ -#! 
/usr/bin/env node - -'use strict'; - -const fs = require('fs'); -const path = require('path'); - -const args = process.argv.slice(2); -const dir = args[0]; -if (!dir) { - console.log('Usage: node ' + path.basename(__filename) + ' '); - process.exit(1); -} - -const NodeApiVersion = require('../package.json').version; - -const disable = args[1]; -let ConfigFileOperations; -if (disable !== '--disable' && dir !== '--disable') { - ConfigFileOperations = { - 'package.json': [ - [/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'], - [/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, ''] - ], - 'binding.gyp': [ - [/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], - [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'], - [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], - [/Nan::New\(([\w\d:]+)\)->GetFunction\(\)/g, 'Napi::Function::New(env, $1)'], - [/Nan::New\(([\w\d:]+)\)->GetFunction()/g, 'Napi::Function::New(env, $1);'], - [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], - [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], - - // FunctionTemplate to FunctionReference - [/Nan::Persistent<(v8::)*FunctionTemplate>/g, 'Napi::FunctionReference'], - [/Nan::Persistent<(v8::)*Function>/g, 'Napi::FunctionReference'], - [/v8::Local/g, 'Napi::FunctionReference'], - [/Local/g, 'Napi::FunctionReference'], - [/v8::FunctionTemplate/g, 'Napi::FunctionReference'], - [/FunctionTemplate/g, 'Napi::FunctionReference'], - - [/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'], - 
[/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm, - '});\n\n' + - '$1constructor = Napi::Persistent($3);\n' + - '$1constructor.SuppressDestruct();\n' + - '$1target.Set("$2", $3);'], - - // TODO: Other attribute combinations - [/static_cast\(ReadOnly\s*\|\s*DontDelete\)/gm, - 'static_cast(napi_enumerable | napi_configurable)'], - - [/([\w\d:<>]+?)::Cast\((.+?)\)/g, '$2.As<$1>()'], - - [/\*Nan::Utf8String\(([^)]+)\)/g, '$1->As().Utf8Value().c_str()'], - [/Nan::Utf8String +(\w+)\(([^)]+)\)/g, 'std::string $1 = $2.As()'], - [/Nan::Utf8String/g, 'std::string'], - - [/v8::String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], - [/String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], - [/\.length\(\)/g, '.Length()'], - - [/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'], - - [/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'], - [/(\w+)\(([^)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3'], - - // HandleOKCallback to OnOK - [/HandleOKCallback/g, 'OnOK'], - // HandleErrorCallback to OnError - [/HandleErrorCallback/g, 'OnError'], - - // ex. .As() to .As() - [/\.As\(\)/g, '.As()'], - [/\.As<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>\(\)/g, '.As()'], - - // ex. 
Nan::New(info[0]) to Napi::Number::New(info[0]) - [/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'], - [/Nan::New\(([0-9.]+)\)/g, 'Napi::Number::New(env, $1)'], - [/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'], - [/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'], - [/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'], - [/Nan::New<(.+?)>\(\)/g, 'Napi::$1::New(env)'], - [/Nan::New<(v8::)*(.+?)>\(/g, 'Napi::$2::New(env, '], - [/Nan::New<(.+?)>\(/g, 'Napi::$1::New(env, '], - [/Nan::NewBuffer\(/g, 'Napi::Buffer::New(env, '], - // TODO: Properly handle this - [/Nan::New\(/g, 'Napi::New(env, '], - - [/\.IsInt32\(\)/g, '.IsNumber()'], - [/->IsInt32\(\)/g, '.IsNumber()'], - - [/(.+?)->BooleanValue\(\)/g, '$1.As().Value()'], - [/(.+?)->Int32Value\(\)/g, '$1.As().Int32Value()'], - [/(.+?)->Uint32Value\(\)/g, '$1.As().Uint32Value()'], - [/(.+?)->IntegerValue\(\)/g, '$1.As().Int64Value()'], - [/(.+?)->NumberValue\(\)/g, '$1.As().DoubleValue()'], - - // ex. Nan::To(info[0]) to info[0].Value() - [/Nan::To\((.+?)\)/g, '$2.To()'], - [/Nan::To<(Boolean|String|Number|Object|Array|Symbol|Function)>\((.+?)\)/g, '$2.To()'], - // ex. Nan::To(info[0]) to info[0].As().Value() - [/Nan::To\((.+?)\)/g, '$1.As().Value()'], - // ex. Nan::To(info[0]) to info[0].As().Int32Value() - [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], - // ex. Nan::To(info[0]) to info[0].As().Int32Value() - [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], - // ex. Nan::To(info[0]) to info[0].As().Uint32Value() - [/Nan::To\((.+?)\)/g, '$1.As().Uint32Value()'], - // ex. Nan::To(info[0]) to info[0].As().Int64Value() - [/Nan::To\((.+?)\)/g, '$1.As().Int64Value()'], - // ex. Nan::To(info[0]) to info[0].As().FloatValue() - [/Nan::To\((.+?)\)/g, '$1.As().FloatValue()'], - // ex. 
Nan::To(info[0]) to info[0].As().DoubleValue() - [/Nan::To\((.+?)\)/g, '$1.As().DoubleValue()'], - - [/Nan::New\((\w+)\)->HasInstance\((\w+)\)/g, '$2.InstanceOf($1.Value())'], - - [/Nan::Has\(([^,]+),\s*/gm, '($1).Has('], - [/\.Has\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Has($1)'], - [/\.Has\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Has($1)'], - - [/Nan::Get\(([^,]+),\s*/gm, '($1).Get('], - [/\.Get\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Get($1)'], - [/\.Get\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Get($1)'], - - [/Nan::Set\(([^,]+),\s*/gm, '($1).Set('], - [/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'], - [/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'], - - // ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer() - [/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'], - // ex. node::Buffer::Length(info[0]) to info[0].Length() - [/node::Buffer::Length\((.+?)\)/g, '$1.As>().Length()'], - // ex. node::Buffer::Data(info[0]) to info[0].Data() - [/node::Buffer::Data\((.+?)\)/g, '$1.As>().Data()'], - [/Nan::CopyBuffer\(/g, 'Napi::Buffer::Copy(env, '], - - // Nan::AsyncQueueWorker(worker) - [/Nan::AsyncQueueWorker\((.+)\);/g, '$1.Queue();'], - [/Nan::(Undefined|Null|True|False)\(\)/g, 'env.$1()'], - - // Nan::ThrowError(error) to Napi::Error::New(env, error).ThrowAsJavaScriptException() - [/([ ]*)return Nan::Throw(\w*?)Error\((.+?)\);/g, '$1Napi::$2Error::New(env, $3).ThrowAsJavaScriptException();\n$1return env.Null();'], - [/Nan::Throw(\w*?)Error\((.+?)\);\n(\s*)return;/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n$3return env.Null();'], - [/Nan::Throw(\w*?)Error\((.+?)\);/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n'], - // Nan::RangeError(error) to Napi::RangeError::New(env, error) - [/Nan::(\w*?)Error\((.+)\)/g, 'Napi::$1Error::New(env, $2)'], - - [/Nan::Set\((.+?),\n* *(.+?),\n* *(.+?),\n* *(.+?)\)/g, '$1.Set($2, $3, $4)'], - - [/Nan::(Escapable)?HandleScope\s+(\w+)\s*;/g, 
'Napi::$1HandleScope $2(env);'], - [/Nan::(Escapable)?HandleScope/g, 'Napi::$1HandleScope'], - [/Nan::ForceSet\(([^,]+), ?/g, '$1->DefineProperty('], - [/\.ForceSet\(Napi::String::New\(env, "(\w+)"\),\s*?/g, '.DefineProperty("$1", '], - // [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ], - [/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('], - - [/(.+)->Set\(/g, '$1.Set('], - - [/Nan::Callback/g, 'Napi::FunctionReference'], - - [/Nan::Persistent/g, 'Napi::ObjectReference'], - [/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'], - - [/(\w+)\*\s+(\w+)\s*=\s*Nan::ObjectWrap::Unwrap<\w+>\(info\.This\(\)\);/g, '$1* $2 = this;'], - [/Nan::ObjectWrap::Unwrap<(\w+)>\((.*)\);/g, '$2.Unwrap<$1>();'], - - [/Nan::NAN_METHOD_RETURN_TYPE/g, 'void'], - [/NAN_INLINE/g, 'inline'], - - [/Nan::NAN_METHOD_ARGS_TYPE/g, 'const Napi::CallbackInfo&'], - [/NAN_METHOD\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], - [/static\s*NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], - [/NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], - [/static\s*NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], - [/NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], - [/void Init\((v8::)*Local<(v8::)*Object> exports\)/g, 'Napi::Object Init(Napi::Env env, Napi::Object exports)'], - [/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'], - [/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'], - - [/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'], - [/constructor_template/g, 'constructor'], - - [/Nan::FunctionCallbackInfo<(v8::)?Value>[ ]*& [ ]*info\)[ ]*{\n*([ ]*)/gm, 'Napi::CallbackInfo& info) {\n$2Napi::Env env = info.Env();\n$2'], - [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&\s*info\);/g, 
'Napi::CallbackInfo& info);'], - [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&/g, 'Napi::CallbackInfo&'], - - [/Buffer::HasInstance\(([^)]+)\)/g, '$1.IsBuffer()'], - - [/info\[(\d+)\]->/g, 'info[$1].'], - [/info\[([\w\d]+)\]->/g, 'info[$1].'], - [/info\.This\(\)->/g, 'info.This().'], - [/->Is(Object|String|Int32|Number)\(\)/g, '.Is$1()'], - [/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'], - [/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'], - - // ex. Local to Napi::Value - [/v8::Local/g, 'Napi::$1'], - [/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'], - - // Declare an env in helper functions that take a Napi::Value - [/(\w+)\(Napi::Value (\w+)(,\s*[^()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4'], - - // delete #include and/or - [/#include +(<|")(?:node|nan).h("|>)/g, '#include $1napi.h$2\n#include $1uv.h$2'], - // NODE_MODULE to NODE_API_MODULE - [/NODE_MODULE/g, 'NODE_API_MODULE'], - [/Nan::/g, 'Napi::'], - [/nan.h/g, 'napi.h'], - - // delete .FromJust() - [/\.FromJust\(\)/g, ''], - // delete .ToLocalCheck() - [/\.ToLocalChecked\(\)/g, ''], - [/^.*->SetInternalFieldCount\(.*$/gm, ''], - - // replace using node; and/or using v8; to using Napi; - [/using (node|v8);/g, 'using Napi;'], - [/using namespace (node|Nan|v8);/g, 'using namespace Napi;'], - // delete using v8::Local; - [/using v8::Local;\n/g, ''], - // replace using v8::XXX; with using Napi::XXX - [/using v8::([A-Za-z]+);/g, 'using Napi::$1;'] - -]; - -const paths = listFiles(dir); -paths.forEach(function (dirEntry) { - const filename = dirEntry.split('\\').pop().split('/').pop(); - - // Check whether the file is a source file or a config file - // then execute function accordingly - const sourcePattern = /.+\.h|.+\.cc|.+\.cpp/; - if (sourcePattern.test(filename)) { - convertFile(dirEntry, SourceFileOperations); - } else if (ConfigFileOperations[filename] != null) { - convertFile(dirEntry, 
ConfigFileOperations[filename]); - } -}); - -function listFiles (dir, filelist) { - const files = fs.readdirSync(dir); - filelist = filelist || []; - files.forEach(function (file) { - if (file === 'node_modules') { - return; - } - - if (fs.statSync(path.join(dir, file)).isDirectory()) { - filelist = listFiles(path.join(dir, file), filelist); - } else { - filelist.push(path.join(dir, file)); - } - }); - return filelist; -} - -function convert (content, operations) { - for (let i = 0; i < operations.length; i++) { - const operation = operations[i]; - content = content.replace(operation[0], operation[1]); - } - return content; -} - -function convertFile (fileName, operations) { - fs.readFile(fileName, 'utf-8', function (err, file) { - if (err) throw err; - - file = convert(file, operations); - - fs.writeFile(fileName, file, function (err) { - if (err) throw err; - }); - }); -} diff --git a/node_modules/node-addon-api/tools/eslint-format.js b/node_modules/node-addon-api/tools/eslint-format.js deleted file mode 100644 index 1dda4449..00000000 --- a/node_modules/node-addon-api/tools/eslint-format.js +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env node - -const spawn = require('child_process').spawnSync; - -const filesToCheck = '*.js'; -const FORMAT_START = process.env.FORMAT_START || 'main'; -const IS_WIN = process.platform === 'win32'; -const ESLINT_PATH = IS_WIN ? 
'node_modules\\.bin\\eslint.cmd' : 'node_modules/.bin/eslint'; - -function main (args) { - let fix = false; - while (args.length > 0) { - switch (args[0]) { - case '-f': - case '--fix': - fix = true; - break; - default: - } - args.shift(); - } - - // Check js files that change on unstaged file - const fileUnStaged = spawn( - 'git', - ['diff', '--name-only', FORMAT_START, filesToCheck], - { - encoding: 'utf-8' - } - ); - - // Check js files that change on staged file - const fileStaged = spawn( - 'git', - ['diff', '--name-only', '--cached', FORMAT_START, filesToCheck], - { - encoding: 'utf-8' - } - ); - - const options = [ - ...fileStaged.stdout.split('\n').filter((f) => f !== ''), - ...fileUnStaged.stdout.split('\n').filter((f) => f !== '') - ]; - - if (fix) { - options.push('--fix'); - } - - const result = spawn(ESLINT_PATH, [...options], { - encoding: 'utf-8' - }); - - if (result.error && result.error.errno === 'ENOENT') { - console.error('Eslint not found! Eslint is supposed to be found at ', ESLINT_PATH); - return 2; - } - - if (result.status === 1) { - console.error('Eslint error:', result.stdout); - const fixCmd = 'npm run lint:fix'; - console.error(`ERROR: please run "${fixCmd}" to format changes in your commit - Note that when running the command locally, please keep your local - main branch and working branch up to date with nodejs/node-addon-api - to exclude un-related complains. - Or you can run "env FORMAT_START=upstream/main ${fixCmd}". 
- Also fix JS files by yourself if necessary.`); - return 1; - } - - if (result.stderr) { - console.error('Error running eslint:', result.stderr); - return 2; - } -} - -if (require.main === module) { - process.exitCode = main(process.argv.slice(2)); -} diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md deleted file mode 100644 index f620568e..00000000 --- a/node_modules/tar/README.md +++ /dev/null @@ -1,1080 +0,0 @@ -# node-tar - -Fast and full-featured Tar for Node.js - -The API is designed to mimic the behavior of `tar(1)` on unix systems. -If you are familiar with how tar works, most of this will hopefully be -straightforward for you. If not, then hopefully this module can teach -you useful unix skills that may come in handy someday :) - -## Background - -A "tar file" or "tarball" is an archive of file system entries -(directories, files, links, etc.) The name comes from "tape archive". -If you run `man tar` on almost any Unix command line, you'll learn -quite a bit about what it can do, and its history. - -Tar has 5 main top-level commands: - -* `c` Create an archive -* `r` Replace entries within an archive -* `u` Update entries within an archive (ie, replace if they're newer) -* `t` List out the contents of an archive -* `x` Extract an archive to disk - -The other flags and options modify how this top level function works. - -## High-Level API - -These 5 functions are the high-level API. All of them have a -single-character name (for unix nerds familiar with `tar(1)`) as well -as a long name (for everyone else). - -All the high-level functions take the following arguments, all three -of which are optional and may be omitted. - -1. `options` - An optional object specifying various options -2. `paths` - An array of paths to add or extract -3. `callback` - Called when the command is completed, if async. (If - sync or no file specified, providing a callback throws a - `TypeError`.) 
- -If the command is sync (ie, if `options.sync=true`), then the -callback is not allowed, since the action will be completed immediately. - -If a `file` argument is specified, and the command is async, then a -`Promise` is returned. In this case, if async, a callback may be -provided which is called when the command is completed. - -If a `file` option is not specified, then a stream is returned. For -`create`, this is a readable stream of the generated archive. For -`list` and `extract` this is a writable stream that an archive should -be written into. If a file is not specified, then a callback is not -allowed, because you're already getting a stream to work with. - -`replace` and `update` only work on existing archives, and so require -a `file` argument. - -Sync commands without a file argument return a stream that acts on its -input immediately in the same tick. For readable streams, this means -that all of the data is immediately available by calling -`stream.read()`. For writable streams, it will be acted upon as soon -as it is provided, but this can be at any time. - -### Warnings and Errors - -Tar emits warnings and errors for recoverable and unrecoverable situations, -respectively. In many cases, a warning only affects a single entry in an -archive, or is simply informing you that it's modifying an entry to comply -with the settings provided. - -Unrecoverable warnings will always raise an error (ie, emit `'error'` on -streaming actions, throw for non-streaming sync actions, reject the -returned Promise for non-streaming async operations, or call a provided -callback with an `Error` as the first argument). Recoverable errors will -raise an error only if `strict: true` is set in the options. - -Respond to (recoverable) warnings by listening to the `warn` event. -Handlers receive 3 arguments: - -- `code` String. One of the error codes below. This may not match - `data.code`, which preserves the original error code from fs and zlib. -- `message` String. 
More details about the error. -- `data` Metadata about the error. An `Error` object for errors raised by - fs and zlib. All fields are attached to errors raisd by tar. Typically - contains the following fields, as relevant: - - `tarCode` The tar error code. - - `code` Either the tar error code, or the error code set by the - underlying system. - - `file` The archive file being read or written. - - `cwd` Working directory for creation and extraction operations. - - `entry` The entry object (if it could be created) for `TAR_ENTRY_INFO`, - `TAR_ENTRY_INVALID`, and `TAR_ENTRY_ERROR` warnings. - - `header` The header object (if it could be created, and the entry could - not be created) for `TAR_ENTRY_INFO` and `TAR_ENTRY_INVALID` warnings. - - `recoverable` Boolean. If `false`, then the warning will emit an - `error`, even in non-strict mode. - -#### Error Codes - -* `TAR_ENTRY_INFO` An informative error indicating that an entry is being - modified, but otherwise processed normally. For example, removing `/` or - `C:\` from absolute paths if `preservePaths` is not set. - -* `TAR_ENTRY_INVALID` An indication that a given entry is not a valid tar - archive entry, and will be skipped. This occurs when: - - a checksum fails, - - a `linkpath` is missing for a link type, or - - a `linkpath` is provided for a non-link type. - - If every entry in a parsed archive raises an `TAR_ENTRY_INVALID` error, - then the archive is presumed to be unrecoverably broken, and - `TAR_BAD_ARCHIVE` will be raised. - -* `TAR_ENTRY_ERROR` The entry appears to be a valid tar archive entry, but - encountered an error which prevented it from being unpacked. This occurs - when: - - an unrecoverable fs error happens during unpacking, - - an entry is trying to extract into an excessively deep - location (by default, limited to 1024 subfolders), - - an entry has `..` in the path and `preservePaths` is not set, or - - an entry is extracting through a symbolic link, when `preservePaths` is - not set. 
- -* `TAR_ENTRY_UNSUPPORTED` An indication that a given entry is - a valid archive entry, but of a type that is unsupported, and so will be - skipped in archive creation or extracting. - -* `TAR_ABORT` When parsing gzipped-encoded archives, the parser will - abort the parse process raise a warning for any zlib errors encountered. - Aborts are considered unrecoverable for both parsing and unpacking. - -* `TAR_BAD_ARCHIVE` The archive file is totally hosed. This can happen for - a number of reasons, and always occurs at the end of a parse or extract: - - - An entry body was truncated before seeing the full number of bytes. - - The archive contained only invalid entries, indicating that it is - likely not an archive, or at least, not an archive this library can - parse. - - `TAR_BAD_ARCHIVE` is considered informative for parse operations, but - unrecoverable for extraction. Note that, if encountered at the end of an - extraction, tar WILL still have extracted as much it could from the - archive, so there may be some garbage files to clean up. - -Errors that occur deeper in the system (ie, either the filesystem or zlib) -will have their error codes left intact, and a `tarCode` matching one of -the above will be added to the warning metadata or the raised error object. - -Errors generated by tar will have one of the above codes set as the -`error.code` field as well, but since errors originating in zlib or fs will -have their original codes, it's better to read `error.tarCode` if you wish -to see how tar is handling the issue. - -### Examples - -The API mimics the `tar(1)` command line functionality, with aliases -for more human-readable option and function names. The goal is that -if you know how to use `tar(1)` in Unix, then you know how to use -`require('tar')` in JavaScript. - -To replicate `tar czf my-tarball.tgz files and folders`, you'd do: - -```js -tar.c( - { - gzip: , - file: 'my-tarball.tgz' - }, - ['some', 'files', 'and', 'folders'] -).then(_ => { .. 
tarball has been created .. }) -``` - -To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: - -```js -tar.c( // or tar.create - { - gzip: - }, - ['some', 'files', 'and', 'folders'] -).pipe(fs.createWriteStream('my-tarball.tgz')) -``` - -To replicate `tar xf my-tarball.tgz` you'd do: - -```js -tar.x( // or tar.extract( - { - file: 'my-tarball.tgz' - } -).then(_=> { .. tarball has been dumped in cwd .. }) -``` - -To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`: - -```js -fs.createReadStream('my-tarball.tgz').pipe( - tar.x({ - strip: 1, - C: 'some-dir' // alias for cwd:'some-dir', also ok - }) -) -``` - -To replicate `tar tf my-tarball.tgz`, do this: - -```js -tar.t({ - file: 'my-tarball.tgz', - onentry: entry => { .. do whatever with it .. } -}) -``` - -For example, to just get the list of filenames from an archive: - -```js -const getEntryFilenames = async tarballFilename => { - const filenames = [] - await tar.t({ - file: tarballFilename, - onentry: entry => filenames.push(entry.path), - }) - return filenames -} -``` - -To replicate `cat my-tarball.tgz | tar t` do: - -```js -fs.createReadStream('my-tarball.tgz') - .pipe(tar.t()) - .on('entry', entry => { .. do whatever with it .. }) -``` - -To do anything synchronous, add `sync: true` to the options. Note -that sync functions don't take a callback and don't return a promise. -When the function returns, it's already done. Sync methods without a -file argument return a sync stream, which flushes immediately. But, -of course, it still won't be done until you `.end()` it. - -```js -const getEntryFilenamesSync = tarballFilename => { - const filenames = [] - tar.t({ - file: tarballFilename, - onentry: entry => filenames.push(entry.path), - sync: true, - }) - return filenames -} -``` - -To filter entries, add `filter: ` to the options. -Tar-creating methods call the filter with `filter(path, stat)`. -Tar-reading methods (including extraction) call the filter with -`filter(path, entry)`. 
The filter is called in the `this`-context of -the `Pack` or `Unpack` stream object. - -The arguments list to `tar t` and `tar x` specify a list of filenames -to extract or list, so they're equivalent to a filter that tests if -the file is in the list. - -For those who _aren't_ fans of tar's single-character command names: - -``` -tar.c === tar.create -tar.r === tar.replace (appends to archive, file is required) -tar.u === tar.update (appends if newer, file is required) -tar.x === tar.extract -tar.t === tar.list -``` - -Keep reading for all the command descriptions and options, as well as -the low-level API that they are built on. - -### tar.c(options, fileList, callback) [alias: tar.create] - -Create a tarball archive. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Write the tarball archive to the specified filename. If this - is specified, then the callback will be fired when the file has been - written, and a promise will be returned that resolves when the file - is written. If a filename is not specified, then a Readable Stream - will be returned which will emit the file data. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. If this is set, - and a file is not provided, then the resulting stream will already - have the data ready to `read` or `emit('data')` as soon as you - request it. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. 
[Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `mode` The mode to set on the created file archive -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -The following options are mostly internal, but can be modified in some -advanced use cases, such as re-using caches between runs. - -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. 
-- `readdirCache` A Map object that caches calls to `readdir`. -- `jobs` A number specifying how many concurrent jobs to run. - Defaults to 4. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. - -### tar.x(options, fileList, callback) [alias: tar.extract] - -Extract a tarball archive. - -The `fileList` is an array of paths to extract from the tarball. If -no paths are provided, then all the entries are extracted. - -If the archive is gzipped, then tar will detect this and unzip it. - -Note that all directories that are created will be forced to be -writable, readable, and listable by their owner, to avoid cases where -a directory prevents extraction of child entries by virtue of its -mode. - -Most extraction errors will cause a `warn` event to be emitted. If -the `cwd` is missing, or not a directory, then the extraction will -fail completely. - -The following options are supported: - -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a - directory. [Alias: `C`] -- `file` The archive file to extract. If not specified, then a - Writable stream is returned where the archive data should be - written. [Alias: `f`] -- `sync` Create files and directories synchronously. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the - archive, or `false` to skip it. -- `newer` Set to true to keep the existing file on disk if it's newer - than the file in the archive. [Alias: `keep-newer`, - `keep-newer-files`] -- `keep` Do not overwrite existing files. In particular, if a file - appears more than once in an archive, later copies will not - overwrite earlier copies. [Alias: `k`, `keep-existing`] -- `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. 
By default, `/` is stripped from - absolute paths, `..` paths are not extracted, and any file whose - location would be modified by a symbolic link is not extracted. - [Alias: `P`] -- `unlink` Unlink files before creating them. Without this option, - tar overwrites existing files, which preserves existing hardlinks. - With this option, existing hardlinks will be broken, as will any - symlink that would affect the location of an extracted file. [Alias: - `U`] -- `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that - the pathname is edited after applying the filter, but before - security checks. [Alias: `strip-components`, `stripComponents`] -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `preserveOwner` If true, tar will set the `uid` and `gid` of - extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If - false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in - `tar(1)`, but ACLs and other system-specific data is never unpacked - in this implementation, and modes are set by default already. - [Alias: `p`] -- `uid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `gid` option. -- `gid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `uid` option. 
-- `noMtime` Set to true to omit writing `mtime` value for extracted - entries. [Alias: `m`, `no-mtime`] -- `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, - then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. (To exclude items from extraction, use - the `filter` option described above.) -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. -- `maxDepth` The maximum depth of subfolders to extract into. This - defaults to 1024. Anything deeper than the limit will raise a - warning and skip the entry. Set to `Infinity` to remove the - limitation. - -The following options are mostly internal, but can be modified in some -advanced use cases, such as re-using caches between runs. - -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `umask` Filter the modes of entries like `process.umask()`. -- `dmode` Default mode for directories -- `fmode` Default mode for files -- `dirCache` A Map object of which directories exist. -- `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. - -Note that using an asynchronous stream type with the `transform` -option will cause undefined behavior in sync extractions. -[MiniPass](http://npm.im/minipass)-based streams are designed for this -use case. 
- -### tar.t(options, fileList, callback) [alias: tar.list] - -List the contents of a tarball archive. - -The `fileList` is an array of paths to list from the tarball. If -no paths are provided, then all the entries are listed. - -If the archive is gzipped, then tar will detect this and unzip it. - -If the `file` option is _not_ provided, then returns an event emitter that -emits `entry` events with `tar.ReadEntry` objects. However, they don't -emit `'data'` or `'end'` events. (If you want to get actual readable -entries, use the `tar.Parse` class instead.) - -If a `file` option _is_ provided, then the return value will be a promise -that resolves when the file has been fully traversed in async mode, or -`undefined` if `sync: true` is set. Thus, you _must_ specify an `onentry` -method in order to do anything useful with the data it parses. - -The following options are supported: - -- `file` The archive file to list. If not specified, then a - Writable stream is returned where the archive data should be - written. [Alias: `f`] -- `sync` Read the specified file synchronously. (This has no effect - when a file option isn't specified, because entries are emitted as - fast as they are parsed from the stream anyway.) -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the - archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. This is important for when `file` is set, - because there is no other way to do anything useful with this method. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noResume` By default, `entry` streams are resumed immediately after - the call to `onentry`. Set `noResume: true` to suppress this - behavior. 
Note that by opting into this, the stream will never - complete until the entry data is consumed. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") - -### tar.u(options, fileList, callback) [alias: tar.update] - -Add files to an archive if they are newer than the entry already in -the tarball archive. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Required. Write the tarball archive to the specified - filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. [Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. 
By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -### tar.r(options, fileList, callback) [alias: tar.replace] - -Add files to an existing archive. Because later entries override -earlier entries, this effectively replaces any existing entries. - -The `fileList` is an array of paths to add to the tarball. Adding a -directory also adds its children recursively. - -An entry in `fileList` that starts with an `@` symbol is a tar archive -whose entries will be added. To add a file that starts with `@`, -prepend it with `./`. - -The following options are supported: - -- `file` Required. Write the tarball archive to the specified - filename. [Alias: `f`] -- `sync` Act synchronously. If this is set, then any provided file - will be fully written after the call to `tar.c`. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for adding entries to the - archive. Defaults to `process.cwd()`. 
[Alias: `C`] -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` [Alias: `z`] -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. [Alias: `P`] -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. [Alias: `n`] -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. [Alias: `L`, `h`] -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - [Alias: `m`, `no-mtime`] -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - - -## Low-Level API - -### class tar.Pack - -A readable tar stream. - -Has all the standard readable stream interface stuff. `'data'` and -`'end'` events, `read()` method, `pause()` and `resume()`, etc. 
- -#### constructor(options) - -The following options are supported: - -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `strict` Treat warnings as crash-worthy errors. Default false. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. -- `prefix` A path portion to prefix onto the entries in the archive. -- `gzip` Set to any truthy value to create a gzipped archive, or an - object with settings for `zlib.Gzip()` -- `filter` A function that gets called with `(path, stat)` for each - entry being added. Return `true` to add the entry to the archive, - or `false` to omit it. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `readdirCache` A Map object that caches calls to `readdir`. -- `jobs` A number specifying how many concurrent jobs to run. - Defaults to 4. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 16 MB. -- `noDirRecurse` Do not recursively archive the contents of - directories. -- `follow` Set to true to pack the targets of symbolic links. Without - this option, symbolic links are archived as such. -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. 
-- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. -- `mtime` Set to a `Date` object to force a specific `mtime` for - everything added to the archive. Overridden by `noMtime`. - -#### add(path) - -Adds an entry to the archive. Returns the Pack stream. - -#### write(path) - -Adds an entry to the archive. Returns true if flushed. - -#### end() - -Finishes the archive. - -### class tar.Pack.Sync - -Synchronous version of `tar.Pack`. - -### class tar.Unpack - -A writable stream that unpacks a tar archive onto the file system. - -All the normal writable stream stuff is supported. `write()` and -`end()` methods, `'drain'` events, etc. - -Note that all directories that are created will be forced to be -writable, readable, and listable by their owner, to avoid cases where -a directory prevents extraction of child entries by virtue of its -mode. - -`'close'` is emitted when it's done writing stuff to the file system. - -Most unpack errors will cause a `warn` event to be emitted. If the -`cwd` is missing, or not a directory, then an error will be emitted. - -#### constructor(options) - -- `cwd` Extract files relative to the specified directory. Defaults - to `process.cwd()`. If provided, this must exist and must be a - directory. -- `filter` A function that gets called with `(path, entry)` for each - entry being unpacked. Return `true` to unpack the entry from the - archive, or `false` to skip it. -- `newer` Set to true to keep the existing file on disk if it's newer - than the file in the archive. -- `keep` Do not overwrite existing files. In particular, if a file - appears more than once in an archive, later copies will not - overwrite earlier copies. -- `preservePaths` Allow absolute paths, paths containing `..`, and - extracting through symbolic links. 
By default, `/` is stripped from - absolute paths, `..` paths are not extracted, and any file whose - location would be modified by a symbolic link is not extracted. -- `unlink` Unlink files before creating them. Without this option, - tar overwrites existing files, which preserves existing hardlinks. - With this option, existing hardlinks will be broken, as will any - symlink that would affect the location of an extracted file. -- `strip` Remove the specified number of leading path elements. - Pathnames with fewer elements will be silently skipped. Note that - the pathname is edited after applying the filter, but before - security checks. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `umask` Filter the modes of entries like `process.umask()`. -- `dmode` Default mode for directories -- `fmode` Default mode for files -- `dirCache` A Map object of which directories exist. -- `maxMetaEntrySize` The maximum size of meta entries that is - supported. Defaults to 1 MB. -- `preserveOwner` If true, tar will set the `uid` and `gid` of - extracted entries to the `uid` and `gid` fields in the archive. - This defaults to true when run as root, and false otherwise. If - false, then files and directories will be set with the owner and - group of the user running the process. This is similar to `-p` in - `tar(1)`, but ACLs and other system-specific data is never unpacked - in this implementation, and modes are set by default already. -- `win32` True if on a windows platform. Causes behavior where - filenames containing `<|>?` chars are converted to - windows-compatible values while being unpacked. -- `uid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified user id, regardless of the `uid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `gid` option. 
-- `gid` Set to a number to force ownership of all extracted files and - folders, and all implicitly created directories, to be owned by the - specified group id, regardless of the `gid` field in the archive. - Cannot be used along with `preserveOwner`. Requires also setting a - `uid` option. -- `noMtime` Set to true to omit writing `mtime` value for extracted - entries. -- `transform` Provide a function that takes an `entry` object, and - returns a stream, or any falsey value. If a stream is provided, - then that stream's data will be written instead of the contents of - the archive entry. If a falsey value is provided, then the entry is - written to disk as normal. (To exclude items from extraction, use - the `filter` option described above.) -- `strict` Treat warnings as crash-worthy errors. Default false. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the - extracted file matches the entry mode. This also suppresses the call to - `process.umask()` to determine the default umask value, since tar will - extract with whatever mode is provided, and let the process `umask` apply - normally. -- `maxDepth` The maximum depth of subfolders to extract into. This - defaults to 1024. Anything deeper than the limit will raise a - warning and skip the entry. Set to `Infinity` to remove the - limitation. - -### class tar.Unpack.Sync - -Synchronous version of `tar.Unpack`. - -Note that using an asynchronous stream type with the `transform` -option will cause undefined behavior in sync unpack streams. -[MiniPass](http://npm.im/minipass)-based streams are designed for this -use case. - -### class tar.Parse - -A writable stream that parses a tar archive stream. All the standard -writable stream stuff is supported. 
- -If the archive is gzipped, then tar will detect this and unzip it. - -Emits `'entry'` events with `tar.ReadEntry` objects, which are -themselves readable streams that you can pipe wherever. - -Each `entry` will not emit until the one before it is flushed through, -so make sure to either consume the data (with `on('data', ...)` or -`.pipe(...)`) or throw it away with `.resume()` to keep the stream -flowing. - -#### constructor(options) - -Returns an event emitter that emits `entry` events with -`tar.ReadEntry` objects. - -The following options are supported: - -- `strict` Treat warnings as crash-worthy errors. Default false. -- `filter` A function that gets called with `(path, entry)` for each - entry being listed. Return `true` to emit the entry from the - archive, or `false` to skip it. -- `onentry` A function that gets called with `(entry)` for each entry - that passes the filter. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") - -#### abort(error) - -Stop all parsing activities. This is called when there are zlib -errors. It also emits an unrecoverable warning with the error provided. - -### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass) - -A representation of an entry that is being read out of a tar archive. - -It has the following fields: - -- `extended` The extended metadata object provided to the constructor. -- `globalExtended` The global extended metadata object provided to the - constructor. -- `remain` The number of bytes remaining to be written into the - stream. -- `blockRemain` The number of 512-byte blocks remaining to be written - into the stream. -- `ignore` Whether this entry should be ignored. -- `meta` True if this represents metadata about the next entry, false - if it represents a filesystem object. -- All the fields from the header, extended header, and global extended - header are added to the ReadEntry object. 
So it has `path`, `type`, - `size`, `mode`, and so on. - -#### constructor(header, extended, globalExtended) - -Create a new ReadEntry object with the specified header, extended -header, and global extended header values. - -### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass) - -A representation of an entry that is being written from the file -system into a tar archive. - -Emits data for the Header, and for the Pax Extended Header if one is -required, as well as any body data. - -Creating a WriteEntry for a directory does not also create -WriteEntry objects for all of the directory contents. - -It has the following fields: - -- `path` The path field that will be written to the archive. By - default, this is also the path from the cwd to the file system - object. -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `myuid` If supported, the uid of the user running the current - process. -- `myuser` The `env.USER` string if set, or `''`. Set as the entry - `uname` field if the file's `uid` matches `this.myuid`. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 1 MB. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. -- `absolute` The absolute path to the entry on the filesystem. By - default, this is `path.resolve(this.cwd, this.path)`, but it can be - overridden explicitly. 
-- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths - replace `\` with `/` and filenames containing the windows-compatible - forms of `<|>?:` characters are converted to actual `<|>?:` characters - in the archive. -- `noPax` Suppress pax extended headers. Note that this means that - long paths and linkpaths will be truncated, and large or negative - numeric values may be interpreted incorrectly. -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - - -#### constructor(path, options) - -`path` is the path of the entry as it is written in the archive. - -The following options are supported: - -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `maxReadSize` The maximum buffer size for `fs.read()` operations. - Defaults to 1 MB. -- `linkCache` A Map object containing the device and inode value for - any file whose nlink is > 1, to identify hard links. -- `statCache` A Map object that caches calls `lstat`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `cwd` The current working directory for creating the archive. - Defaults to `process.cwd()`. -- `absolute` The absolute path to the entry on the filesystem. By - default, this is `path.resolve(this.cwd, this.path)`, but it can be - overridden explicitly. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `win32` True if on a windows platform. Causes behavior where paths - replace `\` with `/`. 
-- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. -- `umask` Set to restrict the modes on the entries in the archive, - somewhat like how umask works on file creation. Defaults to - `process.umask()` on unix systems, or `0o22` on Windows. - -#### warn(message, data) - -If strict, emit an error with the provided message. - -Othewise, emit a `'warn'` event with the provided message and data. - -### class tar.WriteEntry.Sync - -Synchronous version of tar.WriteEntry - -### class tar.WriteEntry.Tar - -A version of tar.WriteEntry that gets its data from a tar.ReadEntry -instead of from the filesystem. - -#### constructor(readEntry, options) - -`readEntry` is the entry being read out of another archive. - -The following options are supported: - -- `portable` Omit metadata that is system-specific: `ctime`, `atime`, - `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note - that `mtime` is still included, because this is necessary for other - time-based operations. Additionally, `mode` is set to a "reasonable - default" for most unix systems, based on a `umask` value of `0o22`. -- `preservePaths` Allow absolute paths. By default, `/` is stripped - from absolute paths. -- `strict` Treat warnings as crash-worthy errors. Default false. -- `onwarn` A function that will get called with `(code, message, data)` for - any warnings encountered. (See "Warnings and Errors") -- `noMtime` Set to true to omit writing `mtime` values for entries. - Note that this prevents using other mtime-based features like - `tar.update` or the `keepNewer` option with the resulting tar archive. - -### class tar.Header - -A class for reading and writing header blocks. 
- -It has the following fields: - -- `nullBlock` True if decoding a block which is entirely composed of - `0x00` null bytes. (Useful because tar files are terminated by - at least 2 null blocks.) -- `cksumValid` True if the checksum in the header is valid, false - otherwise. -- `needPax` True if the values, as encoded, will require a Pax - extended header. -- `path` The path of the entry. -- `mode` The 4 lowest-order octal digits of the file mode. That is, - read/write/execute permissions for world, group, and owner, and the - setuid, setgid, and sticky bits. -- `uid` Numeric user id of the file owner -- `gid` Numeric group id of the file owner -- `size` Size of the file in bytes -- `mtime` Modified time of the file -- `cksum` The checksum of the header. This is generated by adding all - the bytes of the header block, treating the checksum field itself as - all ascii space characters (that is, `0x20`). -- `type` The human-readable name of the type of entry this represents, - or the alphanumeric key if unknown. -- `typeKey` The alphanumeric key for the type of entry this header - represents. -- `linkpath` The target of Link and SymbolicLink entries. -- `uname` Human-readable user name of the file owner -- `gname` Human-readable group name of the file owner -- `devmaj` The major portion of the device number. Always `0` for - files, directories, and links. -- `devmin` The minor portion of the device number. Always `0` for - files, directories, and links. -- `atime` File access time. -- `ctime` File change time. - -#### constructor(data, [offset=0]) - -`data` is optional. It is either a Buffer that should be interpreted -as a tar Header starting at the specified offset and continuing for -512 bytes, or a data object of keys and values to set on the header -object, and eventually encode as a tar Header. - -#### decode(block, offset) - -Decode the provided buffer starting at the specified offset. - -Buffer length must be greater than 512 bytes. 
- -#### set(data) - -Set the fields in the data object. - -#### encode(buffer, offset) - -Encode the header fields into the buffer at the specified offset. - -Returns `this.needPax` to indicate whether a Pax Extended Header is -required to properly encode the specified data. - -### class tar.Pax - -An object representing a set of key-value pairs in an Pax extended -header entry. - -It has the following fields. Where the same name is used, they have -the same semantics as the tar.Header field of the same name. - -- `global` True if this represents a global extended header, or false - if it is for a single entry. -- `atime` -- `charset` -- `comment` -- `ctime` -- `gid` -- `gname` -- `linkpath` -- `mtime` -- `path` -- `size` -- `uid` -- `uname` -- `dev` -- `ino` -- `nlink` - -#### constructor(object, global) - -Set the fields set in the object. `global` is a boolean that defaults -to false. - -#### encode() - -Return a Buffer containing the header and body for the Pax extended -header entry, or `null` if there is nothing to encode. - -#### encodeBody() - -Return a string representing the body of the pax extended header -entry. - -#### encodeField(fieldName) - -Return a string representing the key/value encoding for the specified -fieldName, or `''` if the field is unset. - -### tar.Pax.parse(string, extended, global) - -Return a new Pax object created by parsing the contents of the string -provided. - -If the `extended` object is set, then also add the fields from that -object. (This is necessary because multiple metadata entries can -occur in sequence.) - -### tar.types - -A translation table for the `type` field in tar headers. - -#### tar.types.name.get(code) - -Get the human-readable name for a given alphanumeric code. - -#### tar.types.code.get(name) - -Get the alphanumeric code for a given human-readable name. 
diff --git a/node_modules/tar/lib/normalize-unicode.js b/node_modules/tar/lib/normalize-unicode.js deleted file mode 100644 index 79e285ab..00000000 --- a/node_modules/tar/lib/normalize-unicode.js +++ /dev/null @@ -1,12 +0,0 @@ -// warning: extremely hot code path. -// This has been meticulously optimized for use -// within npm install on large package trees. -// Do not edit without careful benchmarking. -const normalizeCache = Object.create(null) -const { hasOwnProperty } = Object.prototype -module.exports = s => { - if (!hasOwnProperty.call(normalizeCache, s)) { - normalizeCache[s] = s.normalize('NFD') - } - return normalizeCache[s] -} diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js deleted file mode 100644 index d533a068..00000000 --- a/node_modules/tar/lib/pack.js +++ /dev/null @@ -1,432 +0,0 @@ -'use strict' - -// A readable tar stream creator -// Technically, this is a transform stream that you write paths into, -// and tar format comes out of. -// The `add()` method is like `write()` but returns this, -// and end() return `this` as well, so you can -// do `new Pack(opt).add('files').add('dir').end().pipe(output) -// You could also do something like: -// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) - -class PackJob { - constructor (path, absolute) { - this.path = path || './' - this.absolute = absolute - this.entry = null - this.stat = null - this.readdir = null - this.pending = false - this.ignore = false - this.piped = false - } -} - -const { Minipass } = require('minipass') -const zlib = require('minizlib') -const ReadEntry = require('./read-entry.js') -const WriteEntry = require('./write-entry.js') -const WriteEntrySync = WriteEntry.Sync -const WriteEntryTar = WriteEntry.Tar -const Yallist = require('yallist') -const EOF = Buffer.alloc(1024) -const ONSTAT = Symbol('onStat') -const ENDED = Symbol('ended') -const QUEUE = Symbol('queue') -const CURRENT = Symbol('current') -const PROCESS = Symbol('process') 
-const PROCESSING = Symbol('processing') -const PROCESSJOB = Symbol('processJob') -const JOBS = Symbol('jobs') -const JOBDONE = Symbol('jobDone') -const ADDFSENTRY = Symbol('addFSEntry') -const ADDTARENTRY = Symbol('addTarEntry') -const STAT = Symbol('stat') -const READDIR = Symbol('readdir') -const ONREADDIR = Symbol('onreaddir') -const PIPE = Symbol('pipe') -const ENTRY = Symbol('entry') -const ENTRYOPT = Symbol('entryOpt') -const WRITEENTRYCLASS = Symbol('writeEntryClass') -const WRITE = Symbol('write') -const ONDRAIN = Symbol('ondrain') - -const fs = require('fs') -const path = require('path') -const warner = require('./warn-mixin.js') -const normPath = require('./normalize-windows-path.js') - -const Pack = warner(class Pack extends Minipass { - constructor (opt) { - super(opt) - opt = opt || Object.create(null) - this.opt = opt - this.file = opt.file || '' - this.cwd = opt.cwd || process.cwd() - this.maxReadSize = opt.maxReadSize - this.preservePaths = !!opt.preservePaths - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.prefix = normPath(opt.prefix || '') - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.readdirCache = opt.readdirCache || new Map() - - this[WRITEENTRYCLASS] = WriteEntry - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - this.portable = !!opt.portable - this.zip = null - - if (opt.gzip || opt.brotli) { - if (opt.gzip && opt.brotli) { - throw new TypeError('gzip and brotli are mutually exclusive') - } - if (opt.gzip) { - if (typeof opt.gzip !== 'object') { - opt.gzip = {} - } - if (this.portable) { - opt.gzip.portable = true - } - this.zip = new zlib.Gzip(opt.gzip) - } - if (opt.brotli) { - if (typeof opt.brotli !== 'object') { - opt.brotli = {} - } - this.zip = new zlib.BrotliCompress(opt.brotli) - } - this.zip.on('data', chunk => super.write(chunk)) - this.zip.on('end', _ => super.end()) - this.zip.on('drain', _ => this[ONDRAIN]()) - this.on('resume', _ 
=> this.zip.resume()) - } else { - this.on('drain', this[ONDRAIN]) - } - - this.noDirRecurse = !!opt.noDirRecurse - this.follow = !!opt.follow - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - - this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true - - this[QUEUE] = new Yallist() - this[JOBS] = 0 - this.jobs = +opt.jobs || 4 - this[PROCESSING] = false - this[ENDED] = false - } - - [WRITE] (chunk) { - return super.write(chunk) - } - - add (path) { - this.write(path) - return this - } - - end (path) { - if (path) { - this.write(path) - } - this[ENDED] = true - this[PROCESS]() - return this - } - - write (path) { - if (this[ENDED]) { - throw new Error('write after end') - } - - if (path instanceof ReadEntry) { - this[ADDTARENTRY](path) - } else { - this[ADDFSENTRY](path) - } - return this.flowing - } - - [ADDTARENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p.path)) - // in this case, we don't have to wait for the stat - if (!this.filter(p.path, p)) { - p.resume() - } else { - const job = new PackJob(p.path, absolute, false) - job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) - job.entry.on('end', _ => this[JOBDONE](job)) - this[JOBS] += 1 - this[QUEUE].push(job) - } - - this[PROCESS]() - } - - [ADDFSENTRY] (p) { - const absolute = normPath(path.resolve(this.cwd, p)) - this[QUEUE].push(new PackJob(p, absolute)) - this[PROCESS]() - } - - [STAT] (job) { - job.pending = true - this[JOBS] += 1 - const stat = this.follow ? 'stat' : 'lstat' - fs[stat](job.absolute, (er, stat) => { - job.pending = false - this[JOBS] -= 1 - if (er) { - this.emit('error', er) - } else { - this[ONSTAT](job, stat) - } - }) - } - - [ONSTAT] (job, stat) { - this.statCache.set(job.absolute, stat) - job.stat = stat - - // now we have the stat, we can filter it. 
- if (!this.filter(job.path, stat)) { - job.ignore = true - } - - this[PROCESS]() - } - - [READDIR] (job) { - job.pending = true - this[JOBS] += 1 - fs.readdir(job.absolute, (er, entries) => { - job.pending = false - this[JOBS] -= 1 - if (er) { - return this.emit('error', er) - } - this[ONREADDIR](job, entries) - }) - } - - [ONREADDIR] (job, entries) { - this.readdirCache.set(job.absolute, entries) - job.readdir = entries - this[PROCESS]() - } - - [PROCESS] () { - if (this[PROCESSING]) { - return - } - - this[PROCESSING] = true - for (let w = this[QUEUE].head; - w !== null && this[JOBS] < this.jobs; - w = w.next) { - this[PROCESSJOB](w.value) - if (w.value.ignore) { - const p = w.next - this[QUEUE].removeNode(w) - w.next = p - } - } - - this[PROCESSING] = false - - if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { - if (this.zip) { - this.zip.end(EOF) - } else { - super.write(EOF) - super.end() - } - } - } - - get [CURRENT] () { - return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value - } - - [JOBDONE] (job) { - this[QUEUE].shift() - this[JOBS] -= 1 - this[PROCESS]() - } - - [PROCESSJOB] (job) { - if (job.pending) { - return - } - - if (job.entry) { - if (job === this[CURRENT] && !job.piped) { - this[PIPE](job) - } - return - } - - if (!job.stat) { - if (this.statCache.has(job.absolute)) { - this[ONSTAT](job, this.statCache.get(job.absolute)) - } else { - this[STAT](job) - } - } - if (!job.stat) { - return - } - - // filtered out! 
- if (job.ignore) { - return - } - - if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { - if (this.readdirCache.has(job.absolute)) { - this[ONREADDIR](job, this.readdirCache.get(job.absolute)) - } else { - this[READDIR](job) - } - if (!job.readdir) { - return - } - } - - // we know it doesn't have an entry, because that got checked above - job.entry = this[ENTRY](job) - if (!job.entry) { - job.ignore = true - return - } - - if (job === this[CURRENT] && !job.piped) { - this[PIPE](job) - } - } - - [ENTRYOPT] (job) { - return { - onwarn: (code, msg, data) => this.warn(code, msg, data), - noPax: this.noPax, - cwd: this.cwd, - absolute: job.absolute, - preservePaths: this.preservePaths, - maxReadSize: this.maxReadSize, - strict: this.strict, - portable: this.portable, - linkCache: this.linkCache, - statCache: this.statCache, - noMtime: this.noMtime, - mtime: this.mtime, - prefix: this.prefix, - } - } - - [ENTRY] (job) { - this[JOBS] += 1 - try { - return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) - .on('end', () => this[JOBDONE](job)) - .on('error', er => this.emit('error', er)) - } catch (er) { - this.emit('error', er) - } - } - - [ONDRAIN] () { - if (this[CURRENT] && this[CURRENT].entry) { - this[CURRENT].entry.resume() - } - } - - // like .pipe() but using super, because our write() is special - [PIPE] (job) { - job.piped = true - - if (job.readdir) { - job.readdir.forEach(entry => { - const p = job.path - const base = p === './' ? 
'' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - } - - const source = job.entry - const zip = this.zip - - if (zip) { - source.on('data', chunk => { - if (!zip.write(chunk)) { - source.pause() - } - }) - } else { - source.on('data', chunk => { - if (!super.write(chunk)) { - source.pause() - } - }) - } - } - - pause () { - if (this.zip) { - this.zip.pause() - } - return super.pause() - } -}) - -class PackSync extends Pack { - constructor (opt) { - super(opt) - this[WRITEENTRYCLASS] = WriteEntrySync - } - - // pause/resume are no-ops in sync streams. - pause () {} - resume () {} - - [STAT] (job) { - const stat = this.follow ? 'statSync' : 'lstatSync' - this[ONSTAT](job, fs[stat](job.absolute)) - } - - [READDIR] (job, stat) { - this[ONREADDIR](job, fs.readdirSync(job.absolute)) - } - - // gotta get it all in this tick - [PIPE] (job) { - const source = job.entry - const zip = this.zip - - if (job.readdir) { - job.readdir.forEach(entry => { - const p = job.path - const base = p === './' ? '' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - } - - if (zip) { - source.on('data', chunk => { - zip.write(chunk) - }) - } else { - source.on('data', chunk => { - super[WRITE](chunk) - }) - } - } -} - -Pack.Sync = PackSync - -module.exports = Pack diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js deleted file mode 100644 index 94e53042..00000000 --- a/node_modules/tar/lib/parse.js +++ /dev/null @@ -1,552 +0,0 @@ -'use strict' - -// this[BUFFER] is the remainder of a chunk if we're waiting for -// the full 512 bytes of a header to come in. We will Buffer.concat() -// it to the next write(), which is a mem copy, but a small one. 
-// -// this[QUEUE] is a Yallist of entries that haven't been emitted -// yet this can only get filled up if the user keeps write()ing after -// a write() returns false, or does a write() with more than one entry -// -// We don't buffer chunks, we always parse them and either create an -// entry, or push it into the active entry. The ReadEntry class knows -// to throw data away if .ignore=true -// -// Shift entry off the buffer when it emits 'end', and emit 'entry' for -// the next one in the list. -// -// At any time, we're pushing body chunks into the entry at WRITEENTRY, -// and waiting for 'end' on the entry at READENTRY -// -// ignored entries get .resume() called on them straight away - -const warner = require('./warn-mixin.js') -const Header = require('./header.js') -const EE = require('events') -const Yallist = require('yallist') -const maxMetaEntrySize = 1024 * 1024 -const Entry = require('./read-entry.js') -const Pax = require('./pax.js') -const zlib = require('minizlib') -const { nextTick } = require('process') - -const gzipHeader = Buffer.from([0x1f, 0x8b]) -const STATE = Symbol('state') -const WRITEENTRY = Symbol('writeEntry') -const READENTRY = Symbol('readEntry') -const NEXTENTRY = Symbol('nextEntry') -const PROCESSENTRY = Symbol('processEntry') -const EX = Symbol('extendedHeader') -const GEX = Symbol('globalExtendedHeader') -const META = Symbol('meta') -const EMITMETA = Symbol('emitMeta') -const BUFFER = Symbol('buffer') -const QUEUE = Symbol('queue') -const ENDED = Symbol('ended') -const EMITTEDEND = Symbol('emittedEnd') -const EMIT = Symbol('emit') -const UNZIP = Symbol('unzip') -const CONSUMECHUNK = Symbol('consumeChunk') -const CONSUMECHUNKSUB = Symbol('consumeChunkSub') -const CONSUMEBODY = Symbol('consumeBody') -const CONSUMEMETA = Symbol('consumeMeta') -const CONSUMEHEADER = Symbol('consumeHeader') -const CONSUMING = Symbol('consuming') -const BUFFERCONCAT = Symbol('bufferConcat') -const MAYBEEND = Symbol('maybeEnd') -const WRITING = 
Symbol('writing') -const ABORTED = Symbol('aborted') -const DONE = Symbol('onDone') -const SAW_VALID_ENTRY = Symbol('sawValidEntry') -const SAW_NULL_BLOCK = Symbol('sawNullBlock') -const SAW_EOF = Symbol('sawEOF') -const CLOSESTREAM = Symbol('closeStream') - -const noop = _ => true - -module.exports = warner(class Parser extends EE { - constructor (opt) { - opt = opt || {} - super(opt) - - this.file = opt.file || '' - - // set to boolean false when an entry starts. 1024 bytes of \0 - // is technically a valid tarball, albeit a boring one. - this[SAW_VALID_ENTRY] = null - - // these BADARCHIVE errors can't be detected early. listen on DONE. - this.on(DONE, _ => { - if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) { - // either less than 1 block of data, or all entries were invalid. - // Either way, probably not even a tarball. - this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format') - } - }) - - if (opt.ondone) { - this.on(DONE, opt.ondone) - } else { - this.on(DONE, _ => { - this.emit('prefinish') - this.emit('finish') - this.emit('end') - }) - } - - this.strict = !!opt.strict - this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize - this.filter = typeof opt.filter === 'function' ? opt.filter : noop - // Unlike gzip, brotli doesn't have any magic bytes to identify it - // Users need to explicitly tell us they're extracting a brotli file - // Or we infer from the file extension - const isTBR = (opt.file && ( - opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'))) - // if it's a tbr file it MIGHT be brotli, but we don't know until - // we look at it and verify it's not a valid tar file. - this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli - : isTBR ? 
undefined - : false - - // have to set this so that streams are ok piping into it - this.writable = true - this.readable = false - - this[QUEUE] = new Yallist() - this[BUFFER] = null - this[READENTRY] = null - this[WRITEENTRY] = null - this[STATE] = 'begin' - this[META] = '' - this[EX] = null - this[GEX] = null - this[ENDED] = false - this[UNZIP] = null - this[ABORTED] = false - this[SAW_NULL_BLOCK] = false - this[SAW_EOF] = false - - this.on('end', () => this[CLOSESTREAM]()) - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - if (typeof opt.onentry === 'function') { - this.on('entry', opt.onentry) - } - } - - [CONSUMEHEADER] (chunk, position) { - if (this[SAW_VALID_ENTRY] === null) { - this[SAW_VALID_ENTRY] = false - } - let header - try { - header = new Header(chunk, position, this[EX], this[GEX]) - } catch (er) { - return this.warn('TAR_ENTRY_INVALID', er) - } - - if (header.nullBlock) { - if (this[SAW_NULL_BLOCK]) { - this[SAW_EOF] = true - // ending an archive with no entries. pointless, but legal. 
- if (this[STATE] === 'begin') { - this[STATE] = 'header' - } - this[EMIT]('eof') - } else { - this[SAW_NULL_BLOCK] = true - this[EMIT]('nullBlock') - } - } else { - this[SAW_NULL_BLOCK] = false - if (!header.cksumValid) { - this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }) - } else if (!header.path) { - this.warn('TAR_ENTRY_INVALID', 'path is required', { header }) - } else { - const type = header.type - if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header }) - } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) { - this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header }) - } else { - const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) - - // we do this for meta & ignored entries as well, because they - // are still valid tar, or else we wouldn't know to ignore them - if (!this[SAW_VALID_ENTRY]) { - if (entry.remain) { - // this might be the one! - const onend = () => { - if (!entry.invalid) { - this[SAW_VALID_ENTRY] = true - } - } - entry.on('end', onend) - } else { - this[SAW_VALID_ENTRY] = true - } - } - - if (entry.meta) { - if (entry.size > this.maxMetaEntrySize) { - entry.ignore = true - this[EMIT]('ignoredEntry', entry) - this[STATE] = 'ignore' - entry.resume() - } else if (entry.size > 0) { - this[META] = '' - entry.on('data', c => this[META] += c) - this[STATE] = 'meta' - } - } else { - this[EX] = null - entry.ignore = entry.ignore || !this.filter(entry.path, entry) - - if (entry.ignore) { - // probably valid, just not something we care about - this[EMIT]('ignoredEntry', entry) - this[STATE] = entry.remain ? 
'ignore' : 'header' - entry.resume() - } else { - if (entry.remain) { - this[STATE] = 'body' - } else { - this[STATE] = 'header' - entry.end() - } - - if (!this[READENTRY]) { - this[QUEUE].push(entry) - this[NEXTENTRY]() - } else { - this[QUEUE].push(entry) - } - } - } - } - } - } - } - - [CLOSESTREAM] () { - nextTick(() => this.emit('close')) - } - - [PROCESSENTRY] (entry) { - let go = true - - if (!entry) { - this[READENTRY] = null - go = false - } else if (Array.isArray(entry)) { - this.emit.apply(this, entry) - } else { - this[READENTRY] = entry - this.emit('entry', entry) - if (!entry.emittedEnd) { - entry.on('end', _ => this[NEXTENTRY]()) - go = false - } - } - - return go - } - - [NEXTENTRY] () { - do {} while (this[PROCESSENTRY](this[QUEUE].shift())) - - if (!this[QUEUE].length) { - // At this point, there's nothing in the queue, but we may have an - // entry which is being consumed (readEntry). - // If we don't, then we definitely can handle more data. - // If we do, and either it's flowing, or it has never had any data - // written to it, then it needs more. - // The only other possibility is that it has returned false from a - // write() call, so we wait for the next drain to continue. - const re = this[READENTRY] - const drainNow = !re || re.flowing || re.size === re.remain - if (drainNow) { - if (!this[WRITING]) { - this.emit('drain') - } - } else { - re.once('drain', _ => this.emit('drain')) - } - } - } - - [CONSUMEBODY] (chunk, position) { - // write up to but no more than writeEntry.blockRemain - const entry = this[WRITEENTRY] - const br = entry.blockRemain - const c = (br >= chunk.length && position === 0) ? 
chunk - : chunk.slice(position, position + br) - - entry.write(c) - - if (!entry.blockRemain) { - this[STATE] = 'header' - this[WRITEENTRY] = null - entry.end() - } - - return c.length - } - - [CONSUMEMETA] (chunk, position) { - const entry = this[WRITEENTRY] - const ret = this[CONSUMEBODY](chunk, position) - - // if we finished, then the entry is reset - if (!this[WRITEENTRY]) { - this[EMITMETA](entry) - } - - return ret - } - - [EMIT] (ev, data, extra) { - if (!this[QUEUE].length && !this[READENTRY]) { - this.emit(ev, data, extra) - } else { - this[QUEUE].push([ev, data, extra]) - } - } - - [EMITMETA] (entry) { - this[EMIT]('meta', this[META]) - switch (entry.type) { - case 'ExtendedHeader': - case 'OldExtendedHeader': - this[EX] = Pax.parse(this[META], this[EX], false) - break - - case 'GlobalExtendedHeader': - this[GEX] = Pax.parse(this[META], this[GEX], true) - break - - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - this[EX] = this[EX] || Object.create(null) - this[EX].path = this[META].replace(/\0.*/, '') - break - - case 'NextFileHasLongLinkpath': - this[EX] = this[EX] || Object.create(null) - this[EX].linkpath = this[META].replace(/\0.*/, '') - break - - /* istanbul ignore next */ - default: throw new Error('unknown meta: ' + entry.type) - } - } - - abort (error) { - this[ABORTED] = true - this.emit('abort', error) - // always throws, even in non-strict mode - this.warn('TAR_ABORT', error, { recoverable: false }) - } - - write (chunk) { - if (this[ABORTED]) { - return - } - - // first write, might be gzipped - const needSniff = this[UNZIP] === null || - this.brotli === undefined && this[UNZIP] === false - if (needSniff && chunk) { - if (this[BUFFER]) { - chunk = Buffer.concat([this[BUFFER], chunk]) - this[BUFFER] = null - } - if (chunk.length < gzipHeader.length) { - this[BUFFER] = chunk - return true - } - - // look for gzip header - for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { - if (chunk[i] !== gzipHeader[i]) { - 
this[UNZIP] = false - } - } - - const maybeBrotli = this.brotli === undefined - if (this[UNZIP] === false && maybeBrotli) { - // read the first header to see if it's a valid tar file. If so, - // we can safely assume that it's not actually brotli, despite the - // .tbr or .tar.br file extension. - // if we ended before getting a full chunk, yes, def brotli - if (chunk.length < 512) { - if (this[ENDED]) { - this.brotli = true - } else { - this[BUFFER] = chunk - return true - } - } else { - // if it's tar, it's pretty reliably not brotli, chances of - // that happening are astronomical. - try { - new Header(chunk.slice(0, 512)) - this.brotli = false - } catch (_) { - this.brotli = true - } - } - } - - if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) { - const ended = this[ENDED] - this[ENDED] = false - this[UNZIP] = this[UNZIP] === null - ? new zlib.Unzip() - : new zlib.BrotliDecompress() - this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)) - this[UNZIP].on('error', er => this.abort(er)) - this[UNZIP].on('end', _ => { - this[ENDED] = true - this[CONSUMECHUNK]() - }) - this[WRITING] = true - const ret = this[UNZIP][ended ? 'end' : 'write'](chunk) - this[WRITING] = false - return ret - } - } - - this[WRITING] = true - if (this[UNZIP]) { - this[UNZIP].write(chunk) - } else { - this[CONSUMECHUNK](chunk) - } - this[WRITING] = false - - // return false if there's a queue, or if the current entry isn't flowing - const ret = - this[QUEUE].length ? false : - this[READENTRY] ? this[READENTRY].flowing : - true - - // if we have no queue, then that means a clogged READENTRY - if (!ret && !this[QUEUE].length) { - this[READENTRY].once('drain', _ => this.emit('drain')) - } - - return ret - } - - [BUFFERCONCAT] (c) { - if (c && !this[ABORTED]) { - this[BUFFER] = this[BUFFER] ? 
Buffer.concat([this[BUFFER], c]) : c - } - } - - [MAYBEEND] () { - if (this[ENDED] && - !this[EMITTEDEND] && - !this[ABORTED] && - !this[CONSUMING]) { - this[EMITTEDEND] = true - const entry = this[WRITEENTRY] - if (entry && entry.blockRemain) { - // truncated, likely a damaged file - const have = this[BUFFER] ? this[BUFFER].length : 0 - this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${ - entry.blockRemain} more bytes, only ${have} available)`, { entry }) - if (this[BUFFER]) { - entry.write(this[BUFFER]) - } - entry.end() - } - this[EMIT](DONE) - } - } - - [CONSUMECHUNK] (chunk) { - if (this[CONSUMING]) { - this[BUFFERCONCAT](chunk) - } else if (!chunk && !this[BUFFER]) { - this[MAYBEEND]() - } else { - this[CONSUMING] = true - if (this[BUFFER]) { - this[BUFFERCONCAT](chunk) - const c = this[BUFFER] - this[BUFFER] = null - this[CONSUMECHUNKSUB](c) - } else { - this[CONSUMECHUNKSUB](chunk) - } - - while (this[BUFFER] && - this[BUFFER].length >= 512 && - !this[ABORTED] && - !this[SAW_EOF]) { - const c = this[BUFFER] - this[BUFFER] = null - this[CONSUMECHUNKSUB](c) - } - this[CONSUMING] = false - } - - if (!this[BUFFER] || this[ENDED]) { - this[MAYBEEND]() - } - } - - [CONSUMECHUNKSUB] (chunk) { - // we know that we are in CONSUMING mode, so anything written goes into - // the buffer. Advance the position and put any remainder in the buffer. 
- let position = 0 - const length = chunk.length - while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) { - switch (this[STATE]) { - case 'begin': - case 'header': - this[CONSUMEHEADER](chunk, position) - position += 512 - break - - case 'ignore': - case 'body': - position += this[CONSUMEBODY](chunk, position) - break - - case 'meta': - position += this[CONSUMEMETA](chunk, position) - break - - /* istanbul ignore next */ - default: - throw new Error('invalid state: ' + this[STATE]) - } - } - - if (position < length) { - if (this[BUFFER]) { - this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) - } else { - this[BUFFER] = chunk.slice(position) - } - } - } - - end (chunk) { - if (!this[ABORTED]) { - if (this[UNZIP]) { - this[UNZIP].end(chunk) - } else { - this[ENDED] = true - if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0) - this.write(chunk) - } - } - } -}) diff --git a/node_modules/tar/lib/path-reservations.js b/node_modules/tar/lib/path-reservations.js deleted file mode 100644 index 8d349d58..00000000 --- a/node_modules/tar/lib/path-reservations.js +++ /dev/null @@ -1,156 +0,0 @@ -// A path exclusive reservation system -// reserve([list, of, paths], fn) -// When the fn is first in line for all its paths, it -// is called with a cb that clears the reservation. -// -// Used by async unpack to avoid clobbering paths in use, -// while still allowing maximal safe parallelization. 
- -const assert = require('assert') -const normalize = require('./normalize-unicode.js') -const stripSlashes = require('./strip-trailing-slashes.js') -const { join } = require('path') - -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -const isWindows = platform === 'win32' - -module.exports = () => { - // path => [function or Set] - // A Set object means a directory reservation - // A fn is a direct reservation on that path - const queues = new Map() - - // fn => {paths:[path,...], dirs:[path, ...]} - const reservations = new Map() - - // return a set of parent dirs for a given path - // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] - const getDirs = path => { - const dirs = path.split('/').slice(0, -1).reduce((set, path) => { - if (set.length) { - path = join(set[set.length - 1], path) - } - set.push(path || '/') - return set - }, []) - return dirs - } - - // functions currently running - const running = new Set() - - // return the queues for each path the function cares about - // fn => {paths, dirs} - const getQueues = fn => { - const res = reservations.get(fn) - /* istanbul ignore if - unpossible */ - if (!res) { - throw new Error('function does not have any path reservations') - } - return { - paths: res.paths.map(path => queues.get(path)), - dirs: [...res.dirs].map(path => queues.get(path)), - } - } - - // check if fn is first in line for all its paths, and is - // included in the first set for all its dir queues - const check = fn => { - const { paths, dirs } = getQueues(fn) - return paths.every(q => q[0] === fn) && - dirs.every(q => q[0] instanceof Set && q[0].has(fn)) - } - - // run the function if it's first in line and not already running - const run = fn => { - if (running.has(fn) || !check(fn)) { - return false - } - running.add(fn) - fn(() => clear(fn)) - return true - } - - const clear = fn => { - if (!running.has(fn)) { - return false - } - - const { paths, dirs } = reservations.get(fn) - const next = new Set() 
- - paths.forEach(path => { - const q = queues.get(path) - assert.equal(q[0], fn) - if (q.length === 1) { - queues.delete(path) - } else { - q.shift() - if (typeof q[0] === 'function') { - next.add(q[0]) - } else { - q[0].forEach(fn => next.add(fn)) - } - } - }) - - dirs.forEach(dir => { - const q = queues.get(dir) - assert(q[0] instanceof Set) - if (q[0].size === 1 && q.length === 1) { - queues.delete(dir) - } else if (q[0].size === 1) { - q.shift() - - // must be a function or else the Set would've been reused - next.add(q[0]) - } else { - q[0].delete(fn) - } - }) - running.delete(fn) - - next.forEach(fn => run(fn)) - return true - } - - const reserve = (paths, fn) => { - // collide on matches across case and unicode normalization - // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally - // impossible to determine whether two paths refer to the same thing on - // disk, without asking the kernel for a shortname. - // So, we just pretend that every path matches every other path here, - // effectively removing all parallelization on windows. - paths = isWindows ? 
['win32 parallelization disabled'] : paths.map(p => { - // don't need normPath, because we skip this entirely for windows - return stripSlashes(join(normalize(p))).toLowerCase() - }) - - const dirs = new Set( - paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) - ) - reservations.set(fn, { dirs, paths }) - paths.forEach(path => { - const q = queues.get(path) - if (!q) { - queues.set(path, [fn]) - } else { - q.push(fn) - } - }) - dirs.forEach(dir => { - const q = queues.get(dir) - if (!q) { - queues.set(dir, [new Set([fn])]) - } else if (q[q.length - 1] instanceof Set) { - q[q.length - 1].add(fn) - } else { - q.push(new Set([fn])) - } - }) - - return run(fn) - } - - return { check, reserve } -} diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js deleted file mode 100644 index 6186266e..00000000 --- a/node_modules/tar/lib/read-entry.js +++ /dev/null @@ -1,107 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const normPath = require('./normalize-windows-path.js') - -const SLURP = Symbol('slurp') -module.exports = class ReadEntry extends Minipass { - constructor (header, ex, gex) { - super() - // read entries always start life paused. this is to avoid the - // situation where Minipass's auto-ending empty streams results - // in an entry ending before we're ready for it. 
- this.pause() - this.extended = ex - this.globalExtended = gex - this.header = header - this.startBlockSize = 512 * Math.ceil(header.size / 512) - this.blockRemain = this.startBlockSize - this.remain = header.size - this.type = header.type - this.meta = false - this.ignore = false - switch (this.type) { - case 'File': - case 'OldFile': - case 'Link': - case 'SymbolicLink': - case 'CharacterDevice': - case 'BlockDevice': - case 'Directory': - case 'FIFO': - case 'ContiguousFile': - case 'GNUDumpDir': - break - - case 'NextFileHasLongLinkpath': - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - case 'GlobalExtendedHeader': - case 'ExtendedHeader': - case 'OldExtendedHeader': - this.meta = true - break - - // NOTE: gnutar and bsdtar treat unrecognized types as 'File' - // it may be worth doing the same, but with a warning. - default: - this.ignore = true - } - - this.path = normPath(header.path) - this.mode = header.mode - if (this.mode) { - this.mode = this.mode & 0o7777 - } - this.uid = header.uid - this.gid = header.gid - this.uname = header.uname - this.gname = header.gname - this.size = header.size - this.mtime = header.mtime - this.atime = header.atime - this.ctime = header.ctime - this.linkpath = normPath(header.linkpath) - this.uname = header.uname - this.gname = header.gname - - if (ex) { - this[SLURP](ex) - } - if (gex) { - this[SLURP](gex, true) - } - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - - const r = this.remain - const br = this.blockRemain - this.remain = Math.max(0, r - writeLen) - this.blockRemain = Math.max(0, br - writeLen) - if (this.ignore) { - return true - } - - if (r >= writeLen) { - return super.write(data) - } - - // r < writeLen - return super.write(data.slice(0, r)) - } - - [SLURP] (ex, global) { - for (const k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because 
that's weird. - if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) { - this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] - } - } - } -} diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js deleted file mode 100644 index 8db6800b..00000000 --- a/node_modules/tar/lib/replace.js +++ /dev/null @@ -1,246 +0,0 @@ -'use strict' - -// tar -r -const hlo = require('./high-level-opt.js') -const Pack = require('./pack.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const t = require('./list.js') -const path = require('path') - -// starting at the head of the file, read a Header -// If the checksum is invalid, that's our position to start writing -// If it is, jump forward by the specified size (round up to 512) -// and try again. -// Write the new Pack stream starting there. - -const Header = require('./header.js') - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - return opt.sync ? 
replaceSync(opt, files) - : replace(opt, files, cb) -} - -const replaceSync = (opt, files) => { - const p = new Pack.Sync(opt) - - let threw = true - let fd - let position - - try { - try { - fd = fs.openSync(opt.file, 'r+') - } catch (er) { - if (er.code === 'ENOENT') { - fd = fs.openSync(opt.file, 'w+') - } else { - throw er - } - } - - const st = fs.fstatSync(fd) - const headBuf = Buffer.alloc(512) - - POSITION: for (position = 0; position < st.size; position += 512) { - for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { - bytes = fs.readSync( - fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos - ) - - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { - throw new Error('cannot append to compressed archives') - } - - if (!bytes) { - break POSITION - } - } - - const h = new Header(headBuf) - if (!h.cksumValid) { - break - } - const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > st.size) { - break - } - // the 512 for the header we just parsed will be added as well - // also jump ahead all the blocks for the body - position += entryBlockSize - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) - } - } - threw = false - - streamSync(opt, p, position, fd, files) - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } - } -} - -const streamSync = (opt, p, position, fd, files) => { - const stream = new fsm.WriteStreamSync(opt.file, { - fd: fd, - start: position, - }) - p.pipe(stream) - addFilesSync(p, files) -} - -const replace = (opt, files, cb) => { - files = Array.from(files) - const p = new Pack(opt) - - const getPos = (fd, size, cb_) => { - const cb = (er, pos) => { - if (er) { - fs.close(fd, _ => cb_(er)) - } else { - cb_(null, pos) - } - } - - let position = 0 - if (size === 0) { - return cb(null, 0) - } - - let bufPos = 0 - const headBuf = Buffer.alloc(512) - const onread = (er, bytes) => { - if (er) { - return cb(er) - } - bufPos += bytes - if 
(bufPos < 512 && bytes) { - return fs.read( - fd, headBuf, bufPos, headBuf.length - bufPos, - position + bufPos, onread - ) - } - - if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { - return cb(new Error('cannot append to compressed archives')) - } - - // truncated header - if (bufPos < 512) { - return cb(null, position) - } - - const h = new Header(headBuf) - if (!h.cksumValid) { - return cb(null, position) - } - - const entryBlockSize = 512 * Math.ceil(h.size / 512) - if (position + entryBlockSize + 512 > size) { - return cb(null, position) - } - - position += entryBlockSize + 512 - if (position >= size) { - return cb(null, position) - } - - if (opt.mtimeCache) { - opt.mtimeCache.set(h.path, h.mtime) - } - bufPos = 0 - fs.read(fd, headBuf, 0, 512, position, onread) - } - fs.read(fd, headBuf, 0, 512, position, onread) - } - - const promise = new Promise((resolve, reject) => { - p.on('error', reject) - let flag = 'r+' - const onopen = (er, fd) => { - if (er && er.code === 'ENOENT' && flag === 'r+') { - flag = 'w+' - return fs.open(opt.file, flag, onopen) - } - - if (er) { - return reject(er) - } - - fs.fstat(fd, (er, st) => { - if (er) { - return fs.close(fd, () => reject(er)) - } - - getPos(fd, st.size, (er, position) => { - if (er) { - return reject(er) - } - const stream = new fsm.WriteStream(opt.file, { - fd: fd, - start: position, - }) - p.pipe(stream) - stream.on('error', reject) - stream.on('close', resolve) - addFilesAsync(p, files) - }) - }) - } - fs.open(opt.file, flag, onopen) - }) - - return cb ? 
promise.then(cb, cb) : promise -} - -const addFilesSync = (p, files) => { - files.forEach(file => { - if (file.charAt(0) === '@') { - t({ - file: path.resolve(p.cwd, file.slice(1)), - sync: true, - noResume: true, - onentry: entry => p.add(entry), - }) - } else { - p.add(file) - } - }) - p.end() -} - -const addFilesAsync = (p, files) => { - while (files.length) { - const file = files.shift() - if (file.charAt(0) === '@') { - return t({ - file: path.resolve(p.cwd, file.slice(1)), - noResume: true, - onentry: entry => p.add(entry), - }).then(_ => addFilesAsync(p, files)) - } else { - p.add(file) - } - } - p.end() -} diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js deleted file mode 100644 index 03172e2c..00000000 --- a/node_modules/tar/lib/unpack.js +++ /dev/null @@ -1,923 +0,0 @@ -'use strict' - -// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. -// but the path reservations are required to avoid race conditions where -// parallelized unpack ops may mess with one another, due to dependencies -// (like a Link depending on its target) or destructive operations (like -// clobbering an fs object to create one of a different type.) 
- -const assert = require('assert') -const Parser = require('./parse.js') -const fs = require('fs') -const fsm = require('fs-minipass') -const path = require('path') -const mkdir = require('./mkdir.js') -const wc = require('./winchars.js') -const pathReservations = require('./path-reservations.js') -const stripAbsolutePath = require('./strip-absolute-path.js') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') -const normalize = require('./normalize-unicode.js') - -const ONENTRY = Symbol('onEntry') -const CHECKFS = Symbol('checkFs') -const CHECKFS2 = Symbol('checkFs2') -const PRUNECACHE = Symbol('pruneCache') -const ISREUSABLE = Symbol('isReusable') -const MAKEFS = Symbol('makeFs') -const FILE = Symbol('file') -const DIRECTORY = Symbol('directory') -const LINK = Symbol('link') -const SYMLINK = Symbol('symlink') -const HARDLINK = Symbol('hardlink') -const UNSUPPORTED = Symbol('unsupported') -const CHECKPATH = Symbol('checkPath') -const MKDIR = Symbol('mkdir') -const ONERROR = Symbol('onError') -const PENDING = Symbol('pending') -const PEND = Symbol('pend') -const UNPEND = Symbol('unpend') -const ENDED = Symbol('ended') -const MAYBECLOSE = Symbol('maybeClose') -const SKIP = Symbol('skip') -const DOCHOWN = Symbol('doChown') -const UID = Symbol('uid') -const GID = Symbol('gid') -const CHECKED_CWD = Symbol('checkedCwd') -const crypto = require('crypto') -const getFlag = require('./get-write-flag.js') -const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform -const isWindows = platform === 'win32' -const DEFAULT_MAX_DEPTH = 1024 - -// Unlinks on Windows are not atomic. 
-// -// This means that if you have a file entry, followed by another -// file entry with an identical name, and you cannot re-use the file -// (because it's a hardlink, or because unlink:true is set, or it's -// Windows, which does not have useful nlink values), then the unlink -// will be committed to the disk AFTER the new file has been written -// over the old one, deleting the new file. -// -// To work around this, on Windows systems, we rename the file and then -// delete the renamed file. It's a sloppy kludge, but frankly, I do not -// know of a better way to do this, given windows' non-atomic unlink -// semantics. -// -// See: https://github.com/npm/node-tar/issues/183 -/* istanbul ignore next */ -const unlinkFile = (path, cb) => { - if (!isWindows) { - return fs.unlink(path, cb) - } - - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') - fs.rename(path, name, er => { - if (er) { - return cb(er) - } - fs.unlink(name, cb) - }) -} - -/* istanbul ignore next */ -const unlinkFileSync = path => { - if (!isWindows) { - return fs.unlinkSync(path) - } - - const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') - fs.renameSync(path, name) - fs.unlinkSync(name) -} - -// this.gid, entry.gid, this.processUid -const uint32 = (a, b, c) => - a === a >>> 0 ? a - : b === b >>> 0 ? b - : c - -// clear the cache if it's a case-insensitive unicode-squashing match. -// we can't know if the current file system is case-sensitive or supports -// unicode fully, so we check for similarity on the maximally compatible -// representation. Err on the side of pruning, since all it's doing is -// preventing lstats, and it's not the end of the world if we get a false -// positive. -// Note that on windows, we always drop the entire cache whenever a -// symbolic link is encountered, because 8.3 filenames are impossible -// to reason about, and collisions are hazards rather than just failures. 
-const cacheKeyNormalize = path => stripSlash(normPath(normalize(path))) - .toLowerCase() - -const pruneCache = (cache, abs) => { - abs = cacheKeyNormalize(abs) - for (const path of cache.keys()) { - const pnorm = cacheKeyNormalize(path) - if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { - cache.delete(path) - } - } -} - -const dropCache = cache => { - for (const key of cache.keys()) { - cache.delete(key) - } -} - -class Unpack extends Parser { - constructor (opt) { - if (!opt) { - opt = {} - } - - opt.ondone = _ => { - this[ENDED] = true - this[MAYBECLOSE]() - } - - super(opt) - - this[CHECKED_CWD] = false - - this.reservations = pathReservations() - - this.transform = typeof opt.transform === 'function' ? opt.transform : null - - this.writable = true - this.readable = false - - this[PENDING] = 0 - this[ENDED] = false - - this.dirCache = opt.dirCache || new Map() - - if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { - // need both or neither - if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') { - throw new TypeError('cannot set owner without number uid and gid') - } - if (opt.preserveOwner) { - throw new TypeError( - 'cannot preserve owner in archive and also set owner explicitly') - } - this.uid = opt.uid - this.gid = opt.gid - this.setOwner = true - } else { - this.uid = null - this.gid = null - this.setOwner = false - } - - // default true for root - if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') { - this.preserveOwner = process.getuid && process.getuid() === 0 - } else { - this.preserveOwner = !!opt.preserveOwner - } - - this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? - process.getuid() : null - this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ? - process.getgid() : null - - // prevent excessively deep nesting of subfolders - // set to `Infinity` to remove this restriction - this.maxDepth = typeof opt.maxDepth === 'number' - ? 
opt.maxDepth - : DEFAULT_MAX_DEPTH - - // mostly just for testing, but useful in some cases. - // Forcibly trigger a chown on every entry, no matter what - this.forceChown = opt.forceChown === true - - // turn > this[ONENTRY](entry)) - } - - // a bad or damaged archive is a warning for Parser, but an error - // when extracting. Mark those errors as unrecoverable, because - // the Unpack contract cannot be met. - warn (code, msg, data = {}) { - if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { - data.recoverable = false - } - return super.warn(code, msg, data) - } - - [MAYBECLOSE] () { - if (this[ENDED] && this[PENDING] === 0) { - this.emit('prefinish') - this.emit('finish') - this.emit('end') - } - } - - [CHECKPATH] (entry) { - const p = normPath(entry.path) - const parts = p.split('/') - - if (this.strip) { - if (parts.length < this.strip) { - return false - } - if (entry.type === 'Link') { - const linkparts = normPath(entry.linkpath).split('/') - if (linkparts.length >= this.strip) { - entry.linkpath = linkparts.slice(this.strip).join('/') - } else { - return false - } - } - parts.splice(0, this.strip) - entry.path = parts.join('/') - } - - if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { - this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { - entry, - path: p, - depth: parts.length, - maxDepth: this.maxDepth, - }) - return false - } - - if (!this.preservePaths) { - if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) { - this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { - entry, - path: p, - }) - return false - } - - // strip off the root - const [root, stripped] = stripAbsolutePath(p) - if (root) { - entry.path = stripped - this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { - entry, - path: p, - }) - } - } - - if (path.isAbsolute(entry.path)) { - entry.absolute = normPath(path.resolve(entry.path)) - } else { - entry.absolute = normPath(path.resolve(this.cwd, entry.path)) - } - - // if we 
somehow ended up with a path that escapes the cwd, and we are - // not in preservePaths mode, then something is fishy! This should have - // been prevented above, so ignore this for coverage. - /* istanbul ignore if - defense in depth */ - if (!this.preservePaths && - entry.absolute.indexOf(this.cwd + '/') !== 0 && - entry.absolute !== this.cwd) { - this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { - entry, - path: normPath(entry.path), - resolvedPath: entry.absolute, - cwd: this.cwd, - }) - return false - } - - // an archive can set properties on the extraction directory, but it - // may not replace the cwd with a different kind of thing entirely. - if (entry.absolute === this.cwd && - entry.type !== 'Directory' && - entry.type !== 'GNUDumpDir') { - return false - } - - // only encode : chars that aren't drive letter indicators - if (this.win32) { - const { root: aRoot } = path.win32.parse(entry.absolute) - entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) - const { root: pRoot } = path.win32.parse(entry.path) - entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) - } - - return true - } - - [ONENTRY] (entry) { - if (!this[CHECKPATH](entry)) { - return entry.resume() - } - - assert.equal(typeof entry.absolute, 'string') - - switch (entry.type) { - case 'Directory': - case 'GNUDumpDir': - if (entry.mode) { - entry.mode = entry.mode | 0o700 - } - - // eslint-disable-next-line no-fallthrough - case 'File': - case 'OldFile': - case 'ContiguousFile': - case 'Link': - case 'SymbolicLink': - return this[CHECKFS](entry) - - case 'CharacterDevice': - case 'BlockDevice': - case 'FIFO': - default: - return this[UNSUPPORTED](entry) - } - } - - [ONERROR] (er, entry) { - // Cwd has to exist, or else nothing works. That's serious. - // Other errors are warnings, which raise the error in strict - // mode, but otherwise continue on. 
- if (er.name === 'CwdError') { - this.emit('error', er) - } else { - this.warn('TAR_ENTRY_ERROR', er, { entry }) - this[UNPEND]() - entry.resume() - } - } - - [MKDIR] (dir, mode, cb) { - mkdir(normPath(dir), { - uid: this.uid, - gid: this.gid, - processUid: this.processUid, - processGid: this.processGid, - umask: this.processUmask, - preserve: this.preservePaths, - unlink: this.unlink, - cache: this.dirCache, - cwd: this.cwd, - mode: mode, - noChmod: this.noChmod, - }, cb) - } - - [DOCHOWN] (entry) { - // in preserve owner mode, chown if the entry doesn't match process - // in set owner mode, chown if setting doesn't match process - return this.forceChown || - this.preserveOwner && - (typeof entry.uid === 'number' && entry.uid !== this.processUid || - typeof entry.gid === 'number' && entry.gid !== this.processGid) - || - (typeof this.uid === 'number' && this.uid !== this.processUid || - typeof this.gid === 'number' && this.gid !== this.processGid) - } - - [UID] (entry) { - return uint32(this.uid, entry.uid, this.processUid) - } - - [GID] (entry) { - return uint32(this.gid, entry.gid, this.processGid) - } - - [FILE] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.fmode - const stream = new fsm.WriteStream(entry.absolute, { - flags: getFlag(entry.size), - mode: mode, - autoClose: false, - }) - stream.on('error', er => { - if (stream.fd) { - fs.close(stream.fd, () => {}) - } - - // flush all the data out so that we aren't left hanging - // if the error wasn't actually fatal. otherwise the parse - // is blocked, and we never proceed. 
- stream.write = () => true - this[ONERROR](er, entry) - fullyDone() - }) - - let actions = 1 - const done = er => { - if (er) { - /* istanbul ignore else - we should always have a fd by now */ - if (stream.fd) { - fs.close(stream.fd, () => {}) - } - - this[ONERROR](er, entry) - fullyDone() - return - } - - if (--actions === 0) { - fs.close(stream.fd, er => { - if (er) { - this[ONERROR](er, entry) - } else { - this[UNPEND]() - } - fullyDone() - }) - } - } - - stream.on('finish', _ => { - // if futimes fails, try utimes - // if utimes fails, fail with the original error - // same for fchown/chown - const abs = entry.absolute - const fd = stream.fd - - if (entry.mtime && !this.noMtime) { - actions++ - const atime = entry.atime || new Date() - const mtime = entry.mtime - fs.futimes(fd, atime, mtime, er => - er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) - : done()) - } - - if (this[DOCHOWN](entry)) { - actions++ - const uid = this[UID](entry) - const gid = this[GID](entry) - fs.fchown(fd, uid, gid, er => - er ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) - : done()) - } - - done() - }) - - const tx = this.transform ? 
this.transform(entry) || entry : entry - if (tx !== entry) { - tx.on('error', er => { - this[ONERROR](er, entry) - fullyDone() - }) - entry.pipe(tx) - } - tx.pipe(stream) - } - - [DIRECTORY] (entry, fullyDone) { - const mode = entry.mode & 0o7777 || this.dmode - this[MKDIR](entry.absolute, mode, er => { - if (er) { - this[ONERROR](er, entry) - fullyDone() - return - } - - let actions = 1 - const done = _ => { - if (--actions === 0) { - fullyDone() - this[UNPEND]() - entry.resume() - } - } - - if (entry.mtime && !this.noMtime) { - actions++ - fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) - } - - if (this[DOCHOWN](entry)) { - actions++ - fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) - } - - done() - }) - } - - [UNSUPPORTED] (entry) { - entry.unsupported = true - this.warn('TAR_ENTRY_UNSUPPORTED', - `unsupported entry type: ${entry.type}`, { entry }) - entry.resume() - } - - [SYMLINK] (entry, done) { - this[LINK](entry, entry.linkpath, 'symlink', done) - } - - [HARDLINK] (entry, done) { - const linkpath = normPath(path.resolve(this.cwd, entry.linkpath)) - this[LINK](entry, linkpath, 'link', done) - } - - [PEND] () { - this[PENDING]++ - } - - [UNPEND] () { - this[PENDING]-- - this[MAYBECLOSE]() - } - - [SKIP] (entry) { - this[UNPEND]() - entry.resume() - } - - // Check if we can reuse an existing filesystem entry safely and - // overwrite it, rather than unlinking and recreating - // Windows doesn't report a useful nlink, so we just never reuse entries - [ISREUSABLE] (entry, st) { - return entry.type === 'File' && - !this.unlink && - st.isFile() && - st.nlink <= 1 && - !isWindows - } - - // check if a thing is there, and if so, try to clobber it - [CHECKFS] (entry) { - this[PEND]() - const paths = [entry.path] - if (entry.linkpath) { - paths.push(entry.linkpath) - } - this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) - } - - [PRUNECACHE] (entry) { - // if we are not creating a directory, and the path is 
in the dirCache, - // then that means we are about to delete the directory we created - // previously, and it is no longer going to be a directory, and neither - // is any of its children. - // If a symbolic link is encountered, all bets are off. There is no - // reasonable way to sanitize the cache in such a way we will be able to - // avoid having filesystem collisions. If this happens with a non-symlink - // entry, it'll just fail to unpack, but a symlink to a directory, using an - // 8.3 shortname or certain unicode attacks, can evade detection and lead - // to arbitrary writes to anywhere on the system. - if (entry.type === 'SymbolicLink') { - dropCache(this.dirCache) - } else if (entry.type !== 'Directory') { - pruneCache(this.dirCache, entry.absolute) - } - } - - [CHECKFS2] (entry, fullyDone) { - this[PRUNECACHE](entry) - - const done = er => { - this[PRUNECACHE](entry) - fullyDone(er) - } - - const checkCwd = () => { - this[MKDIR](this.cwd, this.dmode, er => { - if (er) { - this[ONERROR](er, entry) - done() - return - } - this[CHECKED_CWD] = true - start() - }) - } - - const start = () => { - if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) - if (parent !== this.cwd) { - return this[MKDIR](parent, this.dmode, er => { - if (er) { - this[ONERROR](er, entry) - done() - return - } - afterMakeParent() - }) - } - } - afterMakeParent() - } - - const afterMakeParent = () => { - fs.lstat(entry.absolute, (lstatEr, st) => { - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { - this[SKIP](entry) - done() - return - } - if (lstatEr || this[ISREUSABLE](entry, st)) { - return this[MAKEFS](null, entry, done) - } - - if (st.isDirectory()) { - if (entry.type === 'Directory') { - const needChmod = !this.noChmod && - entry.mode && - (st.mode & 0o7777) !== entry.mode - const afterChmod = er => this[MAKEFS](er, entry, done) - if (!needChmod) { - return afterChmod() - } - return fs.chmod(entry.absolute, entry.mode, 
afterChmod) - } - // Not a dir entry, have to remove it. - // NB: the only way to end up with an entry that is the cwd - // itself, in such a way that == does not detect, is a - // tricky windows absolute path with UNC or 8.3 parts (and - // preservePaths:true, or else it will have been stripped). - // In that case, the user has opted out of path protections - // explicitly, so if they blow away the cwd, c'est la vie. - if (entry.absolute !== this.cwd) { - return fs.rmdir(entry.absolute, er => - this[MAKEFS](er, entry, done)) - } - } - - // not a dir, and not reusable - // don't remove if the cwd, we want that error - if (entry.absolute === this.cwd) { - return this[MAKEFS](null, entry, done) - } - - unlinkFile(entry.absolute, er => - this[MAKEFS](er, entry, done)) - }) - } - - if (this[CHECKED_CWD]) { - start() - } else { - checkCwd() - } - } - - [MAKEFS] (er, entry, done) { - if (er) { - this[ONERROR](er, entry) - done() - return - } - - switch (entry.type) { - case 'File': - case 'OldFile': - case 'ContiguousFile': - return this[FILE](entry, done) - - case 'Link': - return this[HARDLINK](entry, done) - - case 'SymbolicLink': - return this[SYMLINK](entry, done) - - case 'Directory': - case 'GNUDumpDir': - return this[DIRECTORY](entry, done) - } - } - - [LINK] (entry, linkpath, link, done) { - // XXX: get the type ('symlink' or 'junction') for windows - fs[link](linkpath, entry.absolute, er => { - if (er) { - this[ONERROR](er, entry) - } else { - this[UNPEND]() - entry.resume() - } - done() - }) - } -} - -const callSync = fn => { - try { - return [null, fn()] - } catch (er) { - return [er, null] - } -} -class UnpackSync extends Unpack { - [MAKEFS] (er, entry) { - return super[MAKEFS](er, entry, () => {}) - } - - [CHECKFS] (entry) { - this[PRUNECACHE](entry) - - if (!this[CHECKED_CWD]) { - const er = this[MKDIR](this.cwd, this.dmode) - if (er) { - return this[ONERROR](er, entry) - } - this[CHECKED_CWD] = true - } - - // don't bother to make the parent if the 
current entry is the cwd, - // we've already checked it. - if (entry.absolute !== this.cwd) { - const parent = normPath(path.dirname(entry.absolute)) - if (parent !== this.cwd) { - const mkParent = this[MKDIR](parent, this.dmode) - if (mkParent) { - return this[ONERROR](mkParent, entry) - } - } - } - - const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) - if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { - return this[SKIP](entry) - } - - if (lstatEr || this[ISREUSABLE](entry, st)) { - return this[MAKEFS](null, entry) - } - - if (st.isDirectory()) { - if (entry.type === 'Directory') { - const needChmod = !this.noChmod && - entry.mode && - (st.mode & 0o7777) !== entry.mode - const [er] = needChmod ? callSync(() => { - fs.chmodSync(entry.absolute, entry.mode) - }) : [] - return this[MAKEFS](er, entry) - } - // not a dir entry, have to remove it - const [er] = callSync(() => fs.rmdirSync(entry.absolute)) - this[MAKEFS](er, entry) - } - - // not a dir, and not reusable. - // don't remove if it's the cwd, since we want that error. - const [er] = entry.absolute === this.cwd ? [] - : callSync(() => unlinkFileSync(entry.absolute)) - this[MAKEFS](er, entry) - } - - [FILE] (entry, done) { - const mode = entry.mode & 0o7777 || this.fmode - - const oner = er => { - let closeError - try { - fs.closeSync(fd) - } catch (e) { - closeError = e - } - if (er || closeError) { - this[ONERROR](er || closeError, entry) - } - done() - } - - let fd - try { - fd = fs.openSync(entry.absolute, getFlag(entry.size), mode) - } catch (er) { - return oner(er) - } - const tx = this.transform ? 
this.transform(entry) || entry : entry - if (tx !== entry) { - tx.on('error', er => this[ONERROR](er, entry)) - entry.pipe(tx) - } - - tx.on('data', chunk => { - try { - fs.writeSync(fd, chunk, 0, chunk.length) - } catch (er) { - oner(er) - } - }) - - tx.on('end', _ => { - let er = null - // try both, falling futimes back to utimes - // if either fails, handle the first error - if (entry.mtime && !this.noMtime) { - const atime = entry.atime || new Date() - const mtime = entry.mtime - try { - fs.futimesSync(fd, atime, mtime) - } catch (futimeser) { - try { - fs.utimesSync(entry.absolute, atime, mtime) - } catch (utimeser) { - er = futimeser - } - } - } - - if (this[DOCHOWN](entry)) { - const uid = this[UID](entry) - const gid = this[GID](entry) - - try { - fs.fchownSync(fd, uid, gid) - } catch (fchowner) { - try { - fs.chownSync(entry.absolute, uid, gid) - } catch (chowner) { - er = er || fchowner - } - } - } - - oner(er) - }) - } - - [DIRECTORY] (entry, done) { - const mode = entry.mode & 0o7777 || this.dmode - const er = this[MKDIR](entry.absolute, mode) - if (er) { - this[ONERROR](er, entry) - done() - return - } - if (entry.mtime && !this.noMtime) { - try { - fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) - } catch (er) {} - } - if (this[DOCHOWN](entry)) { - try { - fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry)) - } catch (er) {} - } - done() - entry.resume() - } - - [MKDIR] (dir, mode) { - try { - return mkdir.sync(normPath(dir), { - uid: this.uid, - gid: this.gid, - processUid: this.processUid, - processGid: this.processGid, - umask: this.processUmask, - preserve: this.preservePaths, - unlink: this.unlink, - cache: this.dirCache, - cwd: this.cwd, - mode: mode, - }) - } catch (er) { - return er - } - } - - [LINK] (entry, linkpath, link, done) { - try { - fs[link + 'Sync'](linkpath, entry.absolute) - done() - entry.resume() - } catch (er) { - return this[ONERROR](er, entry) - } - } -} - -Unpack.Sync = UnpackSync 
-module.exports = Unpack diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js deleted file mode 100644 index 4d328543..00000000 --- a/node_modules/tar/lib/update.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict' - -// tar -u - -const hlo = require('./high-level-opt.js') -const r = require('./replace.js') -// just call tar.r with the filter and mtimeCache - -module.exports = (opt_, files, cb) => { - const opt = hlo(opt_) - - if (!opt.file) { - throw new TypeError('file is required') - } - - if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { - throw new TypeError('cannot append to compressed archives') - } - - if (!files || !Array.isArray(files) || !files.length) { - throw new TypeError('no files or directories specified') - } - - files = Array.from(files) - - mtimeFilter(opt) - return r(opt, files, cb) -} - -const mtimeFilter = opt => { - const filter = opt.filter - - if (!opt.mtimeCache) { - opt.mtimeCache = new Map() - } - - opt.filter = filter ? 
(path, stat) => - filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) - : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime) -} diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js deleted file mode 100644 index 7d2f3eb1..00000000 --- a/node_modules/tar/lib/write-entry.js +++ /dev/null @@ -1,546 +0,0 @@ -'use strict' -const { Minipass } = require('minipass') -const Pax = require('./pax.js') -const Header = require('./header.js') -const fs = require('fs') -const path = require('path') -const normPath = require('./normalize-windows-path.js') -const stripSlash = require('./strip-trailing-slashes.js') - -const prefixPath = (path, prefix) => { - if (!prefix) { - return normPath(path) - } - path = normPath(path).replace(/^\.(\/|$)/, '') - return stripSlash(prefix) + '/' + path -} - -const maxReadSize = 16 * 1024 * 1024 -const PROCESS = Symbol('process') -const FILE = Symbol('file') -const DIRECTORY = Symbol('directory') -const SYMLINK = Symbol('symlink') -const HARDLINK = Symbol('hardlink') -const HEADER = Symbol('header') -const READ = Symbol('read') -const LSTAT = Symbol('lstat') -const ONLSTAT = Symbol('onlstat') -const ONREAD = Symbol('onread') -const ONREADLINK = Symbol('onreadlink') -const OPENFILE = Symbol('openfile') -const ONOPENFILE = Symbol('onopenfile') -const CLOSE = Symbol('close') -const MODE = Symbol('mode') -const AWAITDRAIN = Symbol('awaitDrain') -const ONDRAIN = Symbol('ondrain') -const PREFIX = Symbol('prefix') -const HAD_ERROR = Symbol('hadError') -const warner = require('./warn-mixin.js') -const winchars = require('./winchars.js') -const stripAbsolutePath = require('./strip-absolute-path.js') - -const modeFix = require('./mode-fix.js') - -const WriteEntry = warner(class WriteEntry extends Minipass { - constructor (p, opt) { - opt = opt || {} - super(opt) - if (typeof p !== 'string') { - throw new TypeError('path is required') - } - this.path = normPath(p) - // suppress atime, ctime, uid, gid, uname, 
gname - this.portable = !!opt.portable - // until node has builtin pwnam functions, this'll have to do - this.myuid = process.getuid && process.getuid() || 0 - this.myuser = process.env.USER || '' - this.maxReadSize = opt.maxReadSize || maxReadSize - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.preservePaths = !!opt.preservePaths - this.cwd = normPath(opt.cwd || process.cwd()) - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - this.prefix = opt.prefix ? normPath(opt.prefix) : null - - this.fd = null - this.blockLen = null - this.blockRemain = null - this.buf = null - this.offset = null - this.length = null - this.pos = null - this.remain = null - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.win32 = !!opt.win32 || process.platform === 'win32' - if (this.win32) { - // force the \ to / normalization, since we might not *actually* - // be on windows, but want \ to be considered a path separator. 
- this.path = winchars.decode(this.path.replace(/\\/g, '/')) - p = p.replace(/\\/g, '/') - } - - this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) - - if (this.path === '') { - this.path = './' - } - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.statCache.has(this.absolute)) { - this[ONLSTAT](this.statCache.get(this.absolute)) - } else { - this[LSTAT]() - } - } - - emit (ev, ...data) { - if (ev === 'error') { - this[HAD_ERROR] = true - } - return super.emit(ev, ...data) - } - - [LSTAT] () { - fs.lstat(this.absolute, (er, stat) => { - if (er) { - return this.emit('error', er) - } - this[ONLSTAT](stat) - }) - } - - [ONLSTAT] (stat) { - this.statCache.set(this.absolute, stat) - this.stat = stat - if (!stat.isFile()) { - stat.size = 0 - } - this.type = getType(stat) - this.emit('stat', stat) - this[PROCESS]() - } - - [PROCESS] () { - switch (this.type) { - case 'File': return this[FILE]() - case 'Directory': return this[DIRECTORY]() - case 'SymbolicLink': return this[SYMLINK]() - // unsupported types are ignored. - default: return this.end() - } - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [HEADER] () { - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.header = new Header({ - path: this[PREFIX](this.path), - // only apply the prefix to hard links. - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this[MODE](this.stat.mode), - uid: this.portable ? null : this.stat.uid, - gid: this.portable ? null : this.stat.gid, - size: this.stat.size, - mtime: this.noMtime ? 
null : this.mtime || this.stat.mtime, - type: this.type, - uname: this.portable ? null : - this.stat.uid === this.myuid ? this.myuser : '', - atime: this.portable ? null : this.stat.atime, - ctime: this.portable ? null : this.stat.ctime, - }) - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.header.atime, - ctime: this.portable ? null : this.header.ctime, - gid: this.portable ? null : this.header.gid, - mtime: this.noMtime ? null : this.mtime || this.header.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.header.size, - uid: this.portable ? null : this.header.uid, - uname: this.portable ? null : this.header.uname, - dev: this.portable ? null : this.stat.dev, - ino: this.portable ? null : this.stat.ino, - nlink: this.portable ? null : this.stat.nlink, - }).encode()) - } - super.write(this.header.block) - } - - [DIRECTORY] () { - if (this.path.slice(-1) !== '/') { - this.path += '/' - } - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [SYMLINK] () { - fs.readlink(this.absolute, (er, linkpath) => { - if (er) { - return this.emit('error', er) - } - this[ONREADLINK](linkpath) - }) - } - - [ONREADLINK] (linkpath) { - this.linkpath = normPath(linkpath) - this[HEADER]() - this.end() - } - - [HARDLINK] (linkpath) { - this.type = 'Link' - this.linkpath = normPath(path.relative(this.cwd, linkpath)) - this.stat.size = 0 - this[HEADER]() - this.end() - } - - [FILE] () { - if (this.stat.nlink > 1) { - const linkKey = this.stat.dev + ':' + this.stat.ino - if (this.linkCache.has(linkKey)) { - const linkpath = this.linkCache.get(linkKey) - if (linkpath.indexOf(this.cwd) === 0) { - return this[HARDLINK](linkpath) - } - } - this.linkCache.set(linkKey, this.absolute) - } - - this[HEADER]() - if (this.stat.size === 0) { - return this.end() - } - - this[OPENFILE]() - } - - [OPENFILE] () { - fs.open(this.absolute, 'r', (er, fd) => { - 
if (er) { - return this.emit('error', er) - } - this[ONOPENFILE](fd) - }) - } - - [ONOPENFILE] (fd) { - this.fd = fd - if (this[HAD_ERROR]) { - return this[CLOSE]() - } - - this.blockLen = 512 * Math.ceil(this.stat.size / 512) - this.blockRemain = this.blockLen - const bufLen = Math.min(this.blockLen, this.maxReadSize) - this.buf = Buffer.allocUnsafe(bufLen) - this.offset = 0 - this.pos = 0 - this.remain = this.stat.size - this.length = this.buf.length - this[READ]() - } - - [READ] () { - const { fd, buf, offset, length, pos } = this - fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { - if (er) { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - return this[CLOSE](() => this.emit('error', er)) - } - this[ONREAD](bytesRead) - }) - } - - [CLOSE] (cb) { - fs.close(this.fd, cb) - } - - [ONREAD] (bytesRead) { - if (bytesRead <= 0 && this.remain > 0) { - const er = new Error('encountered unexpected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - if (bytesRead > this.remain) { - const er = new Error('did not encounter expected EOF') - er.path = this.absolute - er.syscall = 'read' - er.code = 'EOF' - return this[CLOSE](() => this.emit('error', er)) - } - - // null out the rest of the buffer, if we could fit the block padding - // at the end of this loop, we've incremented bytesRead and this.remain - // to be incremented up to the blockRemain level, as if we had expected - // to get a null-padded file, and read it until the end. then we will - // decrement both remain and blockRemain by bytesRead, and know that we - // reached the expected EOF, without any null buffer to append. 
- if (bytesRead === this.remain) { - for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { - this.buf[i + this.offset] = 0 - bytesRead++ - this.remain++ - } - } - - const writeBuf = this.offset === 0 && bytesRead === this.buf.length ? - this.buf : this.buf.slice(this.offset, this.offset + bytesRead) - - const flushed = this.write(writeBuf) - if (!flushed) { - this[AWAITDRAIN](() => this[ONDRAIN]()) - } else { - this[ONDRAIN]() - } - } - - [AWAITDRAIN] (cb) { - this.once('drain', cb) - } - - write (writeBuf) { - if (this.blockRemain < writeBuf.length) { - const er = new Error('writing more data than expected') - er.path = this.absolute - return this.emit('error', er) - } - this.remain -= writeBuf.length - this.blockRemain -= writeBuf.length - this.pos += writeBuf.length - this.offset += writeBuf.length - return super.write(writeBuf) - } - - [ONDRAIN] () { - if (!this.remain) { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return this[CLOSE](er => er ? this.emit('error', er) : this.end()) - } - - if (this.offset >= this.length) { - // if we only have a smaller bit left to read, alloc a smaller buffer - // otherwise, keep it the same length it was before. 
- this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)) - this.offset = 0 - } - this.length = this.buf.length - this.offset - this[READ]() - } -}) - -class WriteEntrySync extends WriteEntry { - [LSTAT] () { - this[ONLSTAT](fs.lstatSync(this.absolute)) - } - - [SYMLINK] () { - this[ONREADLINK](fs.readlinkSync(this.absolute)) - } - - [OPENFILE] () { - this[ONOPENFILE](fs.openSync(this.absolute, 'r')) - } - - [READ] () { - let threw = true - try { - const { fd, buf, offset, length, pos } = this - const bytesRead = fs.readSync(fd, buf, offset, length, pos) - this[ONREAD](bytesRead) - threw = false - } finally { - // ignoring the error from close(2) is a bad practice, but at - // this point we already have an error, don't need another one - if (threw) { - try { - this[CLOSE](() => {}) - } catch (er) {} - } - } - } - - [AWAITDRAIN] (cb) { - cb() - } - - [CLOSE] (cb) { - fs.closeSync(this.fd) - cb() - } -} - -const WriteEntryTar = warner(class WriteEntryTar extends Minipass { - constructor (readEntry, opt) { - opt = opt || {} - super(opt) - this.preservePaths = !!opt.preservePaths - this.portable = !!opt.portable - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.noMtime = !!opt.noMtime - - this.readEntry = readEntry - this.type = readEntry.type - if (this.type === 'Directory' && this.portable) { - this.noMtime = true - } - - this.prefix = opt.prefix || null - - this.path = normPath(readEntry.path) - this.mode = this[MODE](readEntry.mode) - this.uid = this.portable ? null : readEntry.uid - this.gid = this.portable ? null : readEntry.gid - this.uname = this.portable ? null : readEntry.uname - this.gname = this.portable ? null : readEntry.gname - this.size = readEntry.size - this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime - this.atime = this.portable ? null : readEntry.atime - this.ctime = this.portable ? 
null : readEntry.ctime - this.linkpath = normPath(readEntry.linkpath) - - if (typeof opt.onwarn === 'function') { - this.on('warn', opt.onwarn) - } - - let pathWarn = false - if (!this.preservePaths) { - const [root, stripped] = stripAbsolutePath(this.path) - if (root) { - this.path = stripped - pathWarn = root - } - } - - this.remain = readEntry.size - this.blockRemain = readEntry.startBlockSize - - this.header = new Header({ - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - // only the permissions and setuid/setgid/sticky bitflags - // not the higher-order bits that specify file type - mode: this.mode, - uid: this.portable ? null : this.uid, - gid: this.portable ? null : this.gid, - size: this.size, - mtime: this.noMtime ? null : this.mtime, - type: this.type, - uname: this.portable ? null : this.uname, - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - }) - - if (pathWarn) { - this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { - entry: this, - path: pathWarn + this.path, - }) - } - - if (this.header.encode() && !this.noPax) { - super.write(new Pax({ - atime: this.portable ? null : this.atime, - ctime: this.portable ? null : this.ctime, - gid: this.portable ? null : this.gid, - mtime: this.noMtime ? null : this.mtime, - path: this[PREFIX](this.path), - linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) - : this.linkpath, - size: this.size, - uid: this.portable ? null : this.uid, - uname: this.portable ? null : this.uname, - dev: this.portable ? null : this.readEntry.dev, - ino: this.portable ? null : this.readEntry.ino, - nlink: this.portable ? 
null : this.readEntry.nlink, - }).encode()) - } - - super.write(this.header.block) - readEntry.pipe(this) - } - - [PREFIX] (path) { - return prefixPath(path, this.prefix) - } - - [MODE] (mode) { - return modeFix(mode, this.type === 'Directory', this.portable) - } - - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) { - throw new Error('writing more to entry than is appropriate') - } - this.blockRemain -= writeLen - return super.write(data) - } - - end () { - if (this.blockRemain) { - super.write(Buffer.alloc(this.blockRemain)) - } - return super.end() - } -}) - -WriteEntry.Sync = WriteEntrySync -WriteEntry.Tar = WriteEntryTar - -const getType = stat => - stat.isFile() ? 'File' - : stat.isDirectory() ? 'Directory' - : stat.isSymbolicLink() ? 'SymbolicLink' - : 'Unsupported' - -module.exports = WriteEntry diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json deleted file mode 100644 index f84a41cc..00000000 --- a/node_modules/tar/package.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "author": "GitHub Inc.", - "name": "tar", - "description": "tar for node", - "version": "6.2.1", - "repository": { - "type": "git", - "url": "https://github.com/isaacs/node-tar.git" - }, - "scripts": { - "genparse": "node scripts/generate-parse-fixtures.js", - "snap": "tap", - "test": "tap" - }, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", - "chmodr": "^1.2.0", - "end-of-stream": "^1.4.3", - "events-to-array": "^2.0.3", - "mutate-fs": "^2.1.1", - "nock": "^13.2.9", - "rimraf": "^3.0.2", - "tap": "^16.0.1" - }, - "license": "ISC", - "engines": { - "node": ">=10" - }, - "files": [ - "bin/", - "lib/", - "index.js" - ], - "tap": { - "coverage-map": "map.js", - "timeout": 0, - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - }, 
- "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0", - "content": "scripts/template-oss", - "engines": ">=10", - "distPaths": [ - "index.js" - ], - "allowPaths": [ - "/index.js" - ], - "ciVersions": [ - "10.x", - "12.x", - "14.x", - "16.x", - "18.x" - ] - } -} diff --git a/package-lock.json b/package-lock.json index 3f928ea4..fc7c76a2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,12 +12,14 @@ "@adminjs/fastify": "^3.0.1", "@adminjs/mongoose": "^3.0.1", "@fastify/env": "^3.0.0", + "@fastify/formbody": "^8.0.2", "@fastify/jwt": "^6.5.0", + "@fastify/multipart": "^9.0.3", "@fastify/view": "^7.1.2", "@google-cloud/storage": "^6.10.1", "adminjs": "^6.7.2", "axios": "^1.7.2", - "bcrypt": "^5.1.1", + "bcryptjs": "^3.0.2", "body-parser": "^1.19.0", "boom": "^7.3.0", "chalk": "^4.1.0", @@ -35,12 +37,12 @@ "fastify-file-upload": "^4.0.0", "fastify-formbody": "^5.3.0", "fastify-jwt": "^4.2.0", - "fastify-multer": "^2.0.2", + "fastify-multer": "^2.0.3", "fastify-multipart": "^5.4.0", "fastify-static": "^4.7.0", "fastify-swagger": "^5.2.0", "file-type": "^18.5.0", - "firebase-admin": "^12.3.1", + "firebase-admin": "^13.0.2", "form-data": "^2.3.3", "formidable": "^2.1.1", "fs": "^0.0.1-security", @@ -48,6 +50,7 @@ "gridfile": "^1.1.3", "image-type": "^5.2.0", "libphonenumber-js": "^1.9.6", + "mime-types": "^3.0.1", "moment": "^2.29.4", "moment-timezone": "^0.5.43", "mongodb": "^5.0.1", @@ -55,7 +58,7 @@ "mqtt": "^5.10.1", "multer": "^1.4.5-lts.1", "mv": "^2.1.1", - "node-cron": "^3.0.2", + "node-cron": "^3.0.3", "node-schedule": "^2.1.1", "nodemon": "^2.0.20", "nunjucks": "^3.2.3", @@ -149,6 +152,31 @@ "fast-uri": "^2.0.0" } }, + "node_modules/@adminjs/fastify/node_modules/@fastify/formbody": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", + "integrity": 
"sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "dependencies": { + "fast-querystring": "^1.0.0", + "fastify-plugin": "^4.0.0" + } + }, + "node_modules/@adminjs/fastify/node_modules/@fastify/multipart": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.7.3.tgz", + "integrity": "sha512-MG4Gd9FNEXc8qx0OgqoXM10EGO/dN/0iVQ8SrpFMU3d6F6KUfcqD2ZyoQhkm9LWrbiMgdHv5a43x78lASdn5GA==", + "dependencies": { + "@fastify/busboy": "^1.0.0", + "@fastify/deepmerge": "^1.0.0", + "@fastify/error": "^3.0.0", + "@fastify/swagger": "^8.3.1", + "@fastify/swagger-ui": "^1.8.0", + "end-of-stream": "^1.4.4", + "fastify-plugin": "^4.0.0", + "secure-json-parse": "^2.4.0", + "stream-wormhole": "^1.1.0" + } + }, "node_modules/@adminjs/fastify/node_modules/avvio": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/avvio/-/avvio-8.2.0.tgz", @@ -215,6 +243,17 @@ "node": ">=10" } }, + "node_modules/@adminjs/fastify/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/@adminjs/fastify/node_modules/pino": { "version": "8.8.0", "resolved": "https://registry.npmjs.org/pino/-/pino-8.8.0.tgz", @@ -2235,6 +2274,14 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.47.tgz", "integrity": "sha512-OuJi8bIng4wYHHA3YpKauL58dZrPxro3d0tabPHyiNF8rKfGKuVfr83oFlPLmKri1cX+Z3cJP39GXmnqkP11Gw==" }, + "node_modules/@fastify/accept-negotiator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@fastify/accept-negotiator/-/accept-negotiator-1.1.0.tgz", + "integrity": "sha512-OIHZrb2ImZ7XG85HXOONLcJWGosv7sIvM2ifAPQVhg9Lv7qdmMBNVaai4QTdyuaqbKM5eO6sLSQOYI7wEQeCJQ==", + "engines": { + "node": 
">=14" + } + }, "node_modules/@fastify/ajv-compiler": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-1.1.0.tgz", @@ -2329,14 +2376,29 @@ } }, "node_modules/@fastify/formbody": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", - "integrity": "sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-8.0.2.tgz", + "integrity": "sha512-84v5J2KrkXzjgBpYnaNRPqwgMsmY7ZDjuj0YVuMR3NXCJRCgKEZy/taSP1wUYGn0onfxJpLyRGDLa+NMaDJtnA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], "dependencies": { - "fast-querystring": "^1.0.0", - "fastify-plugin": "^4.0.0" + "fast-querystring": "^1.1.2", + "fastify-plugin": "^5.0.0" } }, + "node_modules/@fastify/formbody/node_modules/fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": "sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + }, "node_modules/@fastify/jwt": { "version": "6.5.0", "resolved": "https://registry.npmjs.org/@fastify/jwt/-/jwt-6.5.0.tgz", @@ -2350,18 +2412,103 @@ } }, "node_modules/@fastify/multipart": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.3.0.tgz", - "integrity": "sha512-tbzQiRFxoADCn0G10CqiQ/nDWWcfegtwg826Pfz2h7+XvuqJhGnko0TbafrWIY7hnGD+sNCGMdiTVsxxs6zigA==", + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-9.0.3.tgz", + "integrity": "sha512-pJogxQCrT12/6I5Fh6jr3narwcymA0pv4B0jbC7c6Bl9wnrxomEUnV0d26w6gUls7gSXmhG8JGRMmHFIPsxt1g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" 
+ }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], "dependencies": { - "@fastify/busboy": "^1.0.0", - "@fastify/deepmerge": "^1.0.0", - "@fastify/error": "^3.0.0", - "end-of-stream": "^1.4.4", - "fastify-plugin": "^4.0.0", - "hexoid": "^1.0.0", - "secure-json-parse": "^2.4.0", - "stream-wormhole": "^1.1.0" + "@fastify/busboy": "^3.0.0", + "@fastify/deepmerge": "^2.0.0", + "@fastify/error": "^4.0.0", + "fastify-plugin": "^5.0.0", + "secure-json-parse": "^3.0.0" + } + }, + "node_modules/@fastify/multipart/node_modules/@fastify/busboy": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", + "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==" + }, + "node_modules/@fastify/multipart/node_modules/@fastify/deepmerge": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.2.tgz", + "integrity": "sha512-3wuLdX5iiiYeZWP6bQrjqhrcvBIf0NHbQH1Ur1WbHvoiuTYUEItgygea3zs8aHpiitn0lOB8gX20u1qO+FDm7Q==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/multipart/node_modules/@fastify/error": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz", + "integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/multipart/node_modules/fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": 
"sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + }, + "node_modules/@fastify/multipart/node_modules/secure-json-parse": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-3.0.2.tgz", + "integrity": "sha512-H6nS2o8bWfpFEV6U38sOSjS7bTbdgbCGU9wEM6W14P5H0QOsz94KCusifV44GpHDTu2nqZbuDNhTzu+mjDSw1w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/@fastify/send": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@fastify/send/-/send-2.1.0.tgz", + "integrity": "sha512-yNYiY6sDkexoJR0D8IDy3aRP3+L4wdqCpvx5WP+VtEU58sn7USmKynBzDQex5X42Zzvw2gNzzYgP90UfWShLFA==", + "dependencies": { + "@lukeed/ms": "^2.0.1", + "escape-html": "~1.0.3", + "fast-decode-uri-component": "^1.0.1", + "http-errors": "2.0.0", + "mime": "^3.0.0" + } + }, + "node_modules/@fastify/send/node_modules/mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=10.0.0" } }, "node_modules/@fastify/session": { @@ -2380,6 +2527,129 @@ "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-3.0.1.tgz", "integrity": "sha512-qKcDXmuZadJqdTm6vlCqioEbyewF60b/0LOFCcYN1B6BIZGlYJumWWOYs70SFYLDAH4YqdE1cxH/RKMG7rFxgA==" }, + "node_modules/@fastify/static": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@fastify/static/-/static-6.12.0.tgz", + "integrity": "sha512-KK1B84E6QD/FcQWxDI2aiUCwHxMJBI1KeCUzm1BwYpPY1b742+jeKruGHP2uOluuM6OkBPI8CIANrXcCRtC2oQ==", + "dependencies": { + "@fastify/accept-negotiator": "^1.0.0", + "@fastify/send": "^2.0.0", + "content-disposition": "^0.5.3", + "fastify-plugin": "^4.0.0", + 
"glob": "^8.0.1", + "p-limit": "^3.1.0" + } + }, + "node_modules/@fastify/static/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@fastify/static/node_modules/glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@fastify/static/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@fastify/swagger": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/@fastify/swagger/-/swagger-8.15.0.tgz", + "integrity": "sha512-zy+HEEKFqPMS2sFUsQU5X0MHplhKJvWeohBwTCkBAJA/GDYGLGUWQaETEhptiqxK7Hs0fQB9B4MDb3pbwIiCwA==", + "dependencies": { + "fastify-plugin": "^4.0.0", + "json-schema-resolver": "^2.0.0", + "openapi-types": "^12.0.0", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + } + }, + "node_modules/@fastify/swagger-ui": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@fastify/swagger-ui/-/swagger-ui-1.10.2.tgz", + "integrity": 
"sha512-f2mRqtblm6eRAFQ3e8zSngxVNEtiYY7rISKQVjPA++ZsWc5WYlPVTb6Bx0G/zy0BIoucNqDr/Q2Vb/kTYkOq1A==", + "dependencies": { + "@fastify/static": "^6.0.0", + "fastify-plugin": "^4.0.0", + "openapi-types": "^12.0.2", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + } + }, + "node_modules/@fastify/swagger-ui/node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "node_modules/@fastify/swagger-ui/node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/@fastify/swagger/node_modules/json-schema-resolver": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-resolver/-/json-schema-resolver-2.0.0.tgz", + "integrity": "sha512-pJ4XLQP4Q9HTxl6RVDLJ8Cyh1uitSs0CzDBAz1uoJ4sRD/Bk7cFSXL1FUXDW3zJ7YnfliJx6eu8Jn283bpZ4Yg==", + "dependencies": { + "debug": "^4.1.1", + "rfdc": "^1.1.4", + "uri-js": "^4.2.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/Eomm/json-schema-resolver?sponsor=1" + } + }, + "node_modules/@fastify/swagger/node_modules/openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "node_modules/@fastify/swagger/node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "bin": { + "yaml": "bin.mjs" + }, + 
"engines": { + "node": ">= 14.6" + } + }, "node_modules/@fastify/view": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@fastify/view/-/view-7.3.0.tgz", @@ -2390,79 +2660,94 @@ } }, "node_modules/@firebase/app-check-interop-types": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.2.tgz", - "integrity": "sha512-LMs47Vinv2HBMZi49C09dJxp0QT5LwDzFaVGf/+ITHe3BlIhUiLNttkATSXplc89A2lAaeTqjgqVkiRfUGyQiQ==" + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.3.tgz", + "integrity": "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A==" }, "node_modules/@firebase/app-types": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.2.tgz", - "integrity": "sha512-oMEZ1TDlBz479lmABwWsWjzHwheQKiAgnuKxE0pz0IXCVx7/rtlkx1fQ6GfgK24WCrxDKMplZrT50Kh04iMbXQ==" + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.3.tgz", + "integrity": "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw==" }, "node_modules/@firebase/auth-interop-types": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.3.tgz", - "integrity": "sha512-Fc9wuJGgxoxQeavybiuwgyi+0rssr76b+nHpj+eGhXFYAdudMWyfBHvFL/I5fEHniUM/UQdFzi9VXJK2iZF7FQ==" + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.4.tgz", + "integrity": "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA==" }, "node_modules/@firebase/component": { - "version": "0.6.8", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.8.tgz", - "integrity": 
"sha512-LcNvxGLLGjBwB0dJUsBGCej2fqAepWyBubs4jt1Tiuns7QLbXHuyObZ4aMeBjZjWx4m8g1LoVI9QFpSaq/k4/g==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.11.tgz", + "integrity": "sha512-eQbeCgPukLgsKD0Kw5wQgsMDX5LeoI1MIrziNDjmc6XDq5ZQnuUymANQgAb2wp1tSF9zDSXyxJmIUXaKgN58Ug==", "dependencies": { - "@firebase/util": "1.9.7", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.7.tgz", - "integrity": "sha512-wjXr5AO8RPxVVg7rRCYffT7FMtBjHRfJ9KMwi19MbOf0vBf0H9YqW3WCgcnLpXI6ehiUcU3z3qgPnnU0nK6SnA==", - "dependencies": { - "@firebase/app-check-interop-types": "0.3.2", - "@firebase/auth-interop-types": "0.2.3", - "@firebase/component": "0.6.8", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.10.tgz", + "integrity": "sha512-sWp2g92u7xT4BojGbTXZ80iaSIaL6GAL0pwvM0CO/hb0nHSnABAqsH7AhnWGsGvXuEvbPr7blZylPaR9J+GSuQ==", + "dependencies": { + "@firebase/app-check-interop-types": "0.3.3", + "@firebase/auth-interop-types": "0.2.4", + "@firebase/component": "0.6.11", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "faye-websocket": "0.11.4", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database-compat": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-1.0.7.tgz", - "integrity": "sha512-R/3B+VVzEFN5YcHmfWns3eitA8fHLTL03io+FIoMcTYkajFnrBdS3A+g/KceN9omP7FYYYGTQWF9lvbEx6eMEg==", - "dependencies": { - "@firebase/component": "0.6.8", - "@firebase/database": "1.0.7", - "@firebase/database-types": "1.0.4", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/@firebase/database-compat/-/database-compat-2.0.1.tgz", + "integrity": "sha512-IsFivOjdE1GrjTeKoBU/ZMenESKDXidFDzZzHBPQ/4P20ptGdrl3oLlWrV/QJqJ9lND4IidE3z4Xr5JyfUW1vg==", + "dependencies": { + "@firebase/component": "0.6.11", + "@firebase/database": "1.0.10", + "@firebase/database-types": "1.0.7", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/database-types": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.4.tgz", - "integrity": "sha512-mz9ZzbH6euFXbcBo+enuJ36I5dR5w+enJHHjy9Y5ThCdKUseqfDjW3vCp1YxE9zygFCSjJJ/z1cQ+zodvUcwPQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.7.tgz", + "integrity": "sha512-I7zcLfJXrM0WM+ksFmFdAMdlq/DFmpeMNa+/GNsLyFo5u/lX5zzkPzGe3srVWqaBQBY5KprylDGxOsP6ETfL0A==", "dependencies": { - "@firebase/app-types": "0.9.2", - "@firebase/util": "1.9.7" + "@firebase/app-types": "0.9.3", + "@firebase/util": "1.10.2" } }, "node_modules/@firebase/logger": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.2.tgz", - "integrity": "sha512-Q1VuA5M1Gjqrwom6I6NUU4lQXdo9IAQieXlujeHZWvRt1b7qQ0KwBaNAjgxG27jgF9/mUwsNmO8ptBCGVYhB0A==", + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.4.tgz", + "integrity": "sha512-mH0PEh1zoXGnaR8gD1DeGeNZtWFKbnz9hDO91dIml3iou1gpOnLqXQ2dJfB71dj6dpmUjcQ6phY3ZZJbjErr9g==", "dependencies": { "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@firebase/util": { - "version": "1.9.7", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.9.7.tgz", - "integrity": "sha512-fBVNH/8bRbYjqlbIhZ+lBtdAAS4WqZumx03K06/u7fJSpz1TGjEMm1ImvKD47w+xaFKIP2ori6z8BrbakRfjJA==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.10.2.tgz", 
+ "integrity": "sha512-qnSHIoE9FK+HYnNhTI8q14evyqbc/vHRivfB4TgCIUOl4tosmKSQlp7ltymOlMP4xVIJTg5wrkfcZ60X4nUf7Q==", "dependencies": { "tslib": "^2.1.0" + }, + "engines": { + "node": ">=18.0.0" } }, "node_modules/@floating-ui/core": { @@ -2479,11 +2764,12 @@ } }, "node_modules/@google-cloud/firestore": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.9.0.tgz", - "integrity": "sha512-c4ALHT3G08rV7Zwv8Z2KG63gZh66iKdhCBeDfCpIkLrjX6EAjTD/szMdj14M+FnQuClZLFfW5bAgoOjfNmLtJg==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.0.tgz", + "integrity": "sha512-88uZ+jLsp1aVMj7gh3EKYH1aulTAMFAp8sH/v5a9w8q8iqSG27RiWLoxSAFr/XocZ9hGiWH1kEnBw+zl3xAgNA==", "optional": true, "dependencies": { + "@opentelemetry/api": "^1.3.0", "fast-deep-equal": "^3.1.1", "functional-red-black-tree": "^1.0.1", "google-gax": "^4.3.3", @@ -2559,10 +2845,21 @@ "node": ">=10.0.0" } }, + "node_modules/@google-cloud/storage/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/@grpc/grpc-js": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.11.1.tgz", - "integrity": "sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==", + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": "sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", "optional": true, "dependencies": { "@grpc/proto-loader": "^0.7.13", @@ -2703,56 +3000,13 @@ "node": ">=8" } }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": 
"https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", - "bin": { - "semver": "bin/semver.js" - }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "optional": true, "engines": { - "node": ">=10" + "node": ">=8.0.0" } }, "node_modules/@popperjs/core": { @@ -3664,11 +3918,11 @@ } }, "node_modules/@types/node": { - 
"version": "22.5.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.0.tgz", - "integrity": "sha512-DkFrJOe+rfdHTqqMg0bSNlGlQ85hSoh2TPzZyhHsXnMtligRWpxUySiyw8FY14ITt24HVCiQPWxS3KO/QlGmWg==", + "version": "22.10.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.6.tgz", + "integrity": "sha512-qNiuwC4ZDAUNcY47xgaSuS92cjf8JbSUoaKS77bmLG1rU7MlATVSiw/IlrjtIyyskXBZ8KkNfjK/P5na7rgXbQ==", "dependencies": { - "undici-types": "~6.19.2" + "undici-types": "~6.20.0" } }, "node_modules/@types/parse-json": { @@ -3925,6 +4179,17 @@ "node": ">= 6" } }, + "node_modules/adminjs/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", @@ -4011,11 +4276,6 @@ "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==" }, - "node_modules/aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, "node_modules/archiver": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.1.tgz", @@ -4090,19 +4350,6 @@ "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==" }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - 
"integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -4235,6 +4482,17 @@ "node": ">= 6" } }, + "node_modules/axios/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/babel-plugin-macros": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", @@ -4328,19 +4586,6 @@ } ] }, - "node_modules/bcrypt": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", - "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", - "hasInstallScript": true, - "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.11", - "node-addon-api": "^5.0.0" - }, - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -4349,6 +4594,14 @@ "tweetnacl": "^0.14.3" } }, + "node_modules/bcryptjs": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.2.tgz", + "integrity": "sha512-k38b3XOZKv60C4E2hVsXTolJWfkGRMbILBIe2IBITXciy5bOsTKot5kDrf3ZfufQtQOUN5mXceUEpU1rTl9Uog==", + "bin": { + "bcrypt": "bin/bcrypt" + } + }, "node_modules/big-integer": { "version": "1.6.51", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", @@ -4730,14 
+4983,6 @@ "fsevents": "~2.3.2" } }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "engines": { - "node": ">=10" - } - }, "node_modules/classnames": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", @@ -4868,14 +5113,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "bin": { - "color-support": "bin.js" - } - }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -4949,11 +5186,6 @@ "typedarray": "^0.0.6" } }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" - }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -5283,11 +5515,6 @@ "node": ">=0.4.0" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" - }, "node_modules/denque": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", @@ -5318,14 
+5545,6 @@ "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/detect-libc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", - "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", - "engines": { - "node": ">=8" - } - }, "node_modules/dezalgo": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", @@ -5455,7 +5674,8 @@ "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "optional": true }, "node_modules/encodeurl": { "version": "1.0.2", @@ -5767,9 +5987,9 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, "node_modules/fast-querystring": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.0.0.tgz", - "integrity": "sha512-3LQi62IhQoDlmt4ULCYmh17vRO2EtS7hTSsG4WwoKWgV7GLMKBOecEh+aiavASnLx8I2y89OD33AGLo0ccRhzA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", "dependencies": { "fast-decode-uri-component": "^1.0.1" } @@ -5810,9 +6030,9 @@ "integrity": "sha512-cIusKBIt/R/oI6z/1nyfe2FvGKVTohVRfvkOhvx0nCEW+xf5NoCXjAHcWp93uOUBchzYcsvPlrapAdX1uW+YGg==" }, "node_modules/fast-xml-parser": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", - "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "version": "4.5.1", + 
"resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.1.tgz", + "integrity": "sha512-y655CeyUQ+jj7KBbYMc4FG01V8ZQqjN+gDYGJ50RtfsUB8iG9AmwmwoAgeKLJdmueKKMrH1RJ7yXHTSoczdv5w==", "funding": [ { "type": "github", @@ -6103,6 +6323,7 @@ "version": "5.4.0", "resolved": "https://registry.npmjs.org/fastify-multipart/-/fastify-multipart-5.4.0.tgz", "integrity": "sha512-Pafy4mtcuFUnFM/t0kgCdL854KIEoDymNVdv4nD7uBfV7lBCQq/NVEuNnaNXAbuCTpeXzYRzi50lSDa9ZM838A==", + "deprecated": "Please use @fastify/multipart@6.0.0 instead", "dependencies": { "fastify-multipart-deprecated": "npm:fastify-multipart@5.3.1", "process-warning": "^1.0.0" @@ -6342,26 +6563,27 @@ } }, "node_modules/firebase-admin": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-12.3.1.tgz", - "integrity": "sha512-vEr3s3esl8nPIA9r/feDT4nzIXCfov1CyyCSpMQWp6x63Q104qke0MEGZlrHUZVROtl8FLus6niP/M9I1s4VBA==", + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-13.0.2.tgz", + "integrity": "sha512-YWVpoN+tZVSRXF0qC0gojoF5bSqvBRbnBk8+xUtFiguM2L4vB7f0moAwV1VVWDDHvTnvQ68OyTMpdp6wKo/clw==", "dependencies": { "@fastify/busboy": "^3.0.0", - "@firebase/database-compat": "^1.0.2", - "@firebase/database-types": "^1.0.0", - "@types/node": "^22.0.1", + "@firebase/database-compat": "^2.0.0", + "@firebase/database-types": "^1.0.6", + "@types/node": "^22.8.7", "farmhash-modern": "^1.1.0", + "google-auth-library": "^9.14.2", "jsonwebtoken": "^9.0.0", "jwks-rsa": "^3.1.0", "node-forge": "^1.3.1", - "uuid": "^10.0.0" + "uuid": "^11.0.2" }, "engines": { - "node": ">=14" + "node": ">=18" }, "optionalDependencies": { - "@google-cloud/firestore": "^7.7.0", - "@google-cloud/storage": "^7.7.0" + "@google-cloud/firestore": "^7.11.0", + "@google-cloud/storage": "^7.14.0" } }, "node_modules/firebase-admin/node_modules/@fastify/busboy": { @@ -6401,9 +6623,9 @@ } }, "node_modules/firebase-admin/node_modules/@google-cloud/storage": { - 
"version": "7.12.1", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.12.1.tgz", - "integrity": "sha512-Z3ZzOnF3YKLuvpkvF+TjQ6lztxcAyTILp+FjKonmVpEwPa9vFvxpZjubLR4sB6bf19i/8HL2AXRjA0YFgHFRmQ==", + "version": "7.15.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.15.0.tgz", + "integrity": "sha512-/j/+8DFuEOo33fbdX0V5wjooOoFahEaMEdImHBmM2tH9MPHJYNtmXOf2sGUmZmiufSukmBEvdlzYgDkkgeBiVQ==", "optional": true, "dependencies": { "@google-cloud/paginator": "^5.0.0", @@ -6448,7 +6670,6 @@ "version": "6.7.1", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", - "optional": true, "dependencies": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", @@ -6461,24 +6682,19 @@ } }, "node_modules/firebase-admin/node_modules/gaxios/node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "optional": true, - "dependencies": { - "debug": "^4.3.4" - }, + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "engines": { "node": ">= 14" } }, "node_modules/firebase-admin/node_modules/gaxios/node_modules/https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", - "optional": true, + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": 
"sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "dependencies": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { @@ -6493,7 +6709,6 @@ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], - "optional": true, "bin": { "uuid": "dist/bin/uuid" } @@ -6502,7 +6717,6 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", - "optional": true, "dependencies": { "gaxios": "^6.0.0", "json-bigint": "^1.0.0" @@ -6512,10 +6726,9 @@ } }, "node_modules/firebase-admin/node_modules/google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", - "optional": true, + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "dependencies": { "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", @@ -6532,7 +6745,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "dependencies": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -6542,7 +6754,6 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "optional": true, "dependencies": { "gaxios": "^6.0.0", "jws": "^4.0.0" @@ -6555,7 +6766,6 @@ "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "dependencies": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -6600,7 +6810,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", - "optional": true, "dependencies": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", @@ -6674,15 +6883,15 @@ } }, "node_modules/firebase-admin/node_modules/uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.5.tgz", + "integrity": "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==", "funding": [ "https://github.com/sponsors/broofa", "https://github.com/sponsors/ctavan" ], "bin": { - "uuid": "dist/bin/uuid" + "uuid": "dist/esm/bin/uuid" } }, "node_modules/flat": { @@ -6741,6 +6950,17 @@ "node": ">= 0.12" } }, + "node_modules/form-data/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/formidable": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/formidable/-/formidable-2.1.1.tgz", @@ -6823,33 +7043,6 @@ "node": ">=6 <7 || >=8" } }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/fs-minipass/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -6948,26 +7141,6 @@ "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", "optional": true }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/gaxios": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", @@ -7162,9 +7335,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/google-gax": { - "version": 
"4.3.9", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.3.9.tgz", - "integrity": "sha512-tcjQr7sXVGMdlvcG25wSv98ap1dtF4Z6mcV0rztGIddOcezw4YMb/uTXg72JPrLep+kXcVjaJjg6oo3KLf4itQ==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", "optional": true, "dependencies": { "@grpc/grpc-js": "^1.10.9", @@ -7194,13 +7367,10 @@ } }, "node_modules/google-gax/node_modules/agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", "optional": true, - "dependencies": { - "debug": "^4.3.4" - }, "engines": { "node": ">= 14" } @@ -7235,9 +7405,9 @@ } }, "node_modules/google-gax/node_modules/google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "optional": true, "dependencies": { "base64-js": "^1.3.0", @@ -7291,12 +7461,12 @@ } }, "node_modules/google-gax/node_modules/https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": 
"sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "optional": true, "dependencies": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" }, "engines": { @@ -7546,11 +7716,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" - }, "node_modules/hashlru": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", @@ -7664,9 +7829,9 @@ } }, "node_modules/http-parser-js": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", - "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.9.tgz", + "integrity": "sha512-n1XsPy3rXVxlqxVioEWdC+0+M+SQw0DpJynwtOPo1X+ZlvdzTLtDBIJJlDQTnwZIFJrZSzSGmIOUdP8tu+SgLw==" }, "node_modules/http-proxy-agent": { "version": "4.0.1", @@ -7916,6 +8081,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "optional": true, "engines": { "node": ">=8" } @@ -8543,9 +8709,9 @@ } }, "node_modules/long": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", - "integrity": 
"sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==", "optional": true }, "node_modules/long-timeout": { @@ -8695,16 +8861,24 @@ } }, "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", "dependencies": { - "mime-db": "1.52.0" + "mime-db": "^1.54.0" }, "engines": { "node": ">= 0.6" } }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -8737,42 +8911,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "dependencies": { - "minipass": "^3.0.0", 
- "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -9161,9 +9299,10 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "node_modules/multer": { - "version": "1.4.5-lts.1", - "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", - "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "version": "1.4.5-lts.2", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz", + "integrity": "sha512-VzGiVigcG9zUAoCNU+xShztrlr1auZOlurXynNvO9GiWD1/mTBbUljOKY+qMeazBqXgRnjzeEgJI/wyjJUHg9A==", + "deprecated": "Multer 1.x is impacted by a number of vulnerabilities, which have been patched in 2.x. 
You should upgrade to the latest 2.x version.", "dependencies": { "append-field": "^1.0.0", "busboy": "^1.0.0", @@ -9305,15 +9444,10 @@ "node": ">= 0.4.0" } }, - "node_modules/node-addon-api": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", - "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" - }, "node_modules/node-cron": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.2.tgz", - "integrity": "sha512-iP8l0yGlNpE0e6q1o185yOApANRe47UPbLf4YxfbiNHt/RU5eBcGB/e0oudruheSf+LQeDMezqC5BVAb5wwRcQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.3.tgz", + "integrity": "sha512-dOal67//nohNgYWb+nWmg5dkFdIwDm8EpeGYMekPMrngV3637lqnX0lbUcCtgibHTz6SEz7DAIjKvKDFYCnO1A==", "dependencies": { "uuid": "8.3.2" }, @@ -9437,20 +9571,6 @@ "node": ">=4" } }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -9459,18 +9579,6 @@ "node": ">=0.10.0" } }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "deprecated": "This package is no longer supported.", - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, "node_modules/number-allocator": { "version": "1.0.14", "resolved": 
"https://registry.npmjs.org/number-allocator/-/number-allocator-1.0.14.tgz", @@ -10177,9 +10285,9 @@ } }, "node_modules/protobufjs": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.3.tgz", - "integrity": "sha512-HaYi2CVjiPoBR1d2zTVKVHXr9IUnpJizCjUu19vxdD3B8o4z+vfOHpIEB1358w8nv8dfUNEfDHFvMsH7QlLt/Q==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", "hasInstallScript": true, "optional": true, "dependencies": { @@ -10806,6 +10914,17 @@ "node": ">= 0.12" } }, + "node_modules/request/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/request/node_modules/qs": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", @@ -11170,11 +11289,6 @@ "randombytes": "^2.1.0" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" - }, "node_modules/set-cookie-parser": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.5.1.tgz", @@ -11485,6 +11599,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "optional": true, "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -11654,22 +11769,6 @@ "url": 
"https://github.com/sponsors/ljharb" } }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", @@ -11695,11 +11794,6 @@ "readable-stream": "^3.4.0" } }, - "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, "node_modules/teeny-request": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-8.0.3.tgz", @@ -12089,6 +12183,17 @@ "npm": ">=1.3.7" } }, + "node_modules/twilio/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/twilio/node_modules/oauth-sign": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", @@ -12226,6 +12331,17 @@ "node": ">= 0.6" } }, + "node_modules/type-is/node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + 
}, "node_modules/typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -12253,9 +12369,9 @@ "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" }, "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" }, "node_modules/unescape": { "version": "1.0.1", @@ -12629,14 +12745,6 @@ "webidl-conversions": "^3.0.0" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "node_modules/win-release": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/win-release/-/win-release-1.1.1.tgz", @@ -12981,6 +13089,31 @@ "fast-uri": "^2.0.0" } }, + "@fastify/formbody": { + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", + "integrity": "sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "requires": { + "fast-querystring": "^1.0.0", + "fastify-plugin": "^4.0.0" + } + }, + "@fastify/multipart": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.7.3.tgz", + "integrity": "sha512-MG4Gd9FNEXc8qx0OgqoXM10EGO/dN/0iVQ8SrpFMU3d6F6KUfcqD2ZyoQhkm9LWrbiMgdHv5a43x78lASdn5GA==", + "requires": { + "@fastify/busboy": "^1.0.0", + "@fastify/deepmerge": "^1.0.0", 
+ "@fastify/error": "^3.0.0", + "@fastify/swagger": "^8.3.1", + "@fastify/swagger-ui": "^1.8.0", + "end-of-stream": "^1.4.4", + "fastify-plugin": "^4.0.0", + "secure-json-parse": "^2.4.0", + "stream-wormhole": "^1.1.0" + } + }, "avvio": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/avvio/-/avvio-8.2.0.tgz", @@ -13041,6 +13174,14 @@ "yallist": "^4.0.0" } }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, "pino": { "version": "8.8.0", "resolved": "https://registry.npmjs.org/pino/-/pino-8.8.0.tgz", @@ -14461,6 +14602,11 @@ } } }, + "@fastify/accept-negotiator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@fastify/accept-negotiator/-/accept-negotiator-1.1.0.tgz", + "integrity": "sha512-OIHZrb2ImZ7XG85HXOONLcJWGosv7sIvM2ifAPQVhg9Lv7qdmMBNVaai4QTdyuaqbKM5eO6sLSQOYI7wEQeCJQ==" + }, "@fastify/ajv-compiler": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/@fastify/ajv-compiler/-/ajv-compiler-1.1.0.tgz", @@ -14554,12 +14700,19 @@ } }, "@fastify/formbody": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-7.4.0.tgz", - "integrity": "sha512-H3C6h1GN56/SMrZS8N2vCT2cZr7mIHzBHzOBa5OPpjfB/D6FzP9mMpE02ZzrFX0ANeh0BAJdoXKOF2e7IbV+Og==", + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@fastify/formbody/-/formbody-8.0.2.tgz", + "integrity": "sha512-84v5J2KrkXzjgBpYnaNRPqwgMsmY7ZDjuj0YVuMR3NXCJRCgKEZy/taSP1wUYGn0onfxJpLyRGDLa+NMaDJtnA==", "requires": { - "fast-querystring": "^1.0.0", - "fastify-plugin": "^4.0.0" + "fast-querystring": "^1.1.2", + "fastify-plugin": "^5.0.0" + }, + "dependencies": { + "fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": 
"sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + } } }, "@fastify/jwt": { @@ -14574,19 +14727,62 @@ "steed": "^1.1.3" } }, - "@fastify/multipart": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-7.3.0.tgz", - "integrity": "sha512-tbzQiRFxoADCn0G10CqiQ/nDWWcfegtwg826Pfz2h7+XvuqJhGnko0TbafrWIY7hnGD+sNCGMdiTVsxxs6zigA==", + "@fastify/multipart": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/@fastify/multipart/-/multipart-9.0.3.tgz", + "integrity": "sha512-pJogxQCrT12/6I5Fh6jr3narwcymA0pv4B0jbC7c6Bl9wnrxomEUnV0d26w6gUls7gSXmhG8JGRMmHFIPsxt1g==", + "requires": { + "@fastify/busboy": "^3.0.0", + "@fastify/deepmerge": "^2.0.0", + "@fastify/error": "^4.0.0", + "fastify-plugin": "^5.0.0", + "secure-json-parse": "^3.0.0" + }, + "dependencies": { + "@fastify/busboy": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-3.2.0.tgz", + "integrity": "sha512-m9FVDXU3GT2ITSe0UaMA5rU3QkfC/UXtCU8y0gSN/GugTqtVldOBWIB5V6V3sbmenVZUIpU6f+mPEO2+m5iTaA==" + }, + "@fastify/deepmerge": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@fastify/deepmerge/-/deepmerge-2.0.2.tgz", + "integrity": "sha512-3wuLdX5iiiYeZWP6bQrjqhrcvBIf0NHbQH1Ur1WbHvoiuTYUEItgygea3zs8aHpiitn0lOB8gX20u1qO+FDm7Q==" + }, + "@fastify/error": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@fastify/error/-/error-4.2.0.tgz", + "integrity": "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ==" + }, + "fastify-plugin": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/fastify-plugin/-/fastify-plugin-5.0.1.tgz", + "integrity": "sha512-HCxs+YnRaWzCl+cWRYFnHmeRFyR5GVnJTAaCJQiYzQSDwK9MgJdyAsuL3nh0EWRCYMgQ5MeziymvmAhUHYHDUQ==" + }, + "secure-json-parse": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-3.0.2.tgz", + "integrity": 
"sha512-H6nS2o8bWfpFEV6U38sOSjS7bTbdgbCGU9wEM6W14P5H0QOsz94KCusifV44GpHDTu2nqZbuDNhTzu+mjDSw1w==" + } + } + }, + "@fastify/send": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@fastify/send/-/send-2.1.0.tgz", + "integrity": "sha512-yNYiY6sDkexoJR0D8IDy3aRP3+L4wdqCpvx5WP+VtEU58sn7USmKynBzDQex5X42Zzvw2gNzzYgP90UfWShLFA==", "requires": { - "@fastify/busboy": "^1.0.0", - "@fastify/deepmerge": "^1.0.0", - "@fastify/error": "^3.0.0", - "end-of-stream": "^1.4.4", - "fastify-plugin": "^4.0.0", - "hexoid": "^1.0.0", - "secure-json-parse": "^2.4.0", - "stream-wormhole": "^1.1.0" + "@lukeed/ms": "^2.0.1", + "escape-html": "~1.0.3", + "fast-decode-uri-component": "^1.0.1", + "http-errors": "2.0.0", + "mime": "^3.0.0" + }, + "dependencies": { + "mime": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", + "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==" + } } }, "@fastify/session": { @@ -14607,6 +14803,107 @@ } } }, + "@fastify/static": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@fastify/static/-/static-6.12.0.tgz", + "integrity": "sha512-KK1B84E6QD/FcQWxDI2aiUCwHxMJBI1KeCUzm1BwYpPY1b742+jeKruGHP2uOluuM6OkBPI8CIANrXcCRtC2oQ==", + "requires": { + "@fastify/accept-negotiator": "^1.0.0", + "@fastify/send": "^2.0.0", + "content-disposition": "^0.5.3", + "fastify-plugin": "^4.0.0", + "glob": "^8.0.1", + "p-limit": "^3.1.0" + }, + "dependencies": { + "brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "requires": { + "balanced-match": "^1.0.0" + } + }, + "glob": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": 
"sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + } + }, + "minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "requires": { + "brace-expansion": "^2.0.1" + } + } + } + }, + "@fastify/swagger": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/@fastify/swagger/-/swagger-8.15.0.tgz", + "integrity": "sha512-zy+HEEKFqPMS2sFUsQU5X0MHplhKJvWeohBwTCkBAJA/GDYGLGUWQaETEhptiqxK7Hs0fQB9B4MDb3pbwIiCwA==", + "requires": { + "fastify-plugin": "^4.0.0", + "json-schema-resolver": "^2.0.0", + "openapi-types": "^12.0.0", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + }, + "dependencies": { + "json-schema-resolver": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/json-schema-resolver/-/json-schema-resolver-2.0.0.tgz", + "integrity": "sha512-pJ4XLQP4Q9HTxl6RVDLJ8Cyh1uitSs0CzDBAz1uoJ4sRD/Bk7cFSXL1FUXDW3zJ7YnfliJx6eu8Jn283bpZ4Yg==", + "requires": { + "debug": "^4.1.1", + "rfdc": "^1.1.4", + "uri-js": "^4.2.2" + } + }, + "openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==" + } + } + }, + "@fastify/swagger-ui": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@fastify/swagger-ui/-/swagger-ui-1.10.2.tgz", + "integrity": 
"sha512-f2mRqtblm6eRAFQ3e8zSngxVNEtiYY7rISKQVjPA++ZsWc5WYlPVTb6Bx0G/zy0BIoucNqDr/Q2Vb/kTYkOq1A==", + "requires": { + "@fastify/static": "^6.0.0", + "fastify-plugin": "^4.0.0", + "openapi-types": "^12.0.2", + "rfdc": "^1.3.0", + "yaml": "^2.2.2" + }, + "dependencies": { + "openapi-types": { + "version": "12.1.3", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-12.1.3.tgz", + "integrity": "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw==" + }, + "yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==" + } + } + }, "@fastify/view": { "version": "7.3.0", "resolved": "https://registry.npmjs.org/@fastify/view/-/view-7.3.0.tgz", @@ -14617,77 +14914,77 @@ } }, "@firebase/app-check-interop-types": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.2.tgz", - "integrity": "sha512-LMs47Vinv2HBMZi49C09dJxp0QT5LwDzFaVGf/+ITHe3BlIhUiLNttkATSXplc89A2lAaeTqjgqVkiRfUGyQiQ==" + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@firebase/app-check-interop-types/-/app-check-interop-types-0.3.3.tgz", + "integrity": "sha512-gAlxfPLT2j8bTI/qfe3ahl2I2YcBQ8cFIBdhAQA4I2f3TndcO+22YizyGYuttLHPQEpWkhmpFW60VCFEPg4g5A==" }, "@firebase/app-types": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.2.tgz", - "integrity": "sha512-oMEZ1TDlBz479lmABwWsWjzHwheQKiAgnuKxE0pz0IXCVx7/rtlkx1fQ6GfgK24WCrxDKMplZrT50Kh04iMbXQ==" + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/@firebase/app-types/-/app-types-0.9.3.tgz", + "integrity": "sha512-kRVpIl4vVGJ4baogMDINbyrIOtOxqhkZQg4jTq3l8Lw6WSk0xfpEYzezFu+Kl4ve4fbPl79dvwRtaFqAC/ucCw==" }, "@firebase/auth-interop-types": { - "version": "0.2.3", - "resolved": 
"https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.3.tgz", - "integrity": "sha512-Fc9wuJGgxoxQeavybiuwgyi+0rssr76b+nHpj+eGhXFYAdudMWyfBHvFL/I5fEHniUM/UQdFzi9VXJK2iZF7FQ==" + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@firebase/auth-interop-types/-/auth-interop-types-0.2.4.tgz", + "integrity": "sha512-JPgcXKCuO+CWqGDnigBtvo09HeBs5u/Ktc2GaFj2m01hLarbxthLNm7Fk8iOP1aqAtXV+fnnGj7U28xmk7IwVA==" }, "@firebase/component": { - "version": "0.6.8", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.8.tgz", - "integrity": "sha512-LcNvxGLLGjBwB0dJUsBGCej2fqAepWyBubs4jt1Tiuns7QLbXHuyObZ4aMeBjZjWx4m8g1LoVI9QFpSaq/k4/g==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.6.11.tgz", + "integrity": "sha512-eQbeCgPukLgsKD0Kw5wQgsMDX5LeoI1MIrziNDjmc6XDq5ZQnuUymANQgAb2wp1tSF9zDSXyxJmIUXaKgN58Ug==", "requires": { - "@firebase/util": "1.9.7", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" } }, "@firebase/database": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.7.tgz", - "integrity": "sha512-wjXr5AO8RPxVVg7rRCYffT7FMtBjHRfJ9KMwi19MbOf0vBf0H9YqW3WCgcnLpXI6ehiUcU3z3qgPnnU0nK6SnA==", - "requires": { - "@firebase/app-check-interop-types": "0.3.2", - "@firebase/auth-interop-types": "0.2.3", - "@firebase/component": "0.6.8", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-1.0.10.tgz", + "integrity": "sha512-sWp2g92u7xT4BojGbTXZ80iaSIaL6GAL0pwvM0CO/hb0nHSnABAqsH7AhnWGsGvXuEvbPr7blZylPaR9J+GSuQ==", + "requires": { + "@firebase/app-check-interop-types": "0.3.3", + "@firebase/auth-interop-types": "0.2.4", + "@firebase/component": "0.6.11", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "faye-websocket": "0.11.4", "tslib": "^2.1.0" } }, "@firebase/database-compat": { - "version": "1.0.7", - "resolved": 
"https://registry.npmjs.org/@firebase/database-compat/-/database-compat-1.0.7.tgz", - "integrity": "sha512-R/3B+VVzEFN5YcHmfWns3eitA8fHLTL03io+FIoMcTYkajFnrBdS3A+g/KceN9omP7FYYYGTQWF9lvbEx6eMEg==", - "requires": { - "@firebase/component": "0.6.8", - "@firebase/database": "1.0.7", - "@firebase/database-types": "1.0.4", - "@firebase/logger": "0.4.2", - "@firebase/util": "1.9.7", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@firebase/database-compat/-/database-compat-2.0.1.tgz", + "integrity": "sha512-IsFivOjdE1GrjTeKoBU/ZMenESKDXidFDzZzHBPQ/4P20ptGdrl3oLlWrV/QJqJ9lND4IidE3z4Xr5JyfUW1vg==", + "requires": { + "@firebase/component": "0.6.11", + "@firebase/database": "1.0.10", + "@firebase/database-types": "1.0.7", + "@firebase/logger": "0.4.4", + "@firebase/util": "1.10.2", "tslib": "^2.1.0" } }, "@firebase/database-types": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.4.tgz", - "integrity": "sha512-mz9ZzbH6euFXbcBo+enuJ36I5dR5w+enJHHjy9Y5ThCdKUseqfDjW3vCp1YxE9zygFCSjJJ/z1cQ+zodvUcwPQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-1.0.7.tgz", + "integrity": "sha512-I7zcLfJXrM0WM+ksFmFdAMdlq/DFmpeMNa+/GNsLyFo5u/lX5zzkPzGe3srVWqaBQBY5KprylDGxOsP6ETfL0A==", "requires": { - "@firebase/app-types": "0.9.2", - "@firebase/util": "1.9.7" + "@firebase/app-types": "0.9.3", + "@firebase/util": "1.10.2" } }, "@firebase/logger": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.2.tgz", - "integrity": "sha512-Q1VuA5M1Gjqrwom6I6NUU4lQXdo9IAQieXlujeHZWvRt1b7qQ0KwBaNAjgxG27jgF9/mUwsNmO8ptBCGVYhB0A==", + "version": "0.4.4", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.4.4.tgz", + "integrity": "sha512-mH0PEh1zoXGnaR8gD1DeGeNZtWFKbnz9hDO91dIml3iou1gpOnLqXQ2dJfB71dj6dpmUjcQ6phY3ZZJbjErr9g==", "requires": { "tslib": "^2.1.0" } }, "@firebase/util": { - "version": "1.9.7", - 
"resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.9.7.tgz", - "integrity": "sha512-fBVNH/8bRbYjqlbIhZ+lBtdAAS4WqZumx03K06/u7fJSpz1TGjEMm1ImvKD47w+xaFKIP2ori6z8BrbakRfjJA==", + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/@firebase/util/-/util-1.10.2.tgz", + "integrity": "sha512-qnSHIoE9FK+HYnNhTI8q14evyqbc/vHRivfB4TgCIUOl4tosmKSQlp7ltymOlMP4xVIJTg5wrkfcZ60X4nUf7Q==", "requires": { "tslib": "^2.1.0" } @@ -14706,11 +15003,12 @@ } }, "@google-cloud/firestore": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.9.0.tgz", - "integrity": "sha512-c4ALHT3G08rV7Zwv8Z2KG63gZh66iKdhCBeDfCpIkLrjX6EAjTD/szMdj14M+FnQuClZLFfW5bAgoOjfNmLtJg==", + "version": "7.11.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-7.11.0.tgz", + "integrity": "sha512-88uZ+jLsp1aVMj7gh3EKYH1aulTAMFAp8sH/v5a9w8q8iqSG27RiWLoxSAFr/XocZ9hGiWH1kEnBw+zl3xAgNA==", "optional": true, "requires": { + "@opentelemetry/api": "^1.3.0", "fast-deep-equal": "^3.1.1", "functional-red-black-tree": "^1.0.1", "google-gax": "^4.3.3", @@ -14764,13 +15062,21 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/mime/-/mime-3.0.0.tgz", "integrity": "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==" + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } } } }, "@grpc/grpc-js": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.11.1.tgz", - "integrity": "sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==", + "version": "1.12.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.12.5.tgz", + "integrity": 
"sha512-d3iiHxdpg5+ZcJ6jnDSOT8Z0O0VMVGy34jAnYLUX8yd36b1qn8f1TwOA/Lc7TsOh03IkPJ38eGI5qD2EjNkoEA==", "optional": true, "requires": { "@grpc/proto-loader": "^0.7.13", @@ -14877,43 +15183,11 @@ "resolved": "https://registry.npmjs.org/@lukeed/ms/-/ms-2.0.1.tgz", "integrity": "sha512-Xs/4RZltsAL7pkvaNStUQt7netTkyxrS0K+RILcVr3TRMS/ToOg4I6uNfhB9SlGsnWBym4U+EaXq0f0cEMNkHA==" }, - "@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "requires": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "dependencies": { - "make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "requires": { - "semver": "^6.0.0" - }, - "dependencies": { - "semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" - } - } - }, - "semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==" - } - } + "@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "optional": true }, "@popperjs/core": { "version": "2.11.6", @@ -15554,11 +15828,11 @@ } }, "@types/node": { - "version": 
"22.5.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.0.tgz", - "integrity": "sha512-DkFrJOe+rfdHTqqMg0bSNlGlQ85hSoh2TPzZyhHsXnMtligRWpxUySiyw8FY14ITt24HVCiQPWxS3KO/QlGmWg==", + "version": "22.10.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.6.tgz", + "integrity": "sha512-qNiuwC4ZDAUNcY47xgaSuS92cjf8JbSUoaKS77bmLG1rU7MlATVSiw/IlrjtIyyskXBZ8KkNfjK/P5na7rgXbQ==", "requires": { - "undici-types": "~6.19.2" + "undici-types": "~6.20.0" } }, "@types/parse-json": { @@ -15795,6 +16069,14 @@ "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } } } }, @@ -15857,11 +16139,6 @@ "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", "integrity": "sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw==" }, - "aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" - }, "archiver": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/archiver/-/archiver-5.3.1.tgz", @@ -15934,15 +16211,6 @@ "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==" }, - "are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - } - }, "argparse": { "version": "2.0.1", 
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -16056,6 +16324,14 @@ "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } } } }, @@ -16122,15 +16398,6 @@ "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, - "bcrypt": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", - "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", - "requires": { - "@mapbox/node-pre-gyp": "^1.0.11", - "node-addon-api": "^5.0.0" - } - }, "bcrypt-pbkdf": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", @@ -16139,6 +16406,11 @@ "tweetnacl": "^0.14.3" } }, + "bcryptjs": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/bcryptjs/-/bcryptjs-3.0.2.tgz", + "integrity": "sha512-k38b3XOZKv60C4E2hVsXTolJWfkGRMbILBIe2IBITXciy5bOsTKot5kDrf3ZfufQtQOUN5mXceUEpU1rTl9Uog==" + }, "big-integer": { "version": "1.6.51", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", @@ -16408,11 +16680,6 @@ "readdirp": "~3.6.0" } }, - "chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" - }, "classnames": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", @@ -16505,11 +16772,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" - }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -16568,11 +16830,6 @@ "typedarray": "^0.0.6" } }, - "console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" - }, "content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -16811,11 +17068,6 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, - "delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" - }, "denque": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", @@ -16836,11 +17088,6 @@ "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, - "detect-libc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", - "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==" - }, "dezalgo": { "version": "1.0.4", "resolved": 
"https://registry.npmjs.org/dezalgo/-/dezalgo-1.0.4.tgz", @@ -16960,7 +17207,8 @@ "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "optional": true }, "encodeurl": { "version": "1.0.2", @@ -17199,9 +17447,9 @@ "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" }, "fast-querystring": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.0.0.tgz", - "integrity": "sha512-3LQi62IhQoDlmt4ULCYmh17vRO2EtS7hTSsG4WwoKWgV7GLMKBOecEh+aiavASnLx8I2y89OD33AGLo0ccRhzA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", + "integrity": "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg==", "requires": { "fast-decode-uri-component": "^1.0.1" } @@ -17236,9 +17484,9 @@ "integrity": "sha512-cIusKBIt/R/oI6z/1nyfe2FvGKVTohVRfvkOhvx0nCEW+xf5NoCXjAHcWp93uOUBchzYcsvPlrapAdX1uW+YGg==" }, "fast-xml-parser": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz", - "integrity": "sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.5.1.tgz", + "integrity": "sha512-y655CeyUQ+jj7KBbYMc4FG01V8ZQqjN+gDYGJ50RtfsUB8iG9AmwmwoAgeKLJdmueKKMrH1RJ7yXHTSoczdv5w==", "optional": true, "requires": { "strnum": "^1.0.5" @@ -17712,21 +17960,22 @@ } }, "firebase-admin": { - "version": "12.3.1", - "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-12.3.1.tgz", - "integrity": 
"sha512-vEr3s3esl8nPIA9r/feDT4nzIXCfov1CyyCSpMQWp6x63Q104qke0MEGZlrHUZVROtl8FLus6niP/M9I1s4VBA==", + "version": "13.0.2", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-13.0.2.tgz", + "integrity": "sha512-YWVpoN+tZVSRXF0qC0gojoF5bSqvBRbnBk8+xUtFiguM2L4vB7f0moAwV1VVWDDHvTnvQ68OyTMpdp6wKo/clw==", "requires": { "@fastify/busboy": "^3.0.0", - "@firebase/database-compat": "^1.0.2", - "@firebase/database-types": "^1.0.0", - "@google-cloud/firestore": "^7.7.0", - "@google-cloud/storage": "^7.7.0", - "@types/node": "^22.0.1", + "@firebase/database-compat": "^2.0.0", + "@firebase/database-types": "^1.0.6", + "@google-cloud/firestore": "^7.11.0", + "@google-cloud/storage": "^7.14.0", + "@types/node": "^22.8.7", "farmhash-modern": "^1.1.0", + "google-auth-library": "^9.14.2", "jsonwebtoken": "^9.0.0", "jwks-rsa": "^3.1.0", "node-forge": "^1.3.1", - "uuid": "^10.0.0" + "uuid": "^11.0.2" }, "dependencies": { "@fastify/busboy": { @@ -17757,9 +18006,9 @@ "optional": true }, "@google-cloud/storage": { - "version": "7.12.1", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.12.1.tgz", - "integrity": "sha512-Z3ZzOnF3YKLuvpkvF+TjQ6lztxcAyTILp+FjKonmVpEwPa9vFvxpZjubLR4sB6bf19i/8HL2AXRjA0YFgHFRmQ==", + "version": "7.15.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-7.15.0.tgz", + "integrity": "sha512-/j/+8DFuEOo33fbdX0V5wjooOoFahEaMEdImHBmM2tH9MPHJYNtmXOf2sGUmZmiufSukmBEvdlzYgDkkgeBiVQ==", "optional": true, "requires": { "@google-cloud/paginator": "^5.0.0", @@ -17797,7 +18046,6 @@ "version": "6.7.1", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-6.7.1.tgz", "integrity": "sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==", - "optional": true, "requires": { "extend": "^3.0.2", "https-proxy-agent": "^7.0.1", @@ -17807,29 +18055,23 @@ }, "dependencies": { "agent-base": { - "version": "7.1.1", - "resolved": 
"https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "optional": true, - "requires": { - "debug": "^4.3.4" - } + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==" }, "https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", - "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", - "optional": true, + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "requires": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" } }, "uuid": { "version": "9.0.1", "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", - "optional": true + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" } } }, @@ -17837,17 +18079,15 @@ "version": "6.1.0", "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-6.1.0.tgz", "integrity": "sha512-Jh/AIwwgaxan+7ZUUmRLCjtchyDiqh4KjBJ5tW3plBZb5iL/BPcso8A5DlzeD9qlw0duCamnNdpFjxwaT0KyKg==", - "optional": true, "requires": { "gaxios": "^6.0.0", "json-bigint": "^1.0.0" } }, "google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", - "optional": true, + "version": 
"9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "requires": { "base64-js": "^1.3.0", "ecdsa-sig-formatter": "^1.0.11", @@ -17861,7 +18101,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "requires": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -17873,7 +18112,6 @@ "version": "7.1.0", "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-7.1.0.tgz", "integrity": "sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==", - "optional": true, "requires": { "gaxios": "^6.0.0", "jws": "^4.0.0" @@ -17883,7 +18121,6 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/jws/-/jws-4.0.0.tgz", "integrity": "sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==", - "optional": true, "requires": { "jwa": "^2.0.0", "safe-buffer": "^5.0.1" @@ -17923,7 +18160,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/jwa/-/jwa-2.0.0.tgz", "integrity": "sha512-jrZ2Qx916EA+fq9cEAeCROWPTfCwi1IVHqT2tapuqLEVVDKFDENFw1oL+MwrTvH6msKxsd1YTDVw6uKEcsrLEA==", - "optional": true, "requires": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", @@ -17974,9 +18210,9 @@ } }, "uuid": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", - "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==" + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.5.tgz", + "integrity": "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA==" } } }, @@ -18011,6 +18247,16 @@ "asynckit": "^0.4.0", 
"combined-stream": "^1.0.6", "mime-types": "^2.1.12" + }, + "dependencies": { + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + } } }, "formidable": { @@ -18076,29 +18322,6 @@ "universalify": "^0.1.0" } }, - "fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "requires": { - "minipass": "^3.0.0" - }, - "dependencies": { - "minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "requires": { - "yallist": "^4.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -18182,22 +18405,6 @@ "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==", "optional": true }, - "gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "requires": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": 
"^1.1.2" - } - }, "gaxios": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-5.1.0.tgz", @@ -18358,9 +18565,9 @@ } }, "google-gax": { - "version": "4.3.9", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.3.9.tgz", - "integrity": "sha512-tcjQr7sXVGMdlvcG25wSv98ap1dtF4Z6mcV0rztGIddOcezw4YMb/uTXg72JPrLep+kXcVjaJjg6oo3KLf4itQ==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-4.4.1.tgz", + "integrity": "sha512-Phyp9fMfA00J3sZbJxbbB4jC55b7DBjE3F6poyL3wKMEBVKA79q6BGuHcTiM28yOzVql0NDbRL8MLLh8Iwk9Dg==", "optional": true, "requires": { "@grpc/grpc-js": "^1.10.9", @@ -18384,13 +18591,10 @@ "optional": true }, "agent-base": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.1.tgz", - "integrity": "sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==", - "optional": true, - "requires": { - "debug": "^4.3.4" - } + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.3.tgz", + "integrity": "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==", + "optional": true }, "gaxios": { "version": "6.7.1", @@ -18416,9 +18620,9 @@ } }, "google-auth-library": { - "version": "9.14.0", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.14.0.tgz", - "integrity": "sha512-Y/eq+RWVs55Io/anIsm24sDS8X79Tq948zVLGaa7+KlJYYqaGwp1YI37w48nzrNi12RgnzMrQD4NzdmCowT90g==", + "version": "9.15.0", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-9.15.0.tgz", + "integrity": "sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ==", "optional": true, "requires": { "base64-js": "^1.3.0", @@ -18462,12 +18666,12 @@ } }, "https-proxy-agent": { - "version": "7.0.5", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz", 
- "integrity": "sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw==", + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz", + "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==", "optional": true, "requires": { - "agent-base": "^7.0.2", + "agent-base": "^7.1.2", "debug": "4" } }, @@ -18665,11 +18869,6 @@ "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" }, - "has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" - }, "hashlru": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/hashlru/-/hashlru-2.3.0.tgz", @@ -18758,9 +18957,9 @@ } }, "http-parser-js": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz", - "integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q==" + "version": "0.5.9", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.9.tgz", + "integrity": "sha512-n1XsPy3rXVxlqxVioEWdC+0+M+SQw0DpJynwtOPo1X+ZlvdzTLtDBIJJlDQTnwZIFJrZSzSGmIOUdP8tu+SgLw==" }, "http-proxy-agent": { "version": "4.0.1", @@ -18918,7 +19117,8 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "optional": true }, "is-glob": { "version": "4.0.3", @@ 
-19464,9 +19664,9 @@ } }, "long": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", - "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/long/-/long-5.2.4.tgz", + "integrity": "sha512-qtzLbJE8hq7VabR3mISmVGtoXP8KGc2Z/AT8OuqlYD7JTR3oqrgwdjnk07wpj1twXxYmgDXgoKVWUG/fReSzHg==", "optional": true }, "long-timeout": { @@ -19592,11 +19792,18 @@ "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz", + "integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==", "requires": { - "mime-db": "1.52.0" + "mime-db": "^1.54.0" + }, + "dependencies": { + "mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==" + } } }, "mimic-fn": { @@ -19622,35 +19829,6 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" }, - "minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==" - }, - "minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": 
"sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "requires": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "dependencies": { - "minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "requires": { - "yallist": "^4.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", @@ -19923,9 +20101,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "multer": { - "version": "1.4.5-lts.1", - "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz", - "integrity": "sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ==", + "version": "1.4.5-lts.2", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.2.tgz", + "integrity": "sha512-VzGiVigcG9zUAoCNU+xShztrlr1auZOlurXynNvO9GiWD1/mTBbUljOKY+qMeazBqXgRnjzeEgJI/wyjJUHg9A==", "requires": { "append-field": "^1.0.0", "busboy": "^1.0.0", @@ -20044,15 +20222,10 @@ "resolved": "https://registry.npmjs.org/netmask/-/netmask-2.0.2.tgz", "integrity": "sha512-dBpDMdxv9Irdq66304OLfEmQ9tbNRFnFTuZiLo+bD+r332bBmMJ8GBLXklIXXgxd3+v9+KUnZaUR5PJMa75Gsg==" }, - "node-addon-api": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", - "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" - }, "node-cron": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/node-cron/-/node-cron-3.0.2.tgz", - "integrity": "sha512-iP8l0yGlNpE0e6q1o185yOApANRe47UPbLf4YxfbiNHt/RU5eBcGB/e0oudruheSf+LQeDMezqC5BVAb5wwRcQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/node-cron/-/node-cron-3.0.3.tgz", + "integrity": "sha512-dOal67//nohNgYWb+nWmg5dkFdIwDm8EpeGYMekPMrngV3637lqnX0lbUcCtgibHTz6SEz7DAIjKvKDFYCnO1A==", "requires": { "uuid": "8.3.2" } @@ -20135,30 +20308,11 @@ } } }, - "nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "requires": { - "abbrev": "1" - } - }, "normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" }, - "npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "requires": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, "number-allocator": { "version": "1.0.14", "resolved": "https://registry.npmjs.org/number-allocator/-/number-allocator-1.0.14.tgz", @@ -20714,9 +20868,9 @@ } }, "protobufjs": { - "version": "7.3.3", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.3.3.tgz", - "integrity": "sha512-HaYi2CVjiPoBR1d2zTVKVHXr9IUnpJizCjUu19vxdD3B8o4z+vfOHpIEB1358w8nv8dfUNEfDHFvMsH7QlLt/Q==", + "version": "7.4.0", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.4.0.tgz", + "integrity": "sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==", "optional": true, "requires": { "@protobufjs/aspromise": "^1.1.2", @@ -21169,6 
+21323,14 @@ "mime-types": "^2.1.12" } }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, "qs": { "version": "6.5.3", "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", @@ -21440,11 +21602,6 @@ "randombytes": "^2.1.0" } }, - "set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" - }, "set-cookie-parser": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.5.1.tgz", @@ -21699,6 +21856,7 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "optional": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -21826,26 +21984,6 @@ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, - "tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "requires": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "dependencies": { - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - } - } - }, "tar-stream": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", @@ -22169,6 +22307,14 @@ "sshpk": "^1.7.0" } }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + }, "oauth-sign": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz", @@ -22278,6 +22424,16 @@ "requires": { "media-typer": "0.3.0", "mime-types": "~2.1.24" + }, + "dependencies": { + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "requires": { + "mime-db": "1.52.0" + } + } } }, "typedarray": { @@ -22304,9 +22460,9 @@ "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==" }, "undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" }, "unescape": { "version": "1.0.1", @@ -22604,14 +22760,6 @@ "webidl-conversions": "^3.0.0" } }, - "wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": 
"sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "requires": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "win-release": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/win-release/-/win-release-1.1.1.tgz", diff --git a/package.json b/package.json index 8d38fe2e..8c14f404 100644 --- a/package.json +++ b/package.json @@ -13,12 +13,14 @@ "@adminjs/fastify": "^3.0.1", "@adminjs/mongoose": "^3.0.1", "@fastify/env": "^3.0.0", + "@fastify/formbody": "^8.0.2", "@fastify/jwt": "^6.5.0", + "@fastify/multipart": "^9.0.3", "@fastify/view": "^7.1.2", "@google-cloud/storage": "^6.10.1", "adminjs": "^6.7.2", "axios": "^1.7.2", - "bcrypt": "^5.1.1", + "bcryptjs": "^3.0.2", "body-parser": "^1.19.0", "boom": "^7.3.0", "chalk": "^4.1.0", @@ -36,12 +38,12 @@ "fastify-file-upload": "^4.0.0", "fastify-formbody": "^5.3.0", "fastify-jwt": "^4.2.0", - "fastify-multer": "^2.0.2", + "fastify-multer": "^2.0.3", "fastify-multipart": "^5.4.0", "fastify-static": "^4.7.0", "fastify-swagger": "^5.2.0", "file-type": "^18.5.0", - "firebase-admin": "^12.3.1", + "firebase-admin": "^13.0.2", "form-data": "^2.3.3", "formidable": "^2.1.1", "fs": "^0.0.1-security", @@ -49,6 +51,7 @@ "gridfile": "^1.1.3", "image-type": "^5.2.0", "libphonenumber-js": "^1.9.6", + "mime-types": "^3.0.1", "moment": "^2.29.4", "moment-timezone": "^0.5.43", "mongodb": "^5.0.1", @@ -56,7 +59,7 @@ "mqtt": "^5.10.1", "multer": "^1.4.5-lts.1", "mv": "^2.1.1", - "node-cron": "^3.0.2", + "node-cron": "^3.0.3", "node-schedule": "^2.1.1", "nodemon": "^2.0.20", "nunjucks": "^3.2.3", diff --git a/src/api-docs/api.html.save b/src/api-docs/api.html.save deleted file mode 100644 index 4bc403af..00000000 --- a/src/api-docs/api.html.save +++ /dev/null @@ -1,67 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/src/controllers/admincontroller.js b/src/controllers/admincontroller.js index af787c81..c3831bb5 100644 --- a/src/controllers/admincontroller.js +++ 
b/src/controllers/admincontroller.js @@ -1,15 +1,17 @@ const Admin = require('../models/admin') const boom = require("boom"); const jwt = require('jsonwebtoken') -const bcrypt = require('bcrypt') +//const bcrypt = require('bcrypt') +const bcrypt = require('bcryptjs'); const fastify = require("fastify"); -const { Tank, MotorData, IotData } = require('../models/tanks') +const { Tank, MotorData, IotData } = require('../models/tanks'); +const { Deparments, City, Branch } = require('../models/Department'); const JWT_SECRET = 'your-secret-key'; async function generateCustomerId(role) { - let customerId; + let adminId; let isUnique = false; let prefix; @@ -31,7 +33,7 @@ async function generateCustomerId(role) { while (!isUnique) { // Generate a random number or string for the customer ID const randomId = Math.floor(1000 + Math.random() * 9000).toString(); // Generates a random number between 1000 and 9999 - customerId = `${prefix}${randomId}`; + adminId = `${prefix}${randomId}`; // Check for uniqueness in the Admin collection const existingAdmin = await Admin.findOne({ customerId }); if (!existingAdmin) { @@ -39,7 +41,7 @@ async function generateCustomerId(role) { } } - return customerId; + return adminId; } @@ -58,26 +60,16 @@ exports.adminSignUp = async (request, reply) => { - // Check if an admin with the same phone number or username already exists - // const existingAdminUsername = await Admin.findOne({ username }); - // const existingAdmin = await Admin.findOne({ phone }); - - // if (existingAdmin) { - // return reply.status(400).send({ message: 'Phone already registered' }); - // } - // if (existingAdminUsername) { - // return reply.status(400).send({ message: 'Username already registered' }); - // } // Hash the password using bcrypt const hashedPassword = await bcrypt.hash(password, 10); - const customerId = await generateCustomerId(role); // Assuming you have this function defined elsewhere + const adminId = await generateCustomerId(role); // Assuming you have 
this function defined elsewhere //const building = 'ADMIN'; // You can customize this logic to derive from a parameter or a default value //const customerId = `AWSU${building}${c_id}`; // Construct the customer ID // Create a new admin object with the hashed password and role - const admin = new Admin({ phone, username, password: hashedPassword, customerId, role }); + const admin = new Admin({ phone, username, password: hashedPassword, adminId, role }); // Save the new admin to the database await admin.save(); @@ -88,35 +80,39 @@ exports.adminSignUp = async (request, reply) => { } }; +exports.editAdmin = async (request, reply) => { + try { + const { customerId } = request.params; + const { + + phone, + username, + picture, + + } = request.body; -// Admin Login Function (With Phone Number) -// exports.adminLogin = async (request, reply) => { -// try { -// const { phone, password } = request.body; + + const existing = await Admin.findOne({ customerId }); + if (!existing) { + return reply.status(404).send({ message: 'City not found' }); + } -// // Check if an admin with the phone number exists -// const admin = await Admin.findOne({ phone }); + existing.phone = phone || existing.phone; + existing.username = username || existing.username; + existing.picture = picture || existing.picture; + -// if (!admin) { -// return reply.status(401).send({ message: 'Invalid phone number or password' }); -// } -// // Compare the password entered by the user with the hashed password stored in the database -// const isPasswordValid = await bcrypt.compare(password, admin.password); -// if (!isPasswordValid) { -// return reply.status(401).send({ message: 'Invalid phone number or password' }); -// } + await existing.save(); + + reply.send({ message: 'Admin user updated successfully' }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; - -// // Generate a JWT token for the authenticated admin -// const token = jwt.sign({ phone: admin.phone, role: 'admin' }, 
JWT_SECRET, { expiresIn: '1h' }); -// return reply.send({ token, admin }); -// } catch (err) { -// reply.status(500).send({ message: err.message }); -// } -// }; exports.adminLogin = async (request, reply) => { try { @@ -161,7 +157,7 @@ exports.adminLogin = async (request, reply) => { access_token: token, phone: admin.phone, type: admin.role, - customerId: admin.customerId || null, + customerId: admin.adminId || null, username: admin.username || null, }, }; @@ -336,3 +332,669 @@ exports.createUser = async (request, reply) => { } } + + exports.getDepartmentDetailsByAdminAndName = async (req, reply) => { + try { + const { adminId } = req.params; + const { departmentName, reportingManager } = req.body; + + if (!adminId) { + return reply.status(400).send({ + simplydata: { error: true, message: "adminId is required in path params" } + }); + } + + if (!departmentName || !reportingManager) { + return reply.status(400).send({ + simplydata: { error: true, message: "departmentName and reportingManager are required in body" } + }); + } + + // ✅ Find department by adminId, departmentName and reportingManager + const department = await Deparments.findOne({ + adminId, + departmentName, + reportingManager + }).lean(); + + if (!department) { + return reply.status(404).send({ + simplydata: { error: true, message: "Department not found with given criteria" } + }); + } + + // ✅ Build response data + const responseData = { + phone: department.phone, + firstName: department.firstName, + lastName: department.lastName, + email: department.email + }; + + return reply.send({ + simplydata: { + error: false, + message: "Department details fetched successfully", + data: responseData + } + }); + } catch (err) { + console.error("Error fetching department details:", err); + reply.status(500).send({ + simplydata: { error: true, message: "Internal server error" } + }); + } +}; + + +exports.getAllCompanys = async (req, reply) => { + try { + const companyList = await City.find(); + + + return 
reply.send({ + status_code: 200, + message: "Fetched successfully", + data: companyList, + }); + } catch (err) { + console.error("Error fetching ", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + + + + +exports.getAllOffices = async (req, reply) => { + try { + const { officeName } = req.query; + + + if (!officeName) { + return reply.code(400).send({ + status_code: 400, + message: "officeName query param is required" + }); + } + + let headOffices, branches, departments; + + if (officeName.trim().toUpperCase() === "ALL") { + // ✅ Fetch all without filtering + [headOffices, branches, departments] = await Promise.all([ + City.find().lean(), + Branch.find().lean(), + Deparments.find().lean() + ]); + } else { + const nameRegex = new RegExp(officeName.trim(), "i"); + [headOffices, branches, departments] = await Promise.all([ + City.find({ officeName: nameRegex }).lean(), + Branch.find({ officeName: nameRegex }).lean(), + Deparments.find({ officeName: nameRegex }).lean() + ]); + } + + if (headOffices.length === 0 && branches.length === 0) { + return reply.code(404).send({ + status_code: 404, + message: "No offices found" + }); + } + + // 🏢 Group by officeName + const grouped = {}; + + // Head offices + headOffices.forEach(ho => { + const key = ho.officeName.trim().toLowerCase(); + if (!grouped[key]) grouped[key] = []; + + const matchingDepartments = departments.filter( + d => d.officeName?.trim().toLowerCase() === key + ); + + const employeeCount = matchingDepartments.reduce((count, dep) => { + const mainPerson = 1; + const subTeamCount = Array.isArray(dep?.team_member?.team_member) + ? 
dep.team_member.team_member.length + : 0; + return count + mainPerson + subTeamCount; + }, 0); + + grouped[key].push({ + officeType: "headOffice", + officeName: ho.officeName.trim(), + city: ho.city?.trim() || "", + cityId: ho.cityId || "", + employeeCount, + phone: ho.phone || "", + address: ho.office_address1 || "", + address2: ho.address2 || "", + state: ho.state || "", + country: ho.country || "", + pincode: ho.pincode || "", + email: ho.email || "", + latitude: ho.latitude || 0, + longitude: ho.longitude || 0, + googleLocation: ho.googleLocation || "", + createdAt: ho.createdAt || "", + updatedAt: ho.updatedAt || "" + }); + }); + + // Branches + branches.forEach(br => { + const key = br.officeName.trim().toLowerCase(); + if (!grouped[key]) grouped[key] = []; + + const matchingDepartments = departments.filter( + d => d.officeName?.trim().toLowerCase() === key && d.city === br.city + ); + + const employeeCount = matchingDepartments.reduce((count, dep) => { + const mainPerson = 1; + const subTeamCount = Array.isArray(dep?.team_member?.team_member) + ? 
dep.team_member.team_member.length + : 0; + return count + mainPerson + subTeamCount; + }, 0); + + grouped[key].push({ + officeType: "branchOffice", + branchId: br.branchId || "", + officeName: br.officeName?.trim() || "", + city: br.city?.trim() || "", + employeeCount, + phone: br.phone || "", + address: br.office_address1 || "", + address2: br.address2 || "", + state: br.state || "", + country: br.country || "", + pincode: br.pincode || "", + email: br.email || "", + contactPerson: br.nameoftheContactPerson || "", + latitude: br.latitude || 0, + longitude: br.longitude || 0, + googleLocation: br.googleLocation || "", + createdAt: br.createdAt || "", + updatedAt: br.updatedAt || "" + }); + }); + + // Convert grouped object into array + const result = Object.values(grouped).map(offices => ({ offices })); + + return reply.code(200).send({ + status_code: 200, + message: "Fetched successfully", + data: result + }); + + } catch (error) { + console.error("Error fetching city offices:", error); + return reply.code(500).send({ + status_code: 500, + message: "Internal server error" + }); + } +}; + +exports.getAllOfficesByCity = async (req, reply) => { + try { + const { city } = req.query; + + if (!city) { + return reply.code(400).send({ + status_code: 400, + message: "city query param is required", + }); + } + + const cityRegex = new RegExp(city.trim(), "i"); + + + // 🔹 Step 1: Find all headOffices in this city + const headOffices = await City.find({ city: cityRegex }).lean(); + + // 🔹 Step 2: Find all branchOffices in this city + const branchMatches = await Branch.find({ city: cityRegex }).lean(); + + if (!headOffices.length && !branchMatches.length) { + return reply.code(404).send({ + status_code: 404, + message: `No headOffice or branch found for city ${city}`, + }); + } + + // 🔹 Step 3: Collect all unique officeNames + const officeNames = [ + ...new Set([ + ...headOffices.map((ho) => ho.officeName.trim()), + ...branchMatches.map((br) => br.officeName.trim()), + ]), + 
]; + + const finalResponse = []; + + // 🔹 Step 4: For each officeName, gather HO + Branches + for (const name of officeNames) { + const ho = await City.findOne({ + officeName: new RegExp(name, "i"), + }).lean(); + + // Get employee count for headOffice (if exists) + let employeeCount = 0; + if (ho) { + const departments = await Deparments.find({ city: ho.city }).lean(); + employeeCount = departments.reduce((count, dep) => { + const mainPerson = 1; + const subTeamCount = Array.isArray(dep?.team_member?.team_member) + ? dep.team_member.team_member.length + : 0; + return count + mainPerson + subTeamCount; + }, 0); + } + + // Get all branches for this officeName + const branches = await Branch.find({ + officeName: new RegExp(name, "i"), + }).lean(); + + const offices = []; + + // Add headOffice if found + if (ho) { + offices.push({ + officeType: "headOffice", + officeName: ho.officeName?.trim() || "", + city: ho.city?.trim() || "", + cityId: ho.cityId || "", + employeeCount, + phone: ho.phone || "", + address: ho.office_address1 || "", + address2: ho.address2 || "", + state: ho.state || "", + country: ho.country || "", + pincode: ho.pincode || "", + email: ho.email || "", + latitude: ho.latitude || 0, + longitude: ho.longitude || 0, + googleLocation: ho.googleLocation || "", + createdAt: ho.createdAt || "", + updatedAt: ho.updatedAt || "", + }); + } + + // Add all branchOffices + branches.forEach((br) => { + offices.push({ + officeType: "branchOffice", + branchId: br.branchId || "", + officeName: br.officeName?.trim() || "", + city: br.city?.trim() || "", + employeeCount, // using HO employee count (optional) + phone: br.phone || "", + address: br.office_address1 || "", + address2: br.address2 || "", + state: br.state || "", + country: br.country || "", + pincode: br.pincode || "", + email: br.email || "", + contactPerson: br.nameoftheContactPerson || "", + latitude: br.latitude || 0, + longitude: br.longitude || 0, + googleLocation: br.googleLocation || "", + 
createdAt: br.createdAt || "", + updatedAt: br.updatedAt || "", + }); + }); + + finalResponse.push({ + officeName: name, + city, + offices, + }); + } + + return reply.code(200).send({ + status_code: 200, + message: "Fetched successfully", + data: finalResponse, + }); + } catch (error) { + console.error("❌ Error in getAllOfficesByCity:", error); + return reply.code(500).send({ + status_code: 500, + message: "Internal server error", + error: error.message, + }); + } +}; + + + + +exports.getCityOffices = async (req, reply) => { + try { + const { officeName } = req.query; + + if (!officeName) { + return reply.code(400).send({ + status_code: 400, + message: "officeName query param is required" + }); + } + + const nameRegex = new RegExp(officeName.trim(), "i"); + + // Fetch head offices, branches, and departments + const [headOffices, branches, departments] = await Promise.all([ + City.find({ officeName: nameRegex }).lean(), + Branch.find({ officeName: nameRegex }).lean(), + Deparments.find({ officeName: nameRegex }).lean() + ]); + + if (headOffices.length === 0 && branches.length === 0) { + return reply.code(404).send({ + status_code: 404, + message: "No offices found for the given officeName" + }); + } + + const cityMap = {}; + + headOffices.forEach(ho => { + const officeNameTrimmed = ho.officeName.trim().toLowerCase(); + + // Get all department docs for this office + const matchingDepartments = departments.filter( + d => d.officeName?.trim().toLowerCase() === officeNameTrimmed + ); + + // Count employees: each department doc = 1 main person + sub-team members + const employeeCount = matchingDepartments.reduce((count, dep) => { + const mainPerson = 1; // the document itself + const subTeamCount = Array.isArray(dep?.team_member?.team_member) + ? 
dep.team_member.team_member.length + : 0; + return count + mainPerson + subTeamCount; + }, 0); + + cityMap[ho.city.trim().toLowerCase()] = { + city: ho.city.trim(), + headOffice: { + officeName: ho.officeName.trim(), + cityId: ho.cityId || "", + employeeCount, + phone: ho.phone || "", + address: ho.office_address1 || "", + state: ho.state || "", + country: ho.country || "", + pincode: ho.pincode || "", + email: ho.email || "" + }, + // branches: [] + }; + }); + + // Attach branches + branches.forEach(br => { + const cityKey = br.city.trim().toLowerCase(); + if (!cityMap[cityKey]) { + cityMap[cityKey] = { + city: br.city.trim(), + branches: [] + }; + } + cityMap[cityKey].branches.push({ + branchId: br.branchId || "", + officeName: br.officeName?.trim() || "", + zone: br.zone || "", + location: br.location || [], + phone: br.phone || "", + address: br.office_address1 || "", + address2: br.address2 || "", + state: br.state || "", + country: br.country || "", + pincode: br.pincode || "", + email: br.email || "", + contactPerson: br.nameoftheContactPerson || "", + createdAt: br.createdAt || "", + updatedAt: br.updatedAt || "" + }); + }); + + return reply.code(200).send({ + status_code: 200, + message: "Fetched successfully", + data: Object.values(cityMap) + }); + } catch (error) { + console.error("Error fetching city offices:", error); + return reply.code(500).send({ + status_code: 500, + message: "Internal server error" + }); + } +}; + + +exports.getOfficeDetails = async (req, reply) => { + try { + let { officeName, city } = req.params; + + if (!officeName || !city) { + return reply.code(400).send({ message: "officeName and city are required." 
}); + } + + // Normalize whitespace and case + officeName = officeName.trim().replace(/\s+/g, ' '); + city = city.trim().replace(/\s+/g, ' '); + + const filters = {}; + + if (officeName.toUpperCase() !== 'ALL') { + filters.officeName = { $regex: new RegExp(officeName.replace(/\s+/g, '\\s*'), 'i') }; + } + + if (city.toUpperCase() !== 'ALL') { + filters.city = { $regex: new RegExp(city.replace(/\s+/g, '\\s*'), 'i') }; + } + + // Query City collection + const cityResults = await City.find(filters).lean(); + + // Query Branch collection + const branchResults = await Branch.find(filters).lean(); + + const combinedResults = [...cityResults, ...branchResults]; + + if (combinedResults.length === 0) { + return reply.status(404).send({ message: "No office details found for the given filters." }); + } + + reply.send({ + status_code: 200, + message: "Office details fetched successfully.", + data: combinedResults, + }); + + } catch (error) { + console.error("Error in getOfficeDetails:", error); + reply.status(500).send({ + status_code: 500, + message: "Internal server error", + error: error.message, + }); + } +}; + + + exports.adminEditTeamMember = async (request, reply) => { + try { + const { departmentId, teamMemberId } = request.params; + const updateData = request.body; + + // Find the installation + const installation = await Deparments.findOne({ departmentId }); + + if (!installation) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + // Find the team member + let teamMember = installation.team_member.team_member.find( + (member) => member.teamMemberId === teamMemberId + ); + + if (!teamMember) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Team member not found", + }, + }); + } + + // Update fields + Object.assign(teamMember, updateData); + + // Save changes + await installation.markModified("team_member.team_member"); + await installation.save(); + + return 
reply.send({ + simplydata: { + error: false, + message: "Team member updated successfully", + }, + }); + + } catch (err) { + console.error("Error updating team member:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + + + exports.AdmindeleteTeamMember = async (request, reply) => { + try { + const { departmentId, teamMemberId } = request.params; + + // Find the installation + const installation = await Deparments.findOne({ departmentId }); + + if (!installation) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + // Find index of the team member + const memberIndex = installation.team_member.team_member.findIndex( + (member) => member.teamMemberId === teamMemberId + ); + + if (memberIndex === -1) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Team member not found", + }, + }); + } + + // Remove the team member from the array + installation.team_member.team_member.splice(memberIndex, 1); + + // Save changes + await installation.markModified("team_member.team_member"); + await installation.save(); + + return reply.send({ + simplydata: { + error: false, + message: "Team member deleted successfully", + }, + }); + + } catch (err) { + console.error("Error deleting team member:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + + + +exports.getCompanyCitiesByOffice = async (request, reply) => { + try { + const { officeName } = request.params; + + let cityList = []; + + if (officeName.toUpperCase() === "ALL") { + // 🔹 Get all cities from both schemas + const branchCities = await Branch.distinct("city"); + const headOfficeCities = await City.distinct("city"); + + cityList = [...branchCities, ...headOfficeCities]; + } else { + // 🔹 Case-insensitive regex for officeName + const nameRegex = new 
RegExp(`^\\s*${officeName.trim()}\\s*$`, "i"); + + const branchCities = await Branch.distinct("city", { + officeName: nameRegex, + }); + const headOfficeCities = await City.distinct("city", { + officeName: nameRegex, + }); + + cityList = [...branchCities, ...headOfficeCities]; + } + + // 🔹 Remove duplicates + filter out empty/null + cityList = [...new Set(cityList.filter((c) => c && c.trim()))]; + + // 🔹 Always add "ALL" as the first option + if (!cityList.includes("ALL")) { + cityList.unshift("ALL"); + } + + return reply.send({ + status_code: 200, + message: + cityList.length > 0 + ? "Cities fetched successfully" + : "No cities found for given officeName", + data: cityList, + }); + } catch (err) { + console.error("❌ Error fetching cities:", err); + return reply.status(500).send({ + status_code: 500, + message: "Internal server error", + }); + } +}; diff --git a/src/controllers/departmentController.js b/src/controllers/departmentController.js index f9d27eec..9e36916f 100644 --- a/src/controllers/departmentController.js +++ b/src/controllers/departmentController.js @@ -1,7 +1,11 @@ const boom = require("boom"); -const bcrypt = require('bcrypt'); +//const bcrypt = require('bcrypt'); +const bcrypt = require('bcryptjs'); + const jwt = require('jsonwebtoken'); const customJwtAuth = require("../customAuthJwt"); +const mongoose = require("mongoose"); + const fastify = require("fastify")({ logger: true, genReqId(req) { @@ -10,15 +14,7 @@ const fastify = require("fastify")({ }); const { Counter} = require('../models/User') -const {Department, Desgination, City, Deparments} = require('../models/Department') -// const generateDepartmentId = async (prefix) => { -// const result = await Counter.findOneAndUpdate( -// { _id: 'department_id' }, -// { $inc: { seq: 1 } }, -// { upsert: true, new: true } -// ); -// return `AW${prefix}${result.seq}`; -// }; +const {Department, Desgination, City, Deparments, Branch, Zone,IndianLocations} = require('../models/Department') const 
generateCityId = async () => { var result = await Counter.findOneAndUpdate( @@ -29,14 +25,15 @@ const generateCityId = async () => { return result.seq; }; -// const generateDesginationId = async (prefix) => { -// const result = await Counter.findOneAndUpdate( -// { _id: 'desgination_id' }, -// { $inc: { seq: 1 } }, -// { upsert: true, new: true } -// ); -// return `AW${prefix}${result.seq}`; -// }; + const generateBranchId = async () => { + var result = await Counter.findOneAndUpdate( + { _id: 'customer_id' }, + { $inc: { seq: 1 } }, + { upsert: true, new: true } + ); + + return result.seq; + }; const generateDepartmentId = async (city, departmentName) => { const cityPrefix = city.substring(0, 2).toUpperCase(); // Extract first two letters of city @@ -51,7 +48,6 @@ const generateDepartmentId = async (city, departmentName) => { return `AW${cityPrefix}${departmentPrefix}${result.seq}`; // Generate ID }; - exports.addCity = async (request, reply) => { try { const { @@ -67,22 +63,106 @@ const generateDepartmentId = async (city, departmentName) => { pincode, createdBy, updatedBy, + email, + gstNo, + googleLocation, + latitude, + longitude, + // nameoftheContactPerson, + + } = request.body; + + // Generate unique cityId + const c_id = await generateCityId(); + const cityId = `AWCI${c_id}`; + + // Check for existing records with specific fields + const existingPhone = await City.findOne({ phone }); + if (existingPhone) { + return reply.status(400).send({ message: 'Phone number already exists' }); + } + + const existingOfficeName = await City.findOne({ officeName }); + if (existingOfficeName) { + return reply.status(400).send({ message: 'Office name already exists' }); + } + + const existingCityId = await City.findOne({ cityId }); + if (existingCityId) { + return reply.status(400).send({ message: 'City ID already exists' }); + } + + // Create new city record + const citys = new City({ + cityId, + phone, + officeName, + location, + city, + office_address1, + address2, + 
state, + zone, + country, + pincode, + email, + gstNo, + googleLocation, + latitude, + longitude, + // nameoftheContactPerson, + createdBy, + updatedBy, + }); + + await citys.save(); + + reply.send({ citys, message: 'City Created Successfully' }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } + }; + + + exports.addBranch = async (request, reply) => { + try { + const { + phone, + land_line_number, + officeName, + location, + city, + state, + country, + zone, + office_address1, + address2, + pincode, + createdBy, + updatedBy, + email, + //nameoftheContactPerson, + googleLocation, + latitude, + longitude } = request.body; // Generate departmentId based on departmentName // const prefix = departmentName.substring(0, 2).toUpperCase(); // Extract first two letters and convert to uppercase - const cityId = await generateCityId(); - + const b_id = await generateBranchId(); + const branchId = `AWBR${b_id}`; + // Check for existing department - const existingStore = await City.findOne({ cityId }); + const existingStore = await Branch.findOne({ branchId }); if (existingStore) { - return reply.status(400).send({ message: 'City is already registered' }); + return reply.status(400).send({ message: 'Branch is already registered' }); } // Create new department - const citys = new City({ - cityId, + const branch = new Branch({ + branchId, phone, + land_line_number, officeName, location, city, @@ -92,88 +172,85 @@ const generateDepartmentId = async (city, departmentName) => { zone, country, pincode, + email, + // nameoftheContactPerson, + googleLocation, + latitude, + longitude, // departmentName, createdBy, updatedBy, }); - await citys.save(); + await branch.save(); - reply.send({ citys, message: 'Account Created Successfully' }); + reply.send({ branch, message: 'Account Created Successfully' }); } catch (err) { reply.status(500).send({ message: err.message }); } }; -// exports.getSinledepartmentData = async (req, reply) => { -// try { -// const { 
departmentId } = req.params; - -// const department = await Department.findOne({ departmentId: departmentId }); - -// if (!department) { -// return reply.code(404).send({ -// success: false, -// message: 'Department not found.' -// }); -// } - -// reply.code(200).send({ -// success: true, -// message: 'Department data retrieved successfully.', -// data: department -// }); -// } catch (error) { -// console.error('Error fetching department data:', error); -// reply.code(500).send({ -// success: false, -// message: 'Failed to retrieve department data.', -// error: error.message, -// }); -// } -// }; - - exports.getallcities = async (req, reply) => { + + exports.addZone = async (request, reply) => { try { - await City.find() - .exec() - .then((docs) => { - reply.send({ status_code: 200, data: docs, count: docs.length }); - }) - .catch((err) => { - console.log(err); - reply.send({ error: err }); - }); + const { + officeName, + location, + city, + zone, + area, + createdBy, + updatedBy, + } = request.body; + + // Generate departmentId based on departmentName + // const prefix = departmentName.substring(0, 2).toUpperCase(); // Extract first two letters and convert to uppercase + const b_id = await generateBranchId(); + const zoneId = `AWZN${b_id}`; + + // Check for existing department + const existingStore = await Zone.findOne({ zoneId }); + if (existingStore) { + return reply.status(400).send({ message: 'Branch is already registered' }); + } + + const zones = new Zone({ + zoneId, + officeName, + location, + city, + zone, + area, + createdBy, + updatedBy, + }); + + await zones.save(); + + reply.send({ zones, message: 'Account Created Successfully' }); } catch (err) { - throw boom.boomify(err); + reply.status(500).send({ message: err.message }); } }; - -// exports.getAllDepartmentsParticularFields = async (req, reply) => { -// try { -// const departments = await Department.find().exec(); - -// // Grouping the data -// const result = { -// cities: [...new 
Set(departments.map((doc) => doc.city))], -// zones: [...new Set(departments.map((doc) => doc.zone))], -// pincodes: [...new Set(departments.map((doc) => doc.pincode))], -// departments: [...new Set(departments.map((doc) => doc.departmentName))], -// states: [...new Set(departments.map((doc) => doc.state))], -// countries: [...new Set(departments.map((doc) => doc.country))], -// }; - -// // Sending the response -// reply.send({ -// status_code: 200, -// data: result, -// count: departments.length, -// }); -// } catch (err) { -// console.error(err); -// reply.send({ error: err.message }); -// } -// }; +exports.getallCompanyNames = async (req, reply) => { + try { + await City.find() + .select("officeName -_id") // Select only officeName and exclude _id + .exec() + .then((docs) => { + const officeNames = ["ALL", ...docs.map((doc) => doc.officeName)]; // Prepend "ALL" + reply.send({ status_code: 200, data: officeNames, count: officeNames.length }); + }) + .catch((err) => { + console.log(err); + reply.send({ error: err }); + }); + } catch (err) { + throw boom.boomify(err); + } +}; + + exports.deletecityInfo = async (req, reply) => { @@ -188,13 +265,23 @@ const generateDepartmentId = async (city, departmentName) => { } }; - + exports.deleteBranchInfo = async (req, reply) => { + try { + const branchId = req.params.branchId; + + const branch = await Branch.findOneAndDelete({ branchId:branchId }); + + reply.send({ status_code: 200, message: 'Delete Sucessfully', branch}); + } catch (err) { + throw boom.boomify(err); + } + }; exports.editcity = async (request, reply) => { try { const { cityId } = request.params; const { - // phone, + phone, city, state, country, @@ -202,7 +289,8 @@ const generateDepartmentId = async (city, departmentName) => { address1, address2, pincode, - // departmentName + email, + officeName } = request.body; @@ -218,17 +306,18 @@ const generateDepartmentId = async (city, departmentName) => { // } - // existing.phone = phone || existing.phone; + 
existing.phone = phone || existing.phone; existing.city = city || existing.city; existing.state = state || existing.state; existing.country = country || existing.country; existing.zone = zone || existing.zone; - // existing.departmentName = departmentName || existing.departmentName; + existing.officeName = officeName || existing.officeName; existing.pincode = pincode || existing.pincode; existing.address1 = address1 || existing.address1; existing.address2 = address2 || existing.address2; - + existing.email = email || existing.email; + await existing.save(); @@ -239,147 +328,220 @@ const generateDepartmentId = async (city, departmentName) => { } }; - -// exports.addDesgination = async (request, reply) => { -// try { -// const { -// phone, -// city, -// firstName, -// lastName, -// departmentName, -// reportingManager, -// email, -// state, -// password, -// country, -// zone, -// address1, -// address2, -// pincode, -// desginationName, -// location, -// createdBy, -// updatedBy, -// } = request.body; - -// // Generate desginationId based on desginationName -// const prefix = departmentName.substring(0, 2).toUpperCase(); -// const desginationId = await generateDesginationId(prefix); - -// // Check if the phone is already registered -// const existingStore = await Desgination.findOne({ phone }); -// if (existingStore) { -// return reply.status(400).send({ message: 'Phone is already registered' }); -// } - -// // Hash the password -// const hashedPassword = await bcrypt.hash(password, 10); - -// // Create a new designation -// const desgination = new Desgination({ -// desginationId, -// city, -// firstName, -// lastName, -// email, -// reportingManager, -// departmentName, -// phone, -// address1, -// address2, -// services: { password: { bcrypt: hashedPassword } }, -// state, -// zone, -// country, -// pincode, -// desginationName, -// location, -// createdBy, -// updatedBy, -// }); - -// await desgination.save(); - -// reply.send({ desgination, message: 'Account 
Created Successfully' }); -// } catch (err) { -// reply.status(500).send({ message: err.message }); -// } -// }; - - -exports.addDepartment = async (request, reply) => { + exports.editBranch = async (request, reply) => { try { + const { branchId } = request.params; const { + phone, - alternativeContactNumber, - gender, - personalEmail, + land_line_number, + officeName, city, - firstName, - lastName, - departmentName, - reportingManager, - email, state, - password, country, zone, address1, address2, pincode, - desginationName, - location, - createdBy, - updatedBy, + email + // departmentName + } = request.body; - // Generate desginationId - const departmentId = await generateDepartmentId(city, departmentName); - - // Check if the phone is already registered - const existingStore = await Deparments.findOne({ phone }); - if (existingStore) { - return reply.status(400).send({ message: 'Phone is already registered' }); + + const existing = await Branch.findOne({ branchId }); + if (!existing) { + return reply.status(404).send({ message: 'Branch not found' }); } - // Hash the password - const hashedPassword = await bcrypt.hash(password, 10); + // const phoneExists = await Department.findOne({ phone, departmentId: { $ne: departmentId } }); + // if (phoneExists) { + // return reply.status(400).send({ message: 'Phone is already registered to another user' }); + // } - // Create a new designation - const department = new Deparments({ - departmentId, - alternativeContactNumber, - gender, - city, - firstName, - lastName, - email, - personalEmail, - reportingManager, - departmentName, - phone, - address1, - address2, - services: { password: { bcrypt: hashedPassword } }, - state, - zone, - country, - pincode, - desginationName, - location, - createdBy, - updatedBy, - }); + + existing.phone = phone || existing.phone; + existing.land_line_number = land_line_number || existing.land_line_number; + existing.city = city || existing.city; + existing.state = state || existing.state; + 
existing.country = country || existing.country; + existing.zone = zone || existing.zone; + existing.officeName = officeName || existing.officeName; + existing.pincode = pincode || existing.pincode; + + existing.address1 = address1 || existing.address1; + existing.address2 = address2 || existing.address2; + existing.email = email || existing.email; + + - await department.save(); + await existing.save(); - reply.send({ department, message: 'Account Created Successfully' }); + reply.send({ message: 'Branch user updated successfully' }); } catch (err) { reply.status(500).send({ message: err.message }); } }; + + + +exports.addDepartment = async (request, reply) => { + try { + const { + phone, + officeName, + alternativeContactNumber, + gender, + personalEmail, + city, + personal_city, + reportingManager_mobile_number, + reportingManager_email, + firstName, + lastName, + departmentName, + reportingManager, + email, + state, + password, + country, + zone, + address1, + address2, + pincode, + desginationName, + location, + picture, + dateOfJoin, + employeeType, + createdBy, + updatedBy, + } = request.body; + + // Generate departmentId + const departmentId = await generateDepartmentId(city, departmentName); + + // Check if the phone is already registered + const existingStore = await Deparments.findOne({ phone }); + if (existingStore) { + return reply.status(400).send({ message: "Phone is already registered" }); + } + + // Hash the password + const hashedPassword = await bcrypt.hash(password, 10); + + // 🟢 Handle reportingManager "Self" + let finalReportingManager = reportingManager; + let finalReportingManagerMobile = reportingManager_mobile_number; + let finalReportingManagerEmail = reportingManager_email; + + if (reportingManager?.toLowerCase() === "self") { + // Default format + let managerString = `${firstName || ""} ${lastName || ""} - (${phone}) - ${city}`; + + // If departmentName is "Head Office" or "Branch Office" → add departmentName + if ( + ["head office", 
"branch office"].includes( + (departmentName || "").toLowerCase().trim() + ) + ) { + managerString += ` - ${departmentName}`; + } + + finalReportingManager = managerString; + finalReportingManagerMobile = phone; + finalReportingManagerEmail = email; + } + + // Create new department + const department = new Deparments({ + departmentId, + alternativeContactNumber, + officeName, + reportingManager_mobile_number: finalReportingManagerMobile, + reportingManager_email: finalReportingManagerEmail, + personal_city, + gender, + city, + firstName, + lastName, + email, + personalEmail, + reportingManager: finalReportingManager, + departmentName, + phone, + address1, + address2, + services: { password: { bcrypt: hashedPassword } }, + state, + zone, + country, + pincode, + desginationName, + location, + picture, + dateOfJoin, + employeeType, + createdBy, + updatedBy, + }); + + await department.save(); + + reply.send({ department, message: "Account Created Successfully" }); + } catch (err) { + console.error("❌ Error in addDepartment:", err); + reply.status(500).send({ message: err.message }); + } +}; + + + exports.getDetails = async (request, reply) => { + try { + const { id } = request.params; + let data; + + if (id.startsWith('AWBR')) { + data = await Branch.findOne({ branchId: id }); + } else { + data = await City.findOne({ cityId: id }); + } + + if (!data) { + return reply.status(404).send({ message: 'Not found' }); + } + + reply.send({ data }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + exports.getCityDetails = async (request, reply) => { + try { + const cityId = request.params.cityId; + const data = await City.findOne({ cityId }); + if (!data) { + return reply.status(404).send({ message: 'City not found' }); + } + reply.send({ data }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + +exports.getBranchDetails = async (request, reply) => { + try { + const branchId = request.params.branchId; + const data = 
await Branch.findOne({ branchId }); + if (!data) { + return reply.status(404).send({ message: 'Branch not found' }); + } + reply.send({ data }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; exports.getSinledepartmentData = async (req, reply) => { try { const { departmentId } = req.params; @@ -423,6 +585,56 @@ exports.addDepartment = async (request, reply) => { throw boom.boomify(err); } }; + + exports.getallCitiesData = async (req, reply) => { + try { + console.log("Fetching all cities..."); // Debug log + const cities = await City.distinct('city'); // Fetch distinct city names from the database + + // Normalize the city names to avoid duplicates + const normalizedCities = [...new Set(cities.map(city => city.trim().toUpperCase()))]; + + console.log("Cities fetched:", normalizedCities); // Log the cleaned cities + reply.send({ status_code: 200, data: normalizedCities, count: normalizedCities.length }); + } catch (err) { + console.error("Error fetching cities:", err); // Log the error for debugging + throw boom.boomify(err); + } + }; + + + exports.getallZonesData = async (req, reply) => { + try { + console.log("Fetching all zones..."); // Debug log + const zones = await City.distinct('zone'); // Fetch distinct zone names from the database + + // Normalize the zone names to avoid duplicates + const normalizedZones = [...new Set(zones.map(zone => zone.trim().toUpperCase()))]; + + console.log("Zones fetched:", normalizedZones); // Log the cleaned zones + reply.send({ status_code: 200, data: normalizedZones, count: normalizedZones.length }); + } catch (err) { + console.error("Error fetching zones:", err); // Log the error for debugging + throw boom.boomify(err); + } + }; + + exports.getallLocationData = async (req, reply) => { + try { + console.log("Fetching all locations..."); // Debug log + const locations = await City.distinct('location'); // Fetch distinct locations from the database + + // Normalize the location names to uppercase 
and remove duplicates + const normalizedLocations = [...new Set(locations.map(location => location.trim().toUpperCase()))]; + + console.log("Locations fetched:", normalizedLocations); // Log the cleaned locations + reply.send({ status_code: 200, data: normalizedLocations, count: normalizedLocations.length }); + } catch (err) { + console.error("Error fetching locations:", err); // Log the error for debugging + throw boom.boomify(err); + } + }; + exports.deletedepartmentInfo = async (req, reply) => { try { @@ -437,146 +649,885 @@ exports.addDepartment = async (request, reply) => { }; - exports.editdepartment = async (request, reply) => { - try { - const { departmentId } = request.params; - const { - - phone, - alternativeContactNumber, - gender, - personalEmail, - city, - firstName, - lastName, - email, - reportingManager, - departmentName, - state, - country, - zone, - address1, - address2, - pincode, - desginationName +exports.editdepartment = async (request, reply) => { + try { + const { departmentId } = request.params; + const { + phone, + alternativeContactNumber, + gender, + personalEmail, + city, + firstName, + lastName, + email, + reportingManager, + departmentName, + state, + country, + zone, + address1, + address2, + pincode, + desginationName, + personal_city, + reportingManager_mobile_number, + reportingManager_email, + officeName, + picture, + employeeType + } = request.body; - } = request.body; - - - const existing = await Deparments.findOne({ departmentId }); - if (!existing) { - return reply.status(404).send({ message: 'Department not found' }); + const existing = await Deparments.findOne({ departmentId }); + if (!existing) { + return reply.status(404).send({ message: "Department not found" }); + } + + const phoneExists = await Deparments.findOne({ + phone, + departmentId: { $ne: departmentId }, + }); + if (phoneExists) { + return reply + .status(400) + .send({ message: "Phone is already registered to another user" }); + } + + // 🟢 Handle 
reportingManager "Self" + let finalReportingManager = reportingManager || existing.reportingManager; + let finalReportingManagerMobile = + reportingManager_mobile_number || existing.reportingManager_mobile_number; + let finalReportingManagerEmail = + reportingManager_email || existing.reportingManager_email; + + if (reportingManager?.toLowerCase() === "self") { + finalReportingManager = `${firstName || existing.firstName || ""} ${ + lastName || existing.lastName || "" + } - (${phone || existing.phone}) - ${city || existing.city}`; + finalReportingManagerMobile = phone || existing.phone; + finalReportingManagerEmail = email || existing.email; + } + + // 🔹 Update fields + existing.phone = phone || existing.phone; + existing.alternativeContactNumber = + alternativeContactNumber || existing.alternativeContactNumber; + existing.personalEmail = personalEmail || existing.personalEmail; + existing.gender = gender || existing.gender; + existing.city = city || existing.city; + existing.state = state || existing.state; + existing.country = country || existing.country; + existing.zone = zone || existing.zone; + existing.desginationName = desginationName || existing.desginationName; + existing.pincode = pincode || existing.pincode; + existing.address1 = address1 || existing.address1; + existing.address2 = address2 || existing.address2; + existing.email = email || existing.email; + existing.firstName = firstName || existing.firstName; + existing.lastName = lastName || existing.lastName; + existing.departmentName = departmentName || existing.departmentName; + existing.personal_city = personal_city || existing.personal_city; + existing.officeName = officeName || existing.officeName; + existing.picture = picture || existing.picture; + existing.employeeType = employeeType || existing.employeeType; + + // 🔹 Assign formatted reportingManager + existing.reportingManager = finalReportingManager; + existing.reportingManager_mobile_number = finalReportingManagerMobile; + 
existing.reportingManager_email = finalReportingManagerEmail; + + await existing.save(); + + reply.send({ message: "Department user updated successfully" }); + } catch (err) { + console.error("❌ Error in editdepartment:", err); + reply.status(500).send({ message: err.message }); + } +}; + + +const getLocationsByCityZoneOffice = async (city, zone, officeName) => { + try { + // Build matchCondition dynamically + const matchCondition = {}; + + // City filter + if (city.trim().toUpperCase() !== "ALL") { + matchCondition.city = { $regex: `^${city.trim().toLowerCase()}$`, $options: "i" }; + } + + // Zone filter + if (zone.trim().toUpperCase() !== "ALL") { + matchCondition.zone = zone.trim(); + } + + // Office name filter + if (officeName && officeName.trim().toUpperCase() !== "ALL") { + matchCondition.officeName = { $regex: `^${officeName.trim()}$`, $options: "i" }; + } + + const result = await Zone.aggregate([ + { + $project: { + city: { $toLower: { $trim: { input: "$city" } } }, + zone: { $trim: { input: "$zone" } }, + officeName: { $trim: { input: "$officeName" } }, + location: 1 + } + }, + { $match: matchCondition }, + { + $group: { + _id: { + city: "$city", + officeName: "$officeName" + }, + locations: { $push: "$location" } + } + }, + { + $project: { + _id: 0, + city: "$_id.city", + officeName: "$_id.officeName", + locations: { + $reduce: { + input: "$locations", + initialValue: [], + in: { $concatArrays: ["$$value", "$$this"] } + } + } + } + } + ]); + + if (result.length) { + // Flatten all locations from all offices if city/zone are ALL + let allLocations = [...new Set(result.flatMap(r => r.locations))]; + + // Ensure "ALL" at the top + if (!allLocations.includes("ALL")) { + allLocations.unshift("ALL"); } + + return { + city: city.trim().toUpperCase(), + officeName: officeName ? officeName.trim().toUpperCase() : "ALL", + locations: allLocations + }; + } else { + return { + city: city.trim().toUpperCase(), + officeName: officeName ? 
officeName.trim().toUpperCase() : "ALL", + locations: ["ALL"] + }; + } + } catch (err) { + console.error(err); + throw new Error("Error fetching locations."); + } +}; + +exports.getZonebasedLocations = async (req, reply) => { + try { + const { city, zone, officeName } = req.query; + console.log("Received City:", `"${city}"`, "Zone:", `"${zone}"`, "Office:", `"${officeName}"`); + + if (!city || !zone) { + return reply.status(400).send({ message: "City and zone are required." }); + } + + const locations = await getLocationsByCityZoneOffice( + city.trim(), + zone.trim(), + officeName ? officeName.trim() : "ALL" + ); + + if (!locations) { + return reply.send({ status_code: 404, message: "No data found." }); + } + + reply.send({ status_code: 200, data: locations }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + + + const getLocationsByZone = async (zone) => { + try { + const result = await City.aggregate([ + { + $match: { + zone: { $regex: `^${zone}$`, $options: "i" }, // Case-insensitive match for the zone + }, + }, + { + $unwind: "$location" // Unwind the location field if it is an array + }, + { + $group: { + _id: "$zone", // Group by zone + locations: { + $addToSet: { + $toUpper: { $trim: { input: "$location" } } // Convert to uppercase and trim whitespace + } + }, + }, + }, + { + $project: { + _id: 0, // Exclude the _id field + zone: "$_id", // Include zone + locations: 1 // Return locations + }, + }, + ]); + + return result; + } catch (err) { + console.error(err); + throw new Error("Error fetching locations."); + } +}; + + - const phoneExists = await Deparments.findOne({ phone, departmentId: { $ne: departmentId } }); - if (phoneExists) { - return reply.status(400).send({ message: 'Phone is already registered to another user' }); + exports.getLocationsByZone = async (req, reply) => { + try { + const { zone } = req.params; // Get zone from path params + + if (!zone) { + return reply.status(400).send({ message: "Zone is required." 
}); } - - existing.phone = phone || existing.phone; - existing.alternativeContactNumber = alternativeContactNumber || existing.alternativeContactNumber; - existing.personalEmail = personalEmail || existing.personalEmail; - existing.gender = gender || existing.gender; - existing.city = city || existing.city; - existing.state = state || existing.state; - existing.country = country || existing.country; - existing.zone = zone || existing.zone; - existing.desginationName = desginationName || existing.desginationName; - existing.pincode = pincode || existing.pincode; + const locations = await getLocationsByZone(zone); + reply.send({ status_code: 200, data: locations }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } + }; + - existing.address1 = address1 || existing.address1; - existing.address2 = address2 || existing.address2; - - existing.email = email || existing.email; - existing.firstName = firstName || existing.firstName; - existing.lastName = lastName || existing.lastName; - existing.departmentName = departmentName || existing.departmentName; - existing.reportingManager = reportingManager || existing.reportingManager - + + const getZonesByCityAndOffice = async (city, officeName) => { + try { + const result = await Zone.aggregate([ + { + $project: { + city: { $trim: { input: "$city" } }, // Trim city + officeName: { $trim: { input: "$officeName" } }, // Trim officeName + zone: 1 + } + }, + { + $match: { + ...(city && city !== "ALL" ? { city: { $regex: `^${city.trim()}$`, $options: "i" } } : {}), + ...(officeName && officeName !== "ALL" ? 
{ officeName: { $regex: `^${officeName.trim()}$`, $options: "i" } } : {}) + } + }, + { + $group: { + _id: { + city: { $toUpper: "$city" }, + officeName: { $toUpper: "$officeName" } + }, + zones: { $addToSet: "$zone" } + } + }, + { + $project: { + _id: 0, + city: "$_id.city", + officeName: "$_id.officeName", + zones: 1 + } + } + ]); + + // Add "ALL" to zones and sort + result.forEach(item => { + item.zones = ["ALL", ...new Set(item.zones)].sort((a, b) => + a === "ALL" ? -1 : b.localeCompare(a) + ); + }); + + + return result; + } catch (err) { + console.error("Error fetching zones:", err); + throw new Error("Error fetching zones."); + } +}; + +exports.getZonesByCityAndOffice = async (req, reply) => { + try { + const { city, officeName } = req.params; + + if (!city || city.trim() === "" || !officeName || officeName.trim() === "") { + return reply.status(400).send({ message: "City and Office Name are required." }); + } + + const zones = await getZonesByCityAndOffice(city.trim(), officeName.trim()); + + if (zones.length === 0) { + return reply.status(404).send({ message: "No zones found for the specified city and office." 
}); + } + + reply.send({ status_code: 200, data: zones }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + + const getAreasByCitys = async (city) => { + try { + const result = await Zone.aggregate([ + { + $project: { + city: { $trim: { input: "$city" } }, // Trim city field in DB + area: 1 // Keep zone field + } + }, + { + $match: { + city: { $regex: `^${city.trim()}$`, $options: "i" }, // Trim & case-insensitive + } + }, + { + $group: { + _id: { $toUpper: "$city" }, // Normalize city name + areas: { $addToSet: "$area" } // Collect unique zones + } + }, + { + $project: { + _id: 0, // Exclude _id + city: "$_id", // Return city name + areas: 1 // Return collected zones + } + } + ]); - await existing.save(); + // Add "ALL" to the zones array and sort it + result.forEach(item => { + item.areas = ["ALL", ...new Set(item.areas)].sort((a, b) => (a === "ALL" ? -1 : a - b)); + }); - reply.send({ message: 'Department user updated successfully' }); + return result; + } catch (err) { + console.error("Error fetching areas:", err); + throw new Error("Error fetching areas."); + } + }; + exports.getAreasByCity = async (req, reply) => { + try { + const { city } = req.params; + + if (!city || city.trim() === "") { + return reply.status(400).send({ message: "City is required." }); + } + + const zones = await getAreasByCitys(city.trim()); // Trim input + + if (zones.length === 0) { + return reply.status(404).send({ message: "No zones found for the specified city." 
}); + } + + reply.send({ status_code: 200, data: zones }); } catch (err) { reply.status(500).send({ message: err.message }); } }; -// exports.getAllDesignationsParticularFields = async (req, reply) => { -// try { -// const departments = await Desgination.find().exec(); - -// // Grouping the data -// const result = { -// cities: [...new Set(departments.map((doc) => doc.city))], -// zones: [...new Set(departments.map((doc) => doc.zone))], -// pincodes: [...new Set(departments.map((doc) => doc.pincode))], -// departments: [...new Set(departments.map((doc) => doc.departmentName))], -// states: [...new Set(departments.map((doc) => doc.state))], -// countries: [...new Set(departments.map((doc) => doc.country))], -// designations: [...new Set(departments.map((doc) => doc.desginationName))], -// reportingMangers: [...new Set(departments.map((doc) => doc.reportingManager))], - -// }; - -// // Sending the response -// reply.send({ -// status_code: 200, -// data: result, -// count: departments.length, -// }); -// } catch (err) { -// console.error(err); -// reply.send({ error: err.message }); -// } -// }; - - - const getLocationsByCityAndZone = async (city, zone) => { + + const getZonesByArea = async (area) => { try { - const result = await City.aggregate([ + const result = await Zone.aggregate([ + { + $project: { + area: { $trim: { input: "$area" } }, // Trim area field in DB + zone: 1 // Keep zone field + } + }, { $match: { - city: city, // Match documents with the same city - zone: zone, // Match documents with the same zone - }, + area: { $regex: `^${area.trim()}$`, $options: "i" } // Case-insensitive match on area + } }, { $group: { - _id: { city: "$city", zone: "$zone" }, // Group by city and zone - locations: { $push: "$location" }, // Collect all location arrays - }, + _id: { $toUpper: "$area" }, // Normalize area name + zones: { $addToSet: "$zone" } // Collect unique zones + } }, { $project: { - _id: 0, // Exclude the _id field - city: "$_id.city", - zone: 
"$_id.zone", - locations: { - $reduce: { - input: "$locations", - initialValue: [], - in: { $concatArrays: ["$$value", "$$this"] }, // Flatten the location arrays - }, + _id: 0, // Exclude _id + area: "$_id", + zones: 1 // Return collected zones + } + } + ]); + + // ✅ Correct sorting: "ALL" first, then other zones in ascending order + result.forEach(item => { + item.zones = ["ALL", ...item.zones.filter(z => z !== "ALL").sort((a, b) => a.localeCompare(b))]; + }); + + return result; + } catch (err) { + console.error("Error fetching zones:", err); + throw new Error("Error fetching zones."); + } + }; + + + // Fastify route handler to get zones based on area only + exports.getZonesByArea = async (req, reply) => { + try { + const { area } = req.params; + + if (!area || area.trim() === "") { + return reply.status(400).send({ message: "Area is required." }); + } + + const zones = await getZonesByArea(area.trim()); + + if (zones.length === 0) { + return reply.status(404).send({ message: "No zones found for the specified area." 
}); + } + + reply.send({ status_code: 200, data: zones }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } + }; + + + +exports.getDepartments = async (req, reply) => { + try { + console.log("Request Params:", req.params); + + let { departmentName, city, officeName, employeeType } = req.params; + + if (!departmentName || !city || !officeName || !employeeType) { + return reply.status(400).send({ + message: "Department Name, City, Office Name, and Employee Type are required.", + }); + } + + const departments = await getDepartmentsByName(officeName, city, departmentName, employeeType); + + if (departments.length === 0) { + return reply.status(404).send({ + message: "No departments found for the specified parameters.", + }); + } + + reply.send({ status_code: 200, data: departments }); + } catch (err) { + console.error("API Error:", err); + reply.status(500).send({ message: err.message }); + } +}; + +const getDepartmentsByName = async (officeName, city, departmentName, employeeType) => { + try { + const query = {}; + + if (officeName && officeName.trim().toUpperCase() !== "ALL") { + query.officeName = { $regex: officeName.trim(), $options: "i" }; + } + + if (city && city.trim().toUpperCase() !== "ALL") { + query.city = { $regex: city.trim(), $options: "i" }; + } + + if (departmentName && departmentName.trim().toUpperCase() !== "ALL") { + query.departmentName = { $regex: departmentName.trim(), $options: "i" }; + } + + if (employeeType && employeeType.trim().toUpperCase() !== "ALL") { + query.employeeType = { $regex: `^${employeeType.trim()}$`, $options: "i" }; +} + + console.log("MongoDB Query:", JSON.stringify(query, null, 2)); + + const result = await Deparments.find(query).lean(); + console.log("Query Result:", result); + + return result; + } catch (err) { + console.error("Error fetching department data:", err); + throw new Error("Error fetching department data."); + } +}; + + + const getDepartmentNames = async () => { + try { + const result = 
await Deparments.aggregate([ + { + $group: { + _id: { + $toUpper: { $trim: { input: "$departmentName" } }, // Convert to uppercase & trim spaces }, }, }, + { + $sort: { _id: 1 } // Sort alphabetically + }, + { + $group: { + _id: null, + departmentNames: { $addToSet: "$_id" } // Collect unique values in an array + } + }, + { + $project: { + _id: 0, + departmentNames: 1 // Return only the array + }, + } ]); - return result; + return result.length > 0 ? result[0].departmentNames : []; // Return an empty array if no data } catch (err) { console.error(err); - throw new Error("Error fetching locations."); + throw new Error("Error fetching department names."); + } + }; + + // API Route + exports.getAllDepartmentNames = async (req, reply) => { + try { + const departments = await getDepartmentNames(); + reply.send({ status_code: 200, data: departments }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } + }; + + +exports.getCitiesByOfficeName = async (req, reply) => { + try { + let { officeName } = req.params; + + if (!officeName) { + return reply.code(400).send({ error: "officeName is required" }); + } + + // Split by comma and normalize names + let officeNames = officeName.split(',').map(name => + name.trim().replace(/\s+/g, ' ') + ); + + // Handle "All" — fetch all cities from both collections + if (officeNames.includes('All')) { + const allCityDocs = await City.find().select("city -_id").lean(); + const allBranchDocs = await Branch.find().select("city -_id").lean(); + + const allCities = [...new Set([ + ...allCityDocs.map(doc => doc.city), + ...allBranchDocs.map(doc => doc.city), + ])]; + + allCities.unshift("ALL"); // ✅ Add "ALL" at the beginning + + return reply.send({ status_code: 200, data: allCities }); + } + + // Build regex conditions for each office name + const regexConditions = officeNames.map(name => ({ + officeName: { $regex: new RegExp(name.replace(/\s+/g, '\\s*'), 'i') } + })); + + // Query both collections + const cityResults = 
await City.find({ $or: regexConditions }).select("city -_id").lean(); + const branchResults = await Branch.find({ $or: regexConditions }).select("city -_id").lean(); + + // Extract and merge unique city names + const cityNames = [...new Set([ + ...cityResults.map(c => c.city), + ...branchResults.map(b => b.city) + ])]; + + cityNames.unshift("ALL"); // ✅ Add "ALL" at the beginning + + reply.send({ status_code: 200, data: cityNames }); + } catch (err) { + console.error("Error fetching cities:", err); + reply.send({ error: err.message }); + } +}; + + // Helper function to fetch department names by city + const getDepartmentNamesByCity = async (city) => { + try { + const trimmedCity = city.trim(); + + const query = { + $or: [ + { city: { $regex: `^\\s*${trimmedCity}\\s*$`, $options: "i" } }, + // { officeName: { $regex: `^\\s*${trimmedCity}\\s*$`, $options: "i" } } + ] + }; + + console.log("MongoDB Query:", JSON.stringify(query, null, 2)); + + const result = await Deparments.find(query) + .select("departmentName -_id") + .lean(); + + // Remove duplicate department names + return [...new Set(result.map(doc => doc.departmentName))]; + } catch (error) { + console.error("Error fetching departments by city or officeName:", error); + throw new Error("Error fetching departments by city or officeName."); + } +}; + +exports.getOffices = async (req, reply) => { + try { + let { officeName, city } = req.params; + + const filter = {}; + + // Apply officeName filter only if not ALL + if (officeName && officeName.toUpperCase() !== 'ALL') { + const officeNames = officeName.split(',').map(name => + new RegExp(name.trim().replace(/\s+/g, '\\s*'), 'i') // fuzzy match + ); + filter.officeName = { $in: officeNames }; + } + + // Apply city filter only if not ALL + if (city && city.toUpperCase() !== 'ALL') { + const cities = city.split(',').map(name => + new RegExp(name.trim().replace(/\s+/g, '\\s*'), 'i') + ); + filter.city = { $in: cities }; + } + + console.log("Filter being applied:", 
filter); + + // Fetch offices from DB + const offices = await Deparments.find(filter).lean(); + + // Extract unique department names + let departmentNames = [...new Set(offices.map(o => o.departmentName))]; + + // Always include "ALL" at the start of the list + departmentNames = ['ALL', ...departmentNames]; + + reply.send({ + status_code: 200, + message: "Fetched successfully", + data: departmentNames, + }); + + } catch (error) { + console.error("Error in getOffices:", error); + reply.code(500).send({ + status_code: 500, + message: "Internal server error", + error: error.message, + }); + } +}; + + exports.getDepartmentsByCity = async (req, reply) => { + try { + const { city } = req.params; + if (!city || city.trim() === "") { + return reply.status(400).send({ message: "City is required." }); + } + + const departmentNames = await getDepartmentNamesByCity(city); + + if (departmentNames.length === 0) { + return reply.status(404).send({ message: "No departments found for the specified city." 
}); + } + + reply.send({ status_code: 200, data: departmentNames }); + } catch (error) { + console.error("API Error:", error); + reply.status(500).send({ message: error.message }); } }; + - exports.getZonebasedLocations = async (req, reply) => { + exports.getAllStates = async (req, reply) => { try { - const { city, zone } = req.query; - const locations = await getLocationsByCityAndZone(city, zone); - reply.send({ status_code: 200, data: locations }); - } catch (err) { - reply.status(500).send({ message: err.message }); + const states = await IndianLocations.find({}, "state").sort({ state: 1 }); // Sorting A to Z + reply.send(states); + } catch (err) { + reply.status(500).send({ error: "Failed to fetch states" }); + } + }; + + exports.getAllCities = async (req, reply) => { + try { + // Fetch only the majorCities field from all documents + const docs = await IndianLocations.find({}, "majorCities").lean(); + + // Flatten the array of arrays and remove duplicates + const cities = [...new Set(docs.flatMap(doc => doc.majorCities))]; + + // Sort alphabetically (case-insensitive) + cities.sort((a, b) => a.localeCompare(b, undefined, { sensitivity: "base" })); + + reply.send(cities); + + } catch (err) { + console.error("Error fetching cities:", err); + reply.status(500).send({ error: "Failed to fetch cities" }); + } +}; + + + exports.getStaeBasedCites = async (request, reply) => { + try { + const { stateName } = request.params; + const state = await IndianLocations.findOne({ state: stateName }, "majorCities"); + + if (!state) { + return reply.status(404).send({ error: "State not found" }); + } + + reply.send(state.majorCities); + } catch (err) { + reply.status(500).send({ error: "Failed to fetch cities" }); + } + }; + + +exports.getCitiesBasedState = async (request, reply) => { + try { + // Match the param name from the route exactly + const { majorcities } = request.params; + + if (!majorcities) { + return reply.status(400).send({ error: "majorcities param is required" }); 
+ } + + // Case-insensitive regex match against elements in the array + const stateDoc = await IndianLocations.findOne( + { majorCities: { $regex: new RegExp(`^${majorcities.trim()}$`, "i") } }, + "state" + ).lean(); + + if (!stateDoc) { + return reply.status(404).send({ error: "City not found" }); + } + + reply.send({ state: stateDoc.state }); + + } catch (err) { + console.error("Error fetching state:", err); + reply.status(500).send({ error: "Failed to fetch state" }); + } +}; + + +exports.getStaffDepartmentDetails = async (request, reply) => { + try { + const { officeName, city } = request.params; + + const department = await Deparments.find({ + officeName: { $regex: officeName.trim(), $options: "i" }, // no ^$ anchors + city: { $regex: city.trim(), $options: "i" } + }); + + if (!department.length) { + return reply.status(404).send({ message: "Department not found" }); + } + + reply.send({ department }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + + +exports.updateBranchOrCompanyDetails = async (request, reply) => { + try { + const { id } = request.params; + const updateData = request.body; + let updatedDoc; + + if (id.startsWith("AWBR")) { + // Find existing Branch before update + const existing = await Branch.findOne({ branchId: id }); + + updatedDoc = await Branch.findOneAndUpdate( + { branchId: id }, + { ...updateData, updatedAt: new Date() }, + { new: true } + ); + + // 🔄 Cascade update to Department employees if city/officeName changed + if (updatedDoc && existing) { + if ( + (updateData.city && updateData.city !== existing.city) || + (updateData.officeName && updateData.officeName !== existing.officeName) + ) { + await Deparments.updateMany( + { officeName: existing.officeName, city: existing.city }, + { + $set: { + city: updateData.city || existing.city, + officeName: updateData.officeName || existing.officeName, + }, + } + ); + } } - }; \ No newline at end of file + } else if (id.startsWith("AWCI")) { + // Find 
existing City before update + const existing = await City.findOne({ cityId: id }); + + updatedDoc = await City.findOneAndUpdate( + { cityId: id }, + { ...updateData, updatedAt: new Date() }, + { new: true } + ); + + // 🔄 Cascade update to Department employees if city/officeName changed + if (updatedDoc && existing) { + if ( + (updateData.city && updateData.city !== existing.city) || + (updateData.officeName && updateData.officeName !== existing.officeName) + ) { + await Deparments.updateMany( + { officeName: existing.officeName, city: existing.city }, + { + $set: { + city: updateData.city || existing.city, + officeName: updateData.officeName || existing.officeName, + }, + } + ); + } + } + } else { + return reply.code(400).send({ error: "Invalid ID format" }); + } + + if (!updatedDoc) { + return reply.code(404).send({ message: "Record not found" }); + } + + return reply.send({ + message: "Details updated successfully", + data: updatedDoc, + }); + } catch (err) { + request.log.error(err); + return reply + .code(500) + .send({ error: "Failed to update details", details: err.message }); + } +}; diff --git a/src/controllers/installationController.js b/src/controllers/installationController.js new file mode 100644 index 00000000..d07c96ed --- /dev/null +++ b/src/controllers/installationController.js @@ -0,0 +1,6683 @@ +const boom = require("boom"); +//const bcrypt = require('bcrypt'); +const bcrypt = require('bcryptjs'); + +const jwt = require('jsonwebtoken'); +const customJwtAuth = require("../customAuthJwt"); +const { Deparments, City, Branch } = require("../models/Department"); +const { Install, SensorStock, SensorQuotation, Order, Insensors, MasterSlaveData, ElectrictyWorkPictures, PlumbingWorkPictures, MaterialRecievedPictures, Support, Repairorder } = require("../models/store"); +const { Counter, User } = require("../models/User"); +const { IotData, Tank } = require("../models/tanks"); +const moment = require('moment-timezone'); + +const fastify = 
require("fastify")({ + logger: true, + //disableRequestLogging: true, + genReqId(req) { + // you get access to the req here if you need it - must be a synchronous function + return uuidv4(); + }, +}); + + +const generateTeamMemberId = async () => { + var result = await Counter.findOneAndUpdate( + { _id: 'teamMemberId_id' }, + { $inc: { seq: 1 } }, + { upsert: true, new: true } + ); + + return result.seq; + }; + + +exports.createTeamMember = async (req, reply) => { + try { + const { adminId } = req.params; + const { departmentId, firstName, phone, password, email, alternativePhone, status ,officeName,city} = req.body; + + if (!adminId) { + return reply.status(400).send({ simplydata: { error: true, message: "adminId is required in path params" } }); + } + + // Check if admin exists + const admin = await Admin.findOne({ adminId }); + if (!admin) { + return reply.status(404).send({ simplydata: { error: true, message: "Admin not found" } }); + } + + // Check if department exists + const department = await Deparments.findOne({ departmentId }); + if (!department) { + return reply.status(404).send({ simplydata: { error: true, message: "Department not found" } }); + } + + // ✅ Update adminId in department (if needed) + department.adminId = adminId; + + // Check if phone already exists + const existingMember = department.team_member.team_member.find(member => member.phone === phone); + if (existingMember) { + return reply.status(400).send({ simplydata: { error: true, message: "Phone number already exists in the team" } }); + } + + // Generate new teamMemberId + const c_id = await generateTeamMemberId(); + const teamMemberId = `AWTM${c_id}`; + + // Hash password + const hashedPassword = await bcrypt.hash(password, 10); + + // Create new member + const newTeamMember = { + teamMemberId, + firstName, + phone, + email, + alternativePhone, + installationTeamMemId: departmentId, + departmentId, + password: hashedPassword, + status: status || "active", + officeName,city + }; + + // 
Add to team_member array + department.team_member.team_member.push(newTeamMember); + + // ✅ Save the department with updated adminId and new member + await department.save(); + + return reply.send({ + simplydata: { + error: false, + message: "Team member created successfully", + teamMemberId: newTeamMember.teamMemberId, + } + }); + } catch (err) { + console.error("Error creating team member:", err); + reply.status(500).send({ simplydata: { error: true, message: "Internal server error" } }); + } +}; + + + + + + +exports.getTeamMembers = async (request, reply) => { + try { + const { departmentId, officeName, city } = request.params; + + // Build query dynamically + const query = { + departmentId, + officeName: { $regex: new RegExp(`^\\s*${officeName.trim()}\\s*$`, "i") }, + }; + + // Only add city filter if city !== "ALL" + if (city && city.toUpperCase() !== "ALL") { + query.city = { $regex: new RegExp(`^\\s*${city.trim()}\\s*$`, "i") }; + } + + const departments = await Deparments.find(query).lean(); + + if (!departments.length) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "No departments found for given filters", + }, + }); + } + + // Collect team members from all matching departments + const teamMembers = departments.flatMap(dep => dep.team_member?.team_member || []); + + return reply.send({ + simplydata: { + error: false, + message: "Team members retrieved successfully", + teamMembers, + }, + }); + + } catch (err) { + console.error("Error fetching team members:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } +}; + + +// exports.getAllDepartments = async (request, reply) => { +// try { +// const { officeName, city } = request.params; + +// if (!officeName || !city) { +// return reply.status(400).send({ +// simplydata: { +// error: true, +// message: "officeName and city are required in path params", +// }, +// }); +// } + +// // Case-insensitive regex without 
start/end anchors to avoid trailing space issues +// const nameRegex = new RegExp(officeName.trim().replace(/\s+/g, "\\s*"), "i"); +// const cityRegex = new RegExp(city.trim().replace(/\s+/g, "\\s*"), "i"); + +// // 1️⃣ Branch match +// const branchMatch = await Branch.findOne({ +// officeName: nameRegex, +// city: cityRegex, +// }).lean(); + +// // 2️⃣ City match +// const cityMatch = await City.findOne({ +// officeName: nameRegex, +// city: cityRegex, +// }).lean(); + +// // 3️⃣ Departments +// let departments = await Deparments.find({ +// officeName: nameRegex, +// city: cityRegex, +// }).lean(); + +// // 🔹 Add nameoftheContactPerson for departments +// departments = departments.map(dep => ({ +// ...dep, +// nameoftheContactPerson: `${(dep.firstName || "").trim()} ${(dep.lastName || "").trim()}`.trim() +// })); + +// const responseData = [{ firstName: "Self" }]; + +// if (branchMatch) { +// responseData.push({ +// officeType: "branch", +// ...branchMatch +// }); +// } +// if (cityMatch) { +// responseData.push({ +// officeType: "headOffice", +// ...cityMatch +// }); +// } + +// // Push modified departments +// responseData.push(...departments); + +// return reply.send({ +// simplydata: { +// error: false, +// message: +// departments.length || branchMatch || cityMatch +// ? 
"Data retrieved successfully" +// : "No data found for the given officeName and city", +// data: responseData, +// }, +// }); + +// } catch (err) { +// console.error("Error fetching departments:", err); +// return reply.status(500).send({ +// simplydata: { +// error: true, +// message: "Internal server error", +// }, +// }); +// } +// }; + +exports.getAllDepartments = async (request, reply) => { + try { + const { officeName } = request.params; + + if (!officeName ) { + return reply.status(400).send({ + simplydata: { + error: true, + message: "officeName and city are required in path params", + }, + }); + } + + // Regex for officeName (case-insensitive, flexible spaces) + const nameRegex = new RegExp( + officeName.trim().replace(/\s+/g, "\\s*"), + "i" + ); + + // // If city === "ALL" → no filter, else regex + // const cityFilter = + // city.toUpperCase() === "ALL" + // ? {} + // : { city: new RegExp(city.trim().replace(/\s+/g, "\\s*"), "i") }; + + // 1️⃣ Branch match (all branches for that officeName) + const branchMatches = await Branch.find({ + officeName: nameRegex, + //...cityFilter, + }).lean(); + + // 2️⃣ City (headOffice) match + const cityMatches = await City.find({ + officeName: nameRegex, + //...cityFilter, + }).lean(); + + // 3️⃣ Departments (all matching officeName + city filter) + let departments = await Deparments.find({ + officeName: nameRegex, + // ...cityFilter, + }).lean(); + + // Add contactPerson to departments + departments = departments.map((dep) => ({ + ...dep, + nameoftheContactPerson: `${(dep.firstName || "").trim()} ${ + (dep.lastName || "").trim() + }`.trim(), + })); + + // 🔹 Build response + const responseData = [{ firstName: "Self" },{ reportingManager: "Self" }]; + + // branchMatches.forEach((br) => + // responseData.push({ + // officeType: "branch", + // ...br, + // }) + // ); + + // cityMatches.forEach((ho) => + // responseData.push({ + // officeType: "headOffice", + // ...ho, + // }) + // ); + + responseData.push(...departments); + + 
return reply.send({ + simplydata: { + error: false, + message: + departments.length || branchMatches.length || cityMatches.length + ? "Data retrieved successfully" + : "No data found for the given officeName and city", + data: responseData, + }, + }); + } catch (err) { + console.error("Error fetching departments:", err); + return reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } +}; + +exports.assignTeamMemberToQuotation = async (request, reply) => { + try { + const { installationId } = request.params; + const { teamMemberId, quotationId } = request.body; + + if (!teamMemberId || !quotationId) { + return reply.status(400).send({ + simplydata: { + error: true, + message: "Both teamMemberId and quotationId are required", + }, + }); + } + + // 🔹 Find installation by installationId + const installation = await Install.findOne({ installationId }); + + if (!installation) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + // 🔹 Extract team members list + const teamMembers = installation.team_member?.team_member || []; + + // 🔹 Check if the provided teamMemberId exists in the installation's team + const assignedTeamMember = teamMembers.find( + (member) => member.teamMemberId === teamMemberId + ); + + if (!assignedTeamMember) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Team member not found in this installation", + }, + }); + } + + // 🔹 Find or create the quotation for the given installationId + let quotation = await Order.findOne({ installationId, quatationId: quotationId }); + + if (!quotation) { + quotation = new Order({ + installationId, + quatationId: quotationId, + assignedTeamMembers: [], // Ensure assignedTeamMembers array is initialized + quatation_status: "Pending", // Default status when created + }); + } + + // 🔹 Assign the team member to the quotation + if (!quotation.assignedTeamMembers) { + 
quotation.assignedTeamMembers = []; + } + + if (!quotation.assignedTeamMembers.includes(teamMemberId)) { + quotation.assignedTeamMembers.push(teamMemberId); + } + + // 🔹 Update order status when a team member is assigned + quotation.quatation_status = "Assigned"; // Update status + + // 🔹 Save the updated quotation in the Order schema + await quotation.save(); + + // 🔹 Update Installation schema with quotationId + installation.quatationId = quotationId; + await installation.save(); + + return reply.send({ + simplydata: { + error: false, + message: "Team member assigned to quotation successfully", + quotation, + }, + }); + + } catch (err) { + console.error("Error assigning team member to quotation:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } +}; + + exports.getAllInstallers = async (request, reply) => { + try { + const { departmentName } = request.params; // Get installationId from request params + + // Check if installation exists + const installationList = await Deparments.find({ departmentName }); + + if (!installationList) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + + + return reply.send({ + simplydata: { + error: false, + installationList, // Return the list of team members + }, + }); + + } catch (err) { + console.error("Error fetching team members:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + + exports.editTeamMember = async (request, reply) => { + try { + const { installationId, teamMemberId } = request.params; + const updateData = request.body; + + // Find the installation + const installation = await Install.findOne({ installationId }); + + if (!installation) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + // Find the team member + let teamMember = 
installation.team_member.team_member.find( + (member) => member.teamMemberId === teamMemberId + ); + + if (!teamMember) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Team member not found", + }, + }); + } + + // Update fields + Object.assign(teamMember, updateData); + + // Save changes + await installation.markModified("team_member.team_member"); + await installation.save(); + + return reply.send({ + simplydata: { + error: false, + message: "Team member updated successfully", + }, + }); + + } catch (err) { + console.error("Error updating team member:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + + exports.deleteTeamMember = async (request, reply) => { + try { + const { installationId, teamMemberId } = request.params; + + // Find the installation + const installation = await Install.findOne({ installationId }); + + if (!installation) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Installation not found", + }, + }); + } + + // Find index of the team member + const memberIndex = installation.team_member.team_member.findIndex( + (member) => member.teamMemberId === teamMemberId + ); + + if (memberIndex === -1) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "Team member not found", + }, + }); + } + + // Remove the team member from the array + installation.team_member.team_member.splice(memberIndex, 1); + + // Save changes + await installation.markModified("team_member.team_member"); + await installation.save(); + + return reply.send({ + simplydata: { + error: false, + message: "Team member deleted successfully", + }, + }); + + } catch (err) { + console.error("Error deleting team member:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + +exports.getInstallationTeamMembers = async (request, reply) => { + try { + const 
installationId = request.params.installationId; + const installation = await Install.findOne({ installationId }); + if (!installation) { + return reply.status(404).send({ message: 'Installation not found' }); + } + const teamMembers = installation.team_member.team_member; + reply.send({ teamMembers }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + exports.getQuotationsByInstallationId = async (request, reply) => { + try { + const { installationId } = request.params; + + if (!installationId) { + return reply.status(400).send({ + simplydata: { + error: true, + message: "Installation ID is required", + }, + }); + } + + // 🔹 Fetch quotations based on installationId + const quotations = await Order.find({ installationId }); + + if (!quotations || quotations.length === 0) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "No quotations found for this installation ID", + }, + }); + } + + return reply.send({ + simplydata: { + error: false, + message: "Quotations fetched successfully", + quotations, + }, + }); + } catch (err) { + console.error("Error fetching quotations:", err); + reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } + }; + + +exports.getQuotationsByInstallationAndTeamMember = async (request, reply) => { + try { + const { installationId, teamMemberId } = request.params; + + if (!installationId || !teamMemberId) { + return reply.status(400).send({ + simplydata: { + error: true, + message: "Both installationId and teamMemberId are required", + }, + }); + } + + // 🔹 Find quotations matching installationId and assignedTeamMembers + let quotations = await Order.find({ + installationId, + assignedTeamMembers: teamMemberId, + }); + + if (!quotations || quotations.length === 0) { + return reply.status(404).send({ + simplydata: { + error: true, + message: "No quotations found for this installation and team member", + }, + }); + } + + // ✅ Step 1: update 
master_connections where work_status is missing to 'active' + for (const order of quotations) { + let updated = false; + + if (Array.isArray(order.master_connections)) { + for (const mc of order.master_connections) { + if (!mc.work_status) { + mc.work_status = 'active'; + updated = true; + } + } + } + + if (updated) { + await order.save(); + } + } + + // Re-fetch quotations as lean after update + quotations = await Order.find({ + installationId, + assignedTeamMembers: teamMemberId, + }).lean(); + + // ✅ Filter: keep only quotations where at least one master_connections.work_status === 'active' + quotations = quotations.filter(q => + Array.isArray(q.master_connections) && + q.master_connections.some(mc => mc.work_status === 'active') + ); + + // If no quotations left after filtering, return empty list + if (!quotations.length) { + return reply.send({ + simplydata: { + error: false, + message: "No active quotations found for this installation and team member", + quotations: [], + }, + }); + } + + // 🔹 Enrich each quotation with customer details & keep only active master_connections + const enrichedQuotations = await Promise.all( + quotations.map(async (quotation) => { + const customer = await User.findOne({ customerId: quotation.customerId }).lean(); + + const activeMasters = quotation.master_connections?.filter(mc => + mc.work_status === 'active' + ) || []; + + // 🔹 Take work_status of the first active master (if exists) + const work_status = activeMasters.length > 0 ? 
activeMasters[0].work_status : null; + + return { + ...quotation, + work_status, + master_connections: activeMasters, + customer: customer || null, + // ✅ add work_status field outside master_connections + }; + }) + ); + + return reply.send({ + simplydata: { + error: false, + message: "Active quotations fetched successfully", + quotations: enrichedQuotations, + }, + }); + } catch (err) { + console.error("Error fetching quotations:", err); + return reply.status(500).send({ + simplydata: { + error: true, + message: "Internal server error", + }, + }); + } +}; + + +exports.getDepartmentByFirstName = async (req, reply) => { + try { + let { departmentName, firstName } = req.params; + + if (!departmentName) { + return reply.status(400).send({ + simplydata: { error: true, message: "departmentName is required" }, + }); + } + if (!firstName) { + return reply.status(400).send({ + simplydata: { error: true, message: "firstName is required" }, + }); + } + + departmentName = departmentName.trim(); + firstName = firstName.trim(); + + console.log("Searching for:", { departmentName, firstName }); + + const department = await Deparments.findOne({ + firstName: { $regex: `^\\s*${firstName}\\s*$`, $options: "i" }, + departmentName: { $regex: `^${departmentName}$`, $options: "i" } + }).lean(); + + console.log("Department found:", department); + + if (!department) { + return reply.status(404).send({ + simplydata: { error: true, message: "Department not found" }, + }); + } + + return reply.send({ + simplydata: { + error: false, + message: "Department details fetched successfully", + firstName: department.firstName, + phone: department.phone, + lastName: department.lastName, + email: department.email + }, + }); + + } catch (err) { + console.error("Error fetching department details:", err); + return reply.status(500).send({ + simplydata: { error: true, message: "Internal server error" }, + }); + } +}; + + + + // const moment = require('moment-timezone'); + + exports.getByHardwareId = async 
(req, reply) => { + try { + const { hardwareId } = req.params; + + if (!hardwareId) { + return reply.status(400).send({ error: "hardwareId is required" }); + } + + console.log("Fetching details for hardwareId:", hardwareId); + + const iotData = await IotData.find({ hardwareId }) + .sort({ date: -1 }) + .limit(3) + .lean(); + + if (!iotData || iotData.length === 0) { + return reply.send({ status_code: 404, message: "IoT Data not found", data: null }); + } + + const latestRecord = iotData[0]; + const indiaTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const connected_gsm_date = indiaTime.format("DD-MM-YYYY"); + const connected_gsm_time = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + const isGSMConnected = diffInMinutes <= 1; + const gsmStatus = isGSMConnected ? "connected" : "disconnected"; + const gsmLastCheckTime = now.format("DD-MM-YYYY HH:mm:ss"); + + await Insensors.findOneAndUpdate( + { hardwareId }, + { + $set: { + connected_gsm_date, + connected_gsm_time, + connected_status: gsmStatus, + gsm_last_check_time: gsmLastCheckTime + } + } + ); + + const tanksWithConnectionStatus = latestRecord.tanks.map(tank => { + const tankMoment = moment.tz(tank.date, "Asia/Kolkata"); + const tankDiff = now.diff(tankMoment, "minutes"); + + return { + ...tank, + connected_status: tankDiff <= 1 ? "connected" : "disconnected" + }; + }); + + // 🔁 Raise ticket if applicable + const sensor = await Insensors.findOne({ hardwareId }).lean(); + if (sensor?.customerId) { + //await raiseATicketLikeLogic(sensor.customerId, hardwareId); + } + + return reply.send({ + status_code: 200, + message: "Success", + data: { + hardwareId, + gsm_connected_status: gsmStatus, + gsmStatus: isGSMConnected ? 
"GSM Connected" : "GSM Not Connected", + connected_gsm_date, + connected_gsm_time, + gsm_last_check_time: gsmLastCheckTime, + tanks: tanksWithConnectionStatus, + date: latestRecord.date, + time: latestRecord.time + } + }); + } catch (err) { + console.error("Error in getByHardwareId:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + + +exports.getByHardwareIdSupport = async (req, reply) => { + try { + const { hardwareId } = req.params; + + if (!hardwareId) { + return reply.status(400).send({ error: "hardwareId is required" }); + } + + console.log("Fetching details for hardwareId:", hardwareId); + + const iotData = await IotData.find({ hardwareId }) + .sort({ date: -1 }) + .limit(3) + .lean(); + + if (!iotData || iotData.length === 0) { + return reply.send({ status_code: 404, message: "IoT Data not found", data: null }); + } + + const latestRecord = iotData[0]; + const indiaTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const connected_gsm_date = indiaTime.format("DD-MM-YYYY"); + const connected_gsm_time = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + const isGSMConnected = diffInMinutes <= 1; + const gsmStatus = isGSMConnected ? "connected" : "disconnected"; + const gsmLastCheckTime = now.format("DD-MM-YYYY HH:mm:ss"); + + await Insensors.findOneAndUpdate( + { hardwareId }, + { + $set: { + connected_gsm_date, + connected_gsm_time, + connected_status: gsmStatus, + support_gsm_last_check_time: gsmLastCheckTime + } + } + ); + + const tanksWithConnectionStatus = latestRecord.tanks.map(tank => { + const tankMoment = moment.tz(tank.date, "Asia/Kolkata"); + const tankDiff = now.diff(tankMoment, "minutes"); + + return { + ...tank, + connected_status: tankDiff <= 1 ? 
"connected" : "disconnected" + }; + }); + + // 🔁 Raise ticket if applicable + const sensor = await Insensors.findOne({ hardwareId }).lean(); + if (sensor?.customerId) { + //await raiseATicketLikeLogic(sensor.customerId, hardwareId); + } + + return reply.send({ + status_code: 200, + message: "Success", + data: { + hardwareId, + gsm_connected_status: gsmStatus, + gsmStatus: isGSMConnected ? "GSM Connected" : "GSM Not Connected", + connected_gsm_date, + connected_gsm_time, + support_gsm_last_check_time: gsmLastCheckTime, + tanks: tanksWithConnectionStatus, + date: latestRecord.date, + time: latestRecord.time + } + }); + } catch (err) { + console.error("Error in getByHardwareId:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + +exports.getByHardwareIdSupportTeamMember = async (req, reply) => { + try { + const { hardwareId } = req.params; + + if (!hardwareId) { + return reply.status(400).send({ error: "hardwareId is required" }); + } + + console.log("Fetching details for hardwareId:", hardwareId); + + const iotData = await IotData.find({ hardwareId }) + .sort({ date: -1 }) + .limit(3) + .lean(); + + if (!iotData || iotData.length === 0) { + return reply.send({ status_code: 404, message: "IoT Data not found", data: null }); + } + + const latestRecord = iotData[0]; + + const indiaTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const connected_gsm_date = indiaTime.format("DD-MM-YYYY"); + const connected_gsm_time = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + const isGSMConnected = diffInMinutes <= 1; + const gsmStatus = isGSMConnected ? 
"connected" : "disconnected"; + + const gsmLastCheckTime = now.format("DD-MM-YYYY HH:mm:ss"); // formatted current time + + // ✅ Update slaves connected to this master hardwareId + await Insensors.updateMany( + { connected_to: hardwareId }, + { + $set: { + connected_gsm_date, + connected_gsm_time, + connected_status: gsmStatus, + team_member_support_gsm_last_check_time: gsmLastCheckTime + } + } + ); + + // ✅ Update the master device itself (if it's a master) + await Insensors.updateOne( + { hardwareId }, + { + $set: { + connected_gsm_date, + connected_gsm_time, + connected_status: gsmStatus, + team_member_support_gsm_last_check_time: gsmLastCheckTime + } + } + ); + + // ✅ Annotate tanks with LoRa connection status + const tanksWithConnectionStatus = latestRecord.tanks.map(tank => { + const tankMoment = moment.tz(tank.date, "Asia/Kolkata"); + const tankDiff = now.diff(tankMoment, "minutes"); + + return { + ...tank, + connected_status: tankDiff <= 1 ? "connected" : "disconnected" + }; + }); + + // ✅ Response + return reply.send({ + status_code: 200, + message: "Success", + data: { + hardwareId, + gsm_connected_status: gsmStatus, + gsmStatus: isGSMConnected ? 
"GSM Connected" : "GSM Not Connected", + connected_gsm_date, + connected_gsm_time, + team_member_support_gsm_last_check_time: gsmLastCheckTime, + tanks: tanksWithConnectionStatus, + date: latestRecord.date, + time: latestRecord.time + } + }); + + } catch (err) { + console.error("Error in getByHardwareIdSupport:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + +exports.getByHardwareAndTankId = async (req, reply) => { + try { + const { hardwareId, tankhardwareId } = req.params; + + if (!hardwareId || !tankhardwareId) { + return reply.status(400).send({ error: "Both hardwareId and tankhardwareId are required" }); + } + + console.log("📡 Fetching data for:", { hardwareId, tankhardwareId }); + + // Get the latest IoT data for the master hardwareId + const latestData = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + + if (!latestData || !Array.isArray(latestData.tanks)) { + return reply.code(404).send({ message: "No data found for given hardwareId and tankhardwareId" }); + } + + const now = new Date(); + const dataDate = new Date(latestData.date); + + // Check master GSM connection (threshold 60 sec) + const isGSMConnected = now - dataDate <= 60000; + + // Find the tank for the slave + const matchedTank = latestData.tanks.find(tank => tank.tankhardwareId === tankhardwareId); + if (!matchedTank) { + return reply.code(404).send({ message: "Tank not found in latest record" }); + } + + const tankHeight = parseFloat(matchedTank.tankHeight || "0"); + + // If master is disconnected => slave is disconnected too + let isLoraConnected = false; + if (!isGSMConnected) { + isLoraConnected = false; + } else { + // Otherwise use tankHeight to determine LoRa connection + isLoraConnected = tankHeight > 0; + } + + // Format tank date/time to IST + const matchedTankDateObj = new Date(matchedTank.date); + const formattedDate = moment(matchedTankDateObj).tz("Asia/Kolkata").format("DD-MM-YYYY"); + const formattedTime = matchedTank.time 
|| moment(matchedTankDateObj).tz("Asia/Kolkata").format("HH:mm:ss"); + + matchedTank.date = formattedDate; + matchedTank.time = formattedTime; + + // Prepare update object for Insensors collection + const updateFields = { + connected_status: isLoraConnected ? "connected" : "disconnected", + connected_lora_date: formattedDate, + connected_lora_time: formattedTime, + lora_last_check_time: moment().tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm:ss") + }; + const sensorDoc = await Insensors.findOne({ + tankhardwareId: tankhardwareId, + connected_to: hardwareId + }); + + if (!sensorDoc) { + console.warn("⚠️ No Insensors doc found for LoRa update:", { tankhardwareId, connected_to: hardwareId }); + + const fallbackDoc = await Insensors.findOne({ tankhardwareId: tankhardwareId }); + if (fallbackDoc) { + await Insensors.updateOne({ _id: fallbackDoc._id }, { $set: updateFields }); + console.log("⚠️ Fallback Insensors updated by tankhardwareId:", fallbackDoc._id); + } + } else { + await Insensors.updateOne({ _id: sensorDoc._id }, { $set: updateFields }); + console.log("✅ Insensors LoRa status updated:", sensorDoc._id); + } + + return reply.send({ + status_code: 200, + message: isLoraConnected ? 
"LoRa connected" : "LoRa not connected", + data: matchedTank, + lora_connected_status: updateFields.connected_status, + connected_lora_date: updateFields.connected_lora_date, + connected_lora_time: updateFields.connected_lora_time, + lora_last_check_time: updateFields.lora_last_check_time + }); + + } catch (err) { + console.error("❌ Error in getByHardwareAndTankId:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + + + +exports.getByHardwareAndTankIdSupport = async (req, reply) => { + try { + const { hardwareId, tankhardwareId } = req.params; + + if (!hardwareId || !tankhardwareId) { + return reply.status(400).send({ error: "Both hardwareId and tankhardwareId are required" }); + } + + console.log("Fetching tank data for:", { hardwareId, tankhardwareId }); + + const latestData = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + + if (!latestData || !Array.isArray(latestData.tanks)) { + return reply.code(404).send({ message: "No data found for given hardwareId and tankhardwareId" }); + } + + const now = new Date(); + const dataDate = new Date(latestData.date); + const diffInMs = now - dataDate; + const isGSMConnected = diffInMs <= 60000; + + const matchedTank = latestData.tanks.find(tank => tank.tankhardwareId === tankhardwareId); + + if (!matchedTank) { + return reply.code(404).send({ message: "Tank not found in latest record" }); + } + + const tankHeight = parseFloat(matchedTank.tankHeight || "0"); + const isLoraConnected = isGSMConnected && tankHeight > 0; + + const matchedTankDateObj = new Date(matchedTank.date); + const day = String(matchedTankDateObj.getDate()).padStart(2, '0'); + const month = String(matchedTankDateObj.getMonth() + 1).padStart(2, '0'); + const year = matchedTankDateObj.getFullYear(); + const formattedDate = `${day}-${month}-${year}`; + matchedTank.date = formattedDate; + + const support_lora_last_check_time = moment.tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm:ss"); + + const updateFields = 
{ + connected_status: isLoraConnected ? "connected" : "disconnected", + support_lora_last_check_time + }; + + let connected_lora_date = null; + let connected_lora_time = null; + + if (isLoraConnected) { + connected_lora_date = formattedDate; + connected_lora_time = matchedTank.time || matchedTankDateObj.toTimeString().split(" ")[0]; + updateFields.connected_lora_date = connected_lora_date; + updateFields.connected_lora_time = connected_lora_time; + } + + // Support both slave and master structure + const updatedSensor = await Insensors.findOneAndUpdate( + { + $or: [ + { connected_to: hardwareId, tankhardwareId: tankhardwareId }, // slave + { hardwareId: tankhardwareId } // master + ] + }, + { $set: updateFields }, + { new: true } + ); + + if (!updatedSensor) { + console.warn("No matching Insensors document found for update"); + } else { + console.log("Updated support_lora_last_check_time for:", updatedSensor.hardwareId); + } + + const displayMessage = isLoraConnected ? "LoRa connected" : "LoRa not connected"; + + return reply.send({ + status_code: 200, + message: displayMessage, + data: matchedTank, + lora_connected_status: updateFields.connected_status, + connected_lora_date, + connected_lora_time, + support_lora_last_check_time + }); + + } catch (err) { + console.error("Error in getByHardwareAndTankIdSupport:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + +exports.getByHardwareAndTankIdSupportTeamMember = async (req, reply) => { + try { + const { hardwareId, tankhardwareId } = req.params; + + if (!hardwareId || !tankhardwareId) { + return reply.status(400).send({ error: "Both hardwareId and tankhardwareId are required" }); + } + + console.log("Fetching tank data for:", { hardwareId, tankhardwareId }); + + const latestData = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + + if (!latestData || !Array.isArray(latestData.tanks)) { + return reply.code(404).send({ message: "No data found for given hardwareId 
and tankhardwareId" }); + } + + const now = new Date(); + const dataDate = new Date(latestData.date); + const diffInMs = now - dataDate; + const isGSMConnected = diffInMs <= 60000; + + const matchedTank = latestData.tanks.find(tank => tank.tankhardwareId === tankhardwareId); + + if (!matchedTank) { + return reply.code(404).send({ message: "Tank not found in latest record" }); + } + + const tankHeight = parseFloat(matchedTank.tankHeight || "0"); + const isLoraConnected = isGSMConnected && tankHeight > 0; + + const matchedTankDateObj = new Date(matchedTank.date); + const day = String(matchedTankDateObj.getDate()).padStart(2, '0'); + const month = String(matchedTankDateObj.getMonth() + 1).padStart(2, '0'); + const year = matchedTankDateObj.getFullYear(); + const formattedDate = `${day}-${month}-${year}`; + matchedTank.date = formattedDate; + + const team_member_support_lora_last_check_time = moment.tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm:ss"); + + const updateFields = { + connected_status: isLoraConnected ? "connected" : "disconnected", + team_member_support_lora_last_check_time + }; + + let connected_lora_date = null; + let connected_lora_time = null; + + if (isLoraConnected) { + connected_lora_date = formattedDate; + connected_lora_time = matchedTank.time || matchedTankDateObj.toTimeString().split(" ")[0]; + updateFields.connected_lora_date = connected_lora_date; + updateFields.connected_lora_time = connected_lora_time; + } + + // Support both slave and master structure + const updatedSensor = await Insensors.findOneAndUpdate( + { + $or: [ + { connected_to: hardwareId, tankhardwareId: tankhardwareId }, // slave + { hardwareId: tankhardwareId } // master + ] + }, + { $set: updateFields }, + { new: true } + ); + + if (!updatedSensor) { + console.warn("No matching Insensors document found for update"); + } else { + console.log("Updated support_lora_last_check_time for:", updatedSensor.hardwareId); + } + + const displayMessage = isLoraConnected ? 
"LoRa connected" : "LoRa not connected"; + + return reply.send({ + status_code: 200, + message: displayMessage, + data: matchedTank, + lora_connected_status: updateFields.connected_status, + connected_lora_date, + connected_lora_time, + team_member_support_lora_last_check_time + }); + + } catch (err) { + console.error("Error in getByHardwareAndTankIdSupport:", err); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + + +exports.getAllocatedSensorsByTank = async (req, reply) => { + try { + let { customerId, tankName } = req.params; + + if (!customerId || !tankName) { + return reply.status(400).send({ error: "customerId and tankName are required" }); + } + + tankName = tankName.trim(); // Trim spaces + + console.log("Querying MongoDB with:", { customerId, tankName, status: "blocked" }); + + const allocatedSensors = await Insensors.find({ + customerId, + tankName: { $regex: `^${tankName}$`, $options: "i" }, // Case-insensitive search + status: "blocked", + }).lean(); + + if (!allocatedSensors.length) { + return reply.send({ + status_code: 200, + message: "No allocated sensors found for this tank", + allocatedSensors: [], + }); + } + + return reply.send({ + status_code: 200, + message: "Allocated sensors fetched successfully", + allocatedSensors, + }); + + } catch (err) { + console.error("Error fetching allocated sensors:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + +exports.createMasterSlaveData = async (req, reply) => { + try { + const { installationId } = req.params; + const { + type, + customerId, + hardwareId, + batchno, + masterId, + tankName, + tankLocation, + materialRecived, + electricityWork, + plumbingWork, + loraCheck + } = req.body; + + if (!installationId || !hardwareId || !masterId) { + return reply.status(400).send({ message: "installationId, hardwareId, and masterId are required." 
}); + } + + // 🔹 Fetch stored electricity work pictures + const electricityWorkData = await ElectrictyWorkPictures.findOne({ + installationId, + customerId + }); + + const electricityWorkPictures = electricityWorkData + ? electricityWorkData.pictureUrl.map(pic => ({ url: pic.url, uploadedAt: new Date() })) + : []; + + // 🔹 Fetch stored plumbing work pictures + const plumbingWorkData = await PlumbingWorkPictures.findOne({ + installationId, + customerId + }); + + const plumbingWorkPictures = plumbingWorkData + ? plumbingWorkData.pictureUrl.map(pic => ({ url: pic.url, uploadedAt: new Date() })) + : []; + + const materialRecievedData = await MaterialRecievedPictures.findOne({ + installationId, + customerId + }); + + const materialRecievedPictures = materialRecievedData + ? materialRecievedData.pictureUrl.map(pic => ({ url: pic.url, uploadedAt: new Date() })) + : []; + + // 🔹 Save all data to MasterSlaveData + const newData = new MasterSlaveData({ + installationId, + type, + customerId, + hardwareId, + batchno, + masterId, + tankName, + tankLocation, + materialRecived, + electricityWork, + plumbingWork, + loraCheck, + electricityWorkPictures, + plumbingWorkPictures, + materialRecievedPictures + }); + + await newData.save(); + + reply.status(201).send({ + message: "Master-Slave data created successfully", + data: newData + }); + + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + +function feetToCm(value) { + const v = parseFloat(value); + return !isNaN(v) ? 
/**
 * Convert a feet value (number or numeric string) to centimetres,
 * rounded to 2 decimal places.
 *
 * @param {number|string} value - length in feet
 * @returns {number|null} centimetres rounded to 2 dp, or null when the
 *                        input cannot be parsed as a number
 */
function feetToCm(value) {
  const v = parseFloat(value);
  // Number.isNaN avoids the coercing global isNaN (best practice); v is
  // already a number here, so behavior is unchanged.
  return !Number.isNaN(v) ? parseFloat((v * 30.48).toFixed(2)) : null;
}
|| null; + slavesCount = order.slaves || null; + sensorsCount = order.sensors || null; + motorSwitches = order.master_connections?.flatMap(mc => mc.motor_switches || []) || []; + electricals = order.electricals || []; + } + + // Step 6: Build master response **without** hardwareId + const { + hardwareId, // exclude this + ...restMasterFields + } = master; + + + const masterResponse = { + ...restMasterFields, + hardwareId: masterHardwareId, // new field + isMaster: true, + tankLocation: tankDetails?.tankLocation || null, + product_status: master.product_status || 'pending', + typeOfWater: masterTypeOfWater, + tankHeight: null, + masterName: masterOrderInfo.masterName, + location: masterOrderInfo.location, + googleLocation: masterOrderInfo.googleLocation, + longitude: masterOrderInfo.longitude, + latitude: masterOrderInfo.latitude, + height: tankDetails?.height || null, + length: tankDetails?.length || null, + width: tankDetails?.width || null, + heightInCm: tankDetails?.height ? feetToCm(tankDetails.height) : null, + lengthInCm: tankDetails?.length ? feetToCm(tankDetails.length) : null, + widthInCm: tankDetails?.width ? 
feetToCm(tankDetails.width) : null + }; + + // Step 7: Process slaves + // const processedSlaves = await Promise.all(slaveTanks.map(async slave => { + // const finalHardwareId = slave.tankhardwareId || slave.connected_to; + + // let slaveTankMeta = await Tank.findOne( + // { tankhardwareId: slave.tankhardwareId, customerId }, + // { height: 1, length: 1, width: 1,tankName:1 } + // ).lean(); + // console.log(slaveTankMeta,"slaveTankMeta") + // if (!slaveTankMeta && slave.connected_to) { + // slaveTankMeta = await Tank.findOne( + // { hardwareId: slave.connected_to, customerId }, + // { height: 1, length: 1, width: 1 } + // ).lean(); + // } + + // const matchingTankData = latestIotData?.tanks?.find(t => + // t.tankHardwareId === finalHardwareId || t.connected_to === finalHardwareId + // ); + + // return { + // ...slave, + // isMaster: false, + // product_status: slave.product_status || 'pending', + // tankHeight: matchingTankData?.tankHeight ?? null, + // typeOfWater: masterTypeOfWater === 'bore' ? 'bore' : (slave.typeOfWater || null), + // height: slaveTankMeta?.height || null, + // length: slaveTankMeta?.length || null, + // width: slaveTankMeta?.width || null, + // heightInCm: slaveTankMeta?.height ? feetToCm(slaveTankMeta.height) : null, + // lengthInCm: slaveTankMeta?.length ? feetToCm(slaveTankMeta.length) : null, + // widthInCm: slaveTankMeta?.width ? 
feetToCm(slaveTankMeta.width) : null + // }; + // })); +// Step 7: Process slaves +const processedSlaves = await Promise.all(slaveTanks.map(async (slave) => { + const finalHardwareId = slave.tankhardwareId || slave.connected_to; + + // Fetch slave tank metadata from Tank schema using tankhardwareId or tankName + customerId + let slaveTankMeta = await Tank.findOne( + { tankhardwareId: slave.tankhardwareId, customerId }, + { height: 1, length: 1, width: 1, typeOfWater: 1, tankName: 1 } + ).lean(); + + if (!slaveTankMeta && slave.tankName) { + // fallback: use tankName + customerId + slaveTankMeta = await Tank.findOne( + { tankName: slave.tankName, customerId }, + { height: 1, length: 1, width: 1, typeOfWater: 1, tankName: 1 } + ).lean(); + } + + if (!slaveTankMeta && slave.connected_to) { + // fallback: use connected_to as hardwareId + slaveTankMeta = await Tank.findOne( + { hardwareId: slave.connected_to, customerId }, + { height: 1, length: 1, width: 1, typeOfWater: 1, tankName: 1 } + ).lean(); + } + + const matchingTankData = latestIotData?.tanks?.find(t => + t.tankHardwareId === finalHardwareId || t.connected_to === finalHardwareId + ); + + // Use slaveTankMeta.typeOfWater if Insensors.typeOfWater is null + const typeOfWater = slave.typeOfWater || slaveTankMeta?.typeOfWater || masterTypeOfWater; +console.log(matchingTankData) + return { + ...slave, + isMaster: false, + product_status: slave.product_status || 'pending', + tankHeight: matchingTankData?.tankHeight ?? null, + typeOfWater: typeOfWater, + height: slaveTankMeta?.height || null, + length: slaveTankMeta?.length || null, + width: slaveTankMeta?.width || null, + heightInCm: slaveTankMeta?.height ? feetToCm(slaveTankMeta.height) : null, + lengthInCm: slaveTankMeta?.length ? feetToCm(slaveTankMeta.length) : null, + widthInCm: slaveTankMeta?.width ? 
feetToCm(slaveTankMeta.width) : null + }; +})); + + // Step 8: Send response + return reply.send({ + success: true, + tankLocation: tankDetails?.tankLocation || null, + typeOfWater: masterTypeOfWater, + connectedSlaveCount: slaveCount, + masters: mastersCount, + slaves: slavesCount, + sensors: sensorsCount, + motor_switches: motorSwitches, + electricals: electricals, + data: [masterResponse, ...processedSlaves] + }); + + } catch (error) { + console.error("Error fetching master connected slave data:", error); + return reply.status(500).send({ success: false, message: "Internal Server Error" }); + } +}; + + +exports.getTankDetailsByMaster = async (req, reply) => { + try { + const { customerId, hardwareId } = req.params; + + if (!customerId || !hardwareId) { + return reply.code(400).send({ + success: false, + message: 'customerId and hardwareId are required' + }); + } + + // Step 1: find master device (to confirm existence) + const masterDevice = await Insensors.findOne({ customerId, hardwareId, type: 'master' }).lean(); + if (!masterDevice) { + return reply.code(404).send({ + success: false, + message: 'Master device not found' + }); + } + + // Step 2: find order to get masterName & location + const orderRecord = await Order.findOne({ customerId, "master_connections.hardwareId": hardwareId }).lean(); + if (!orderRecord) { + return reply.code(404).send({ + success: false, + message: 'Order record not found for this hardwareId' + }); + } + + // find the master inside order + const masterInfo = orderRecord.master_connections.find( + m => m.hardwareId === hardwareId + ); + + const masterName = masterInfo ? masterInfo.master_name : null; + const location = masterInfo ? 
masterInfo.location : null; + + // Step 3: get tank details + const tank = await Tank.findOne({ customerId, hardwareId }).lean(); + if (!tank) { + return reply.code(404).send({ + success: false, + message: 'Tank details not found' + }); + } + + // Step 4: parse & format dimensions + const tankHeightFeet = parseInt(tank.height, 10); + const tankWidthFeet = parseInt(tank.width, 10); + const tankLengthFeet = parseInt(tank.length, 10); + + reply.send({ + success: true, + data: { + masterName: masterName, + location: location, + type:masterDevice.type, + hardwareId: hardwareId, + tankName: tank.tankName, + blockName: tank.blockName, + shape: tank.shape, + capacity: tank.capacity, + waterCapacityPerCm: tank.waterCapacityPerCm, + typeOfWater: tank.typeOfWater, + tankLocation: tank.tankLocation, + height: tankHeightFeet, + width: tankWidthFeet, + length: tankLengthFeet, + heightInCm: tankHeightFeet * 30.48, + widthInCm: tankWidthFeet * 30.48, + lengthInCm: tankLengthFeet * 30.48 + } + }); + + } catch (error) { + console.error('Error fetching tank details:', error); + reply.code(500).send({ + success: false, + message: 'Internal server error' + }); + } +}; + + +exports.getSlaveTankDetails = async (req, reply) => { + try { + const { customerId, hardwareId, tankHardwareId } = req.params; + + if (!customerId || !hardwareId || !tankHardwareId) { + return reply.code(400).send({ + success: false, + message: 'customerId, hardwareId, and tankHardwareId are required' + }); + } + + // Step 1: find slave device (type: slave) + const slaveDevice = await Insensors.findOne({ customerId, connected_to: hardwareId, type: 'slave' }).lean(); + if (!slaveDevice) { + return reply.code(404).send({ + success: false, + message: 'Slave device not found' + }); + } + + // Step 2: find master hardwareId from slave.connected_to + const masterHardwareId = slaveDevice.connected_to; + if (!masterHardwareId) { + return reply.code(404).send({ + success: false, + message: 'Slave device has no connected_to 
(master hardwareId)' + }); + } + + // Step 3: find master info from Order.master_connections + const order = await Order.findOne({ customerId, "master_connections.hardwareId": masterHardwareId }).lean(); + if (!order) { + return reply.code(404).send({ + success: false, + message: 'Order not found for connected master' + }); + } + + const masterInfo = order.master_connections.find(m => m.hardwareId === masterHardwareId); + + const masterName = masterInfo ? masterInfo.master_name : null; + const location = masterInfo ? masterInfo.location : null; + + // Step 4: get slave's tank details + const tank = await Tank.findOne({ customerId, hardwareId, tankhardwareId: tankHardwareId }).lean(); + if (!tank) { + return reply.code(404).send({ + success: false, + message: 'Tank not found for slave' + }); + } + + // Step 5: parse dimensions + const tankHeightFeet = parseInt(tank.height, 10); + const tankWidthFeet = parseInt(tank.width, 10); + const tankLengthFeet = parseInt(tank.length, 10); + + return reply.send({ + success: true, + data: { + masterName, + location, + slaveHardwareId: hardwareId, + type: slaveDevice.type, + tankHardwareId, + tankName: tank.tankName, + blockName: tank.blockName, + shape: tank.shape, + capacity: tank.capacity, + waterCapacityPerCm: tank.waterCapacityPerCm, + typeOfWater: tank.typeOfWater, + tankLocation: tank.tankLocation, + height: tankHeightFeet, + width: tankWidthFeet, + length: tankLengthFeet, + heightInCm: tankHeightFeet * 30.48, + widthInCm: tankWidthFeet * 30.48, + lengthInCm: tankLengthFeet * 30.48 + } + }); + + } catch (error) { + console.error('Error fetching slave tank details:', error); + reply.code(500).send({ + success: false, + message: 'Internal server error' + }); + } +}; + +exports.editTankDimensions = async (req, reply) => { + try { + const { customerId, teamMemberId, hardwareId, tankHardwareId } = req.params; + const { height, width, length, unit } = req.body; // unit: 'cm' or 'feet' + + if (!customerId || !teamMemberId || 
!hardwareId || !tankHardwareId) { + return reply.code(400).send({ + success: false, + message: 'customerId, teamMemberId, hardwareId and tankHardwareId are required' + }); + } + + if (height === undefined || width === undefined || length === undefined || !unit) { + return reply.code(400).send({ + success: false, + message: 'height, width, length and unit are required in body' + }); + } + + // Parse input numbers + const heightNum = Number(height); + const widthNum = Number(width); + const lengthNum = Number(length); + + if (isNaN(heightNum) || isNaN(widthNum) || isNaN(lengthNum)) { + return reply.code(400).send({ + success: false, + message: 'height, width and length must be numeric values' + }); + } + + if (!['cm', 'feet'].includes(unit)) { + return reply.code(400).send({ + success: false, + message: "unit must be either 'cm' or 'feet'" + }); + } + + // Step 1: Convert to feet if input is in cm (keep floats) + let heightInFeet, widthInFeet, lengthInFeet; + + if (unit === 'cm') { + heightInFeet = Number((heightNum / 30.48).toFixed(2)); + widthInFeet = Number((widthNum / 30.48).toFixed(2)); + lengthInFeet = Number((lengthNum / 30.48).toFixed(2)); +} else { + heightInFeet = Number(heightNum.toFixed(2)); + widthInFeet = Number(widthNum.toFixed(2)); + lengthInFeet = Number(lengthNum.toFixed(2)); +} + + + // Step 2: Calculate capacity & waterCapacityPerCm (use floats for precision) + const height_m = heightInFeet * 0.3048; + const width_m = widthInFeet * 0.3048; + const length_m = lengthInFeet * 0.3048; + + // const capacity = length_m * width_m * height_m * 1000; // in liters + // const waterCapacityPerCm = length_m * width_m * 0.01 * 1000; // liters per cm + + const capacity = Number((length_m * width_m * height_m * 1000).toFixed(2)); + const waterCapacityPerCm = Number((length_m * width_m * 0.01 * 1000).toFixed(2)); + + // Step 3: Find install record + const installRecord = await Install.findOne({ + 'team_member.team_member.teamMemberId': teamMemberId + }).lean(); + 
+ if (!installRecord) { + return reply.code(404).send({ + success: false, + message: 'Team member not found or not assigned' + }); + } + + const teamMemberDetails = installRecord.team_member?.team_member?.find( + member => member.teamMemberId === teamMemberId + ); + + if (!teamMemberDetails) { + return reply.code(404).send({ + success: false, + message: 'Team member details not found under install record' + }); + } + + // Step 4: Find order record + const orderRecord = await Order.findOne({ + installationId: installRecord.installationId, + customerId + }).lean(); + + if (!orderRecord) { + return reply.code(404).send({ + success: false, + message: 'Order not found for this installation and customer' + }); + } + + // Step 5: Update tank dimensions (use floats, convert to string to store) + const updatedTank = await Tank.findOneAndUpdate( + { + customerId, + hardwareId, + tankhardwareId: tankHardwareId + }, + { + $set: { + height: heightInFeet, + width: widthInFeet, + length: lengthInFeet, + capacity: capacity.toString(), + waterCapacityPerCm: waterCapacityPerCm.toString() + } + + }, + { new: true } + ).lean(); + + if (!updatedTank) { + return reply.code(404).send({ + success: false, + message: 'Tank not found with given customerId, hardwareId and tankHardwareId' + }); + } + + return reply.send({ + success: true, + message: 'Tank dimensions updated successfully', + updatedTank, + teamMember: { + teamMemberId: teamMemberDetails.teamMemberId, + firstName: teamMemberDetails.firstName, + phone: teamMemberDetails.phone, + email: teamMemberDetails.email, + alternativePhone: teamMemberDetails.alternativePhone, + installationTeamMemId: teamMemberDetails.installationTeamMemId, + status: teamMemberDetails.status + } + }); + + } catch (error) { + console.error('Error updating tank dimensions:', error); + return reply.code(500).send({ + success: false, + message: 'Internal server error' + }); + } +}; + + + + +exports.updateWorkStatusAndProductStatus = async (req, reply) => { + 
try { + const { connectedTo, teamMemberId, customerId } = req.params; + const { work_status } = req.body; + + if (!connectedTo || !teamMemberId || !customerId) { + return reply.status(400).send({ success: false, message: "connectedTo, teamMemberId, and customerId are required" }); + } + + if (!work_status) { + return reply.status(400).send({ success: false, message: "work_status is required in body" }); + } + + // Step 1: Update work_status in Order schema + const orderUpdate = await Order.updateOne( + { customerId, "master_connections.hardwareId": connectedTo }, + { $set: { "master_connections.$.work_status": work_status } } + ); + + // Step 2: Update master product_status + const masterUpdate = await Insensors.updateOne( + { hardwareId: connectedTo, type: 'master', customerId }, + { $set: { product_status: 'complete', team_member_support_gsm_last_check_time: new Date().toISOString() } } + ); + + // Step 3: Update all connected slaves product_status + const slaveUpdate = await Insensors.updateMany( + { connected_to: connectedTo, type: 'slave', customerId }, + { $set: { product_status: 'complete', team_member_support_lora_last_check_time: new Date().toISOString() } } + ); + + return reply.send({ + success: true, + message: 'Work status and product_status updated successfully', + orderMatchedCount: orderUpdate.matchedCount, + orderModifiedCount: orderUpdate.modifiedCount, + masterModifiedCount: masterUpdate.modifiedCount, + slaveModifiedCount: slaveUpdate.modifiedCount + }); + } catch (error) { + console.error('Error updating work_status and product_status:', error); + return reply.status(500).send({ success: false, message: 'Internal Server Error' }); + } +}; + + + +exports.addMediaToInsensor = async (req, reply) => { + try { + // const { hardwareId, customerId, type } = req.params; + // const { video, material, workStatus, product_status } = req.body; + const { customerId } = req.params; + const { hardwareId, type, video, material, workStatus, 
product_status,description } = req.body; + + + if (!hardwareId || !customerId || !type) { + return reply.status(400).send({ success: false, message: "Missing required params" }); + } + + const insensor = await Insensors.findOne({ hardwareId, customerId, type }); + if (!insensor) { + return reply.status(404).send({ success: false, message: "Insensor not found" }); + } + + // Append if arrays are provided + if (Array.isArray(video) && video.length) { + const items = video.map(url => ({ url, createdAt: new Date() })); + insensor.manualTestVideos.push(...items); + } + + if (Array.isArray(material) && material.length) { + const items = material.map(url => ({ url, createdAt: new Date() })); + insensor.materialReceivedPictures.push(...items); + } + + if (Array.isArray(workStatus) && workStatus.length) { + const items = workStatus.map(url => ({ url, createdAt: new Date() })); + insensor.workStatusPictures.push(...items); + } + + // Update product_status if provided + if (product_status && ['pending', 'complete'].includes(product_status)) { + insensor.product_status = product_status; + } + if (description) { + insensor.description = description; + } + + await insensor.save(); + + const updated = await Insensors.findOne({ hardwareId, customerId, type }).lean(); + return reply.send({ + success: true, + message: "Media saved successfully", + data: updated + }); + } catch (error) { + console.error("Error adding media to insensor:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); + } +}; + + + +exports.mastrerList = async (req, reply) => { + try { + const { customerId, installationId } = req.params; + + // Step 1: Get User and extract buildingName + const user = await User.findOne({ customerId , installationId + }); + + if (!user) { + return reply.status(404).send({ success: false, message: "User not found" }); + } + + const { buildingName } = user; + + // Step 2: Get Tanks with matching customerId + const tanks = await Tank.find({ 
customerId }); + + if (!tanks.length) { + return reply.status(404).send({ success: false, message: "No tanks found for this customer" }); + } + + const {tankLocation,typeOfWater} = tanks + //console.log(tanks) + // Step 3: Extract hardwareId from tanks + const hardwareIds = tanks.map(tank => tank.hardwareId); + + // Step 4: Find master tanks in InSensors with both customerId and installationId + const masterTanks = await Insensors.find({ + customerId, + connected_to: { $in: hardwareIds }, + type: "master" + }); + + if (!masterTanks.length) { + return reply.status(404).send({ success: false, message: "No master tanks found" }); + } + + return reply.send({ success: true,tankLocation,typeOfWater ,buildingName, data: masterTanks, user }); + + } catch (error) { + console.error("Error fetching master tanks:", error); + return reply.status(500).send({ success: false, message: "Internal Server Error" }); + } +}; + + + +exports.getMasterSlaveSummary = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.status(400).send({ error: "customerId is required" }); + } + + // Fetch all master devices from Insensors + const masters = await Insensors.find({ customerId, type: "master" }).lean(); + + // Fetch orders to build orderMap: hardwareId → { masterName, location, work_status } + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; // key: hardwareId + + orders.forEach(order => { + if (Array.isArray(order.master_connections)) { + order.master_connections.forEach(connection => { + if (connection.hardwareId) { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null, + work_status: connection.work_status || null + }; + } + }); + } + }); + + const result = []; + + for (const master of masters) { + const orderInfo = orderMap[master.hardwareId] || {}; + + // ✅ Only keep masters where work_status === 'active' + if (orderInfo.work_status !== 
'active') { + continue; // skip this master + } + + // Prefer Insensors name/location, fallback to order info + const masterName = master.masterName || orderInfo.masterName || null; + const location = master.location || orderInfo.location || null; + + // Fetch latest GSM data + const latestGsmData = await IotData.findOne({ hardwareId: master.hardwareId }) + .sort({ date: -1, time: -1 }) + .lean(); + + let connectedGsmDate = null; + let connectedGsmTime = null; + let gsmStatus = "unknown"; + let gsmLastDisconnect = master.gsm_last_disconnect_time || null; + let gsmLastCheckTime = master.gsm_last_check_time || null; + + if (latestGsmData?.date && latestGsmData?.time) { + const indiaTime = moment.tz( + `${moment(latestGsmData.date).format("YYYY-MM-DD")} ${latestGsmData.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + connectedGsmDate = indiaTime.format("DD-MM-YYYY"); + connectedGsmTime = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + + gsmStatus = diffInMinutes <= 1 ? 
"connected" : "disconnected"; + + if (gsmStatus === "disconnected") { + gsmLastDisconnect = `${connectedGsmDate} ${connectedGsmTime}`; + } + + // Update master Insensors record + await Insensors.updateOne( + { hardwareId: master.hardwareId }, + { + $set: { + connected_status: gsmStatus, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect + } + } + ); + } + + // Process connected slaves + const connectedSlaves = []; + const slaves = await Insensors.find({ connected_to: master.hardwareId, type: "slave" }).lean(); + + for (const slave of slaves) { + const now = moment.tz("Asia/Kolkata"); + let connectedLoraDate = null; + let connectedLoraTime = null; + let loraStatus = "disconnected"; + let loraLastDisconnect = slave.lora_last_disconnect_time || null; + let loraLastCheckTime = slave.lora_last_check_time || null; + let typeOfWater = null; + + // Fetch latest slave IotData (using master hardwareId because slave data comes there) + const slaveIot = await IotData.findOne({ hardwareId: slave.connected_to }) + .sort({ date: -1, time: -1 }) + .lean(); + + if (slaveIot?.tanks?.length && slave.tankhardwareId) { + const matchedTank = slaveIot.tanks.find(t => t.tankhardwareId === slave.tankhardwareId); + + if (matchedTank) { + const indiaTime = moment.tz( + `${moment(matchedTank.date).format("YYYY-MM-DD")} ${matchedTank.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + + connectedLoraDate = indiaTime.format("DD-MM-YYYY"); + connectedLoraTime = indiaTime.format("HH:mm:ss"); + + const diffMinutes = now.diff(indiaTime, "minutes"); + const tankHeight = parseFloat(matchedTank.tankHeight) || 0; + + loraStatus = (tankHeight > 0 && diffMinutes <= 1) ? 
"connected" : "disconnected"; + + if (loraStatus === "disconnected") { + loraLastDisconnect = `${connectedLoraDate} ${connectedLoraTime}`; + } + } + } + + // Enrich with tank typeOfWater if exists + const matchedTankDetails = await Tank.findOne({ + customerId, + tankhardwareId: slave.tankhardwareId + }).lean(); + + if (matchedTankDetails?.typeOfWater) { + typeOfWater = matchedTankDetails.typeOfWater; + } + + // Update slave Insensors record + await Insensors.updateOne( + { hardwareId: slave.hardwareId }, + { + $set: { + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect + } + } + ); + + connectedSlaves.push({ + hardwareId: slave.hardwareId, + tankhardwareId: slave.tankhardwareId || null, + tankName: slave.tankName || null, + location: slave.tankLocation || null, + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect, + type: slave.type || "slave", + typeOfWater, + connected_to: slave.connected_to || null + }); + } + + result.push({ + hardwareId: master.hardwareId, + masterName, + location, + work_status: orderInfo.work_status || null, + type: master.type || "master", + connected_status: gsmStatus, + connected_slave_count: connectedSlaves.length, + connected_slaves: connectedSlaves, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect, + connected_lora_date: master.connected_lora_date || null, + connected_lora_time: master.connected_lora_time || null + }); + } + + return reply.send({ + status_code: 200, + message: "Master-slave summary retrieved successfully", + data: result + }); + + } catch (error) { + console.error("Error in getMasterSlaveSummary:", 
error); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + +exports.getWaitingMasterSlaveSummary = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.status(400).send({ error: "customerId is required" }); + } + + // Fetch all master devices from Insensors + const masters = await Insensors.find({ customerId, type: "master" }).lean(); + + // Fetch orders to build orderMap: hardwareId → { masterName, location, work_status } + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; // key: hardwareId + + orders.forEach(order => { + if (Array.isArray(order.master_connections)) { + order.master_connections.forEach(connection => { + if (connection.hardwareId) { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null, + work_status: connection.work_status || null + }; + } + }); + } + }); + + const result = []; + + for (const master of masters) { + const orderInfo = orderMap[master.hardwareId] || {}; + + // ✅ Only keep masters where work_status === 'active' + if (orderInfo.work_status !== 'waiting') { + continue; // skip this master + } + + // Prefer Insensors name/location, fallback to order info + const masterName = master.masterName || orderInfo.masterName || null; + const location = master.location || orderInfo.location || null; + + // Fetch latest GSM data + const latestGsmData = await IotData.findOne({ hardwareId: master.hardwareId }) + .sort({ date: -1, time: -1 }) + .lean(); + + let connectedGsmDate = null; + let connectedGsmTime = null; + let gsmStatus = "unknown"; + let gsmLastDisconnect = master.gsm_last_disconnect_time || null; + let gsmLastCheckTime = master.gsm_last_check_time || null; + + if (latestGsmData?.date && latestGsmData?.time) { + const indiaTime = moment.tz( + `${moment(latestGsmData.date).format("YYYY-MM-DD")} ${latestGsmData.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + 
); + connectedGsmDate = indiaTime.format("DD-MM-YYYY"); + connectedGsmTime = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + + gsmStatus = diffInMinutes <= 1 ? "connected" : "disconnected"; + + if (gsmStatus === "disconnected") { + gsmLastDisconnect = `${connectedGsmDate} ${connectedGsmTime}`; + } + + // Update master Insensors record + await Insensors.updateOne( + { hardwareId: master.hardwareId }, + { + $set: { + connected_status: gsmStatus, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect + } + } + ); + } + + // Process connected slaves + const connectedSlaves = []; + const slaves = await Insensors.find({ connected_to: master.hardwareId, type: "slave" }).lean(); + + for (const slave of slaves) { + const now = moment.tz("Asia/Kolkata"); + let connectedLoraDate = null; + let connectedLoraTime = null; + let loraStatus = "disconnected"; + let loraLastDisconnect = slave.lora_last_disconnect_time || null; + let loraLastCheckTime = slave.lora_last_check_time || null; + let typeOfWater = null; + + // Fetch latest slave IotData (using master hardwareId because slave data comes there) + const slaveIot = await IotData.findOne({ hardwareId: slave.connected_to }) + .sort({ date: -1, time: -1 }) + .lean(); + + if (slaveIot?.tanks?.length && slave.tankhardwareId) { + const matchedTank = slaveIot.tanks.find(t => t.tankhardwareId === slave.tankhardwareId); + + if (matchedTank) { + const indiaTime = moment.tz( + `${moment(matchedTank.date).format("YYYY-MM-DD")} ${matchedTank.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + + connectedLoraDate = indiaTime.format("DD-MM-YYYY"); + connectedLoraTime = indiaTime.format("HH:mm:ss"); + + const diffMinutes = now.diff(indiaTime, "minutes"); + const tankHeight = parseFloat(matchedTank.tankHeight) || 0; + + loraStatus = (tankHeight > 0 
&& diffMinutes <= 1) ? "connected" : "disconnected"; + + if (loraStatus === "disconnected") { + loraLastDisconnect = `${connectedLoraDate} ${connectedLoraTime}`; + } + } + } + + // Enrich with tank typeOfWater if exists + const matchedTankDetails = await Tank.findOne({ + customerId, + tankhardwareId: slave.tankhardwareId + }).lean(); + + if (matchedTankDetails?.typeOfWater) { + typeOfWater = matchedTankDetails.typeOfWater; + } + + // Update slave Insensors record + await Insensors.updateOne( + { hardwareId: slave.hardwareId }, + { + $set: { + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect + } + } + ); + + connectedSlaves.push({ + hardwareId: slave.hardwareId, + tankhardwareId: slave.tankhardwareId || null, + tankName: slave.tankName || null, + location: slave.tankLocation || null, + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect, + type: slave.type || "slave", + typeOfWater, + connected_to: slave.connected_to || null + }); + } + + result.push({ + hardwareId: master.hardwareId, + masterName, + location, + work_status: orderInfo.work_status, + type: master.type || "master", + connected_status: gsmStatus, + connected_slave_count: connectedSlaves.length, + connected_slaves: connectedSlaves, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect, + connected_lora_date: master.connected_lora_date || null, + connected_lora_time: master.connected_lora_time || null + }); + } + + return reply.send({ + status_code: 200, + message: "Master-slave summary retrieved successfully", + data: result + }); + + } catch (error) { + console.error("Error in 
getMasterSlaveSummary:", error); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + +exports.getCompleteMasterSlaveSummary = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.status(400).send({ error: "customerId is required" }); + } + + // Fetch all master devices from Insensors + const masters = await Insensors.find({ customerId, type: "master" }).lean(); + + // Fetch orders to build orderMap: hardwareId → { masterName, location, work_status } + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; // key: hardwareId + + orders.forEach(order => { + if (Array.isArray(order.master_connections)) { + order.master_connections.forEach(connection => { + if (connection.hardwareId) { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null, + work_status: connection.work_status || null + }; + } + }); + } + }); + + const result = []; + + for (const master of masters) { + const orderInfo = orderMap[master.hardwareId] || {}; + + // ✅ Only keep masters where work_status === 'active' + if (orderInfo.work_status !== 'complete') { + continue; // skip this master + } + + // Prefer Insensors name/location, fallback to order info + const masterName = master.masterName || orderInfo.masterName || null; + const location = master.location || orderInfo.location || null; + + // Fetch latest GSM data + const latestGsmData = await IotData.findOne({ hardwareId: master.hardwareId }) + .sort({ date: -1, time: -1 }) + .lean(); + + let connectedGsmDate = null; + let connectedGsmTime = null; + let gsmStatus = "unknown"; + let gsmLastDisconnect = master.gsm_last_disconnect_time || null; + let gsmLastCheckTime = master.gsm_last_check_time || null; + + if (latestGsmData?.date && latestGsmData?.time) { + const indiaTime = moment.tz( + `${moment(latestGsmData.date).format("YYYY-MM-DD")} ${latestGsmData.time}`, + "YYYY-MM-DD 
HH:mm:ss", + "Asia/Kolkata" + ); + connectedGsmDate = indiaTime.format("DD-MM-YYYY"); + connectedGsmTime = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + + gsmStatus = diffInMinutes <= 1 ? "connected" : "disconnected"; + + if (gsmStatus === "disconnected") { + gsmLastDisconnect = `${connectedGsmDate} ${connectedGsmTime}`; + } + + // Update master Insensors record + await Insensors.updateOne( + { hardwareId: master.hardwareId }, + { + $set: { + connected_status: gsmStatus, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect + } + } + ); + } + + // Process connected slaves + const connectedSlaves = []; + const slaves = await Insensors.find({ connected_to: master.hardwareId, type: "slave" }).lean(); + + for (const slave of slaves) { + const now = moment.tz("Asia/Kolkata"); + let connectedLoraDate = null; + let connectedLoraTime = null; + let loraStatus = "disconnected"; + let loraLastDisconnect = slave.lora_last_disconnect_time || null; + let loraLastCheckTime = slave.lora_last_check_time || null; + let typeOfWater = null; + + // Fetch latest slave IotData (using master hardwareId because slave data comes there) + const slaveIot = await IotData.findOne({ hardwareId: slave.connected_to }) + .sort({ date: -1, time: -1 }) + .lean(); + + if (slaveIot?.tanks?.length && slave.tankhardwareId) { + const matchedTank = slaveIot.tanks.find(t => t.tankhardwareId === slave.tankhardwareId); + + if (matchedTank) { + const indiaTime = moment.tz( + `${moment(matchedTank.date).format("YYYY-MM-DD")} ${matchedTank.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + + connectedLoraDate = indiaTime.format("DD-MM-YYYY"); + connectedLoraTime = indiaTime.format("HH:mm:ss"); + + const diffMinutes = now.diff(indiaTime, "minutes"); + const tankHeight = parseFloat(matchedTank.tankHeight) || 0; + 
+ loraStatus = (tankHeight > 0 && diffMinutes <= 1) ? "connected" : "disconnected"; + + if (loraStatus === "disconnected") { + loraLastDisconnect = `${connectedLoraDate} ${connectedLoraTime}`; + } + } + } + + // Enrich with tank typeOfWater if exists + const matchedTankDetails = await Tank.findOne({ + customerId, + tankhardwareId: slave.tankhardwareId + }).lean(); + + if (matchedTankDetails?.typeOfWater) { + typeOfWater = matchedTankDetails.typeOfWater; + } + + // Update slave Insensors record + await Insensors.updateOne( + { hardwareId: slave.hardwareId }, + { + $set: { + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect + } + } + ); + + connectedSlaves.push({ + hardwareId: slave.hardwareId, + tankhardwareId: slave.tankhardwareId || null, + tankName: slave.tankName || null, + location: slave.tankLocation || null, + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect, + type: slave.type || "slave", + typeOfWater, + connected_to: slave.connected_to || null + }); + } + + result.push({ + hardwareId: master.hardwareId, + masterName, + location, + work_status: orderInfo.work_status, + type: master.type || "master", + connected_status: gsmStatus, + connected_slave_count: connectedSlaves.length, + connected_slaves: connectedSlaves, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect, + connected_lora_date: master.connected_lora_date || null, + connected_lora_time: master.connected_lora_time || null + }); + } + + return reply.send({ + status_code: 200, + message: "Master-slave summary retrieved successfully", + data: result + }); + + } catch (error) { + 
console.error("Error in getMasterSlaveSummary:", error); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; +exports.getMasterWithSlaves = async (req, reply) => { + try { + const { installationId, customerId, hardwareId } = req.params; + + if (!installationId || !customerId || !hardwareId) { + return reply.code(400).send({ success: false, message: "installationId, customerId, and hardwareId are required" }); + } + + // Find order + const order = await Order.findOne({ installationId, customerId }).lean(); + if (!order) { + return reply.code(404).send({ success: false, message: "Order not found" }); + } + + // Find device (could be master or slave) + const device = await Insensors.findOne({ hardwareId, customerId }).lean(); + if (!device) { + return reply.code(404).send({ success: false, message: "Device not found in Insensors" }); + } + + let enrichedDevice = null; + + if (device.type === 'master') { + // Find matching master in order.master_connections + const matchingMaster = order.master_connections?.find(m => m.hardwareId === hardwareId); + + enrichedDevice = { + ...device, + masterName: matchingMaster?.master_name ?? null, + location: matchingMaster?.location ?? null, + }; + + } else if (device.type === 'slave' && device.connected_to) { + // Find master device to get masterName and location + const masterInOrder = order.master_connections?.find(m => m.hardwareId === device.connected_to); + + enrichedDevice = { + ...device, + masterName: masterInOrder?.master_name ?? null, + location: masterInOrder?.location ?? 
null, + }; + + } else { + return reply.code(400).send({ success: false, message: "Device type unknown or missing connected_to" }); + } + + return reply.send({ + success: true, + device: enrichedDevice + }); + + } catch (error) { + console.error("Error fetching device:", error); + return reply.code(500).send({ success: false, message: "Internal server error" }); + } +}; + + +exports.getPendingMasterSlaveSummary = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.status(400).send({ error: "customerId is required" }); + } + + // Fetch all master devices from Insensors + const masters = await Insensors.find({ customerId, type: "master" }).lean(); + + // Fetch orders to build orderMap: hardwareId → { masterName, location, work_status } + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; // key: hardwareId + + orders.forEach(order => { + if (Array.isArray(order.master_connections)) { + order.master_connections.forEach(connection => { + if (connection.hardwareId) { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null, + work_status: connection.work_status || null + }; + } + }); + } + }); + + const result = []; + + for (const master of masters) { + const orderInfo = orderMap[master.hardwareId] || {}; + + // ✅ Only keep masters where work_status === 'active' + if (orderInfo.work_status !== 'pending') { + continue; // skip this master + } + + // Prefer Insensors name/location, fallback to order info + const masterName = master.masterName || orderInfo.masterName || null; + const location = master.location || orderInfo.location || null; + + // Fetch latest GSM data + const latestGsmData = await IotData.findOne({ hardwareId: master.hardwareId }) + .sort({ date: -1, time: -1 }) + .lean(); + + let connectedGsmDate = null; + let connectedGsmTime = null; + let gsmStatus = "unknown"; + let gsmLastDisconnect = 
master.gsm_last_disconnect_time || null; + let gsmLastCheckTime = master.gsm_last_check_time || null; + + if (latestGsmData?.date && latestGsmData?.time) { + const indiaTime = moment.tz( + `${moment(latestGsmData.date).format("YYYY-MM-DD")} ${latestGsmData.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + connectedGsmDate = indiaTime.format("DD-MM-YYYY"); + connectedGsmTime = indiaTime.format("HH:mm:ss"); + + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + + gsmStatus = diffInMinutes <= 1 ? "connected" : "disconnected"; + + if (gsmStatus === "disconnected") { + gsmLastDisconnect = `${connectedGsmDate} ${connectedGsmTime}`; + } + + // Update master Insensors record + await Insensors.updateOne( + { hardwareId: master.hardwareId }, + { + $set: { + connected_status: gsmStatus, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + gsm_last_disconnect_time: gsmLastDisconnect + } + } + ); + } + + // Process connected slaves + const connectedSlaves = []; + const slaves = await Insensors.find({ connected_to: master.hardwareId, type: "slave" }).lean(); + + for (const slave of slaves) { + const now = moment.tz("Asia/Kolkata"); + let connectedLoraDate = null; + let connectedLoraTime = null; + let loraStatus = "disconnected"; + let loraLastDisconnect = slave.lora_last_disconnect_time || null; + let loraLastCheckTime = slave.lora_last_check_time || null; + let typeOfWater = null; + + // Fetch latest slave IotData (using master hardwareId because slave data comes there) + const slaveIot = await IotData.findOne({ hardwareId: slave.connected_to }) + .sort({ date: -1, time: -1 }) + .lean(); + + if (slaveIot?.tanks?.length && slave.tankhardwareId) { + const matchedTank = slaveIot.tanks.find(t => t.tankhardwareId === slave.tankhardwareId); + + if (matchedTank) { + const indiaTime = moment.tz( + `${moment(matchedTank.date).format("YYYY-MM-DD")} 
${matchedTank.time}`, + "YYYY-MM-DD HH:mm:ss", + "Asia/Kolkata" + ); + + connectedLoraDate = indiaTime.format("DD-MM-YYYY"); + connectedLoraTime = indiaTime.format("HH:mm:ss"); + + const diffMinutes = now.diff(indiaTime, "minutes"); + const tankHeight = parseFloat(matchedTank.tankHeight) || 0; + + loraStatus = (tankHeight > 0 && diffMinutes <= 1) ? "connected" : "disconnected"; + + if (loraStatus === "disconnected") { + loraLastDisconnect = `${connectedLoraDate} ${connectedLoraTime}`; + } + } + } + + // Enrich with tank typeOfWater if exists + const matchedTankDetails = await Tank.findOne({ + customerId, + tankhardwareId: slave.tankhardwareId + }).lean(); + + if (matchedTankDetails?.typeOfWater) { + typeOfWater = matchedTankDetails.typeOfWater; + } + + // Update slave Insensors record + await Insensors.updateOne( + { hardwareId: slave.hardwareId }, + { + $set: { + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect + } + } + ); + + connectedSlaves.push({ + hardwareId: slave.hardwareId, + tankhardwareId: slave.tankhardwareId || null, + tankName: slave.tankName || null, + location: slave.tankLocation || null, + connected_status: loraStatus, + connected_lora_date: connectedLoraDate, + connected_lora_time: connectedLoraTime, + lora_last_check_time: loraLastCheckTime, + lora_last_disconnect_time: loraLastDisconnect, + type: slave.type || "slave", + typeOfWater, + connected_to: slave.connected_to || null + }); + } + + result.push({ + hardwareId: master.hardwareId, + masterName, + location, + work_status: orderInfo.work_status || null, + type: master.type || "master", + connected_status: gsmStatus, + connected_slave_count: connectedSlaves.length, + connected_slaves: connectedSlaves, + connected_gsm_date: connectedGsmDate, + connected_gsm_time: connectedGsmTime, + gsm_last_check_time: gsmLastCheckTime, + 
gsm_last_disconnect_time: gsmLastDisconnect, + connected_lora_date: master.connected_lora_date || null, + connected_lora_time: master.connected_lora_time || null + }); + } + + return reply.send({ + status_code: 200, + message: "Master-slave summary retrieved successfully", + data: result + }); + + } catch (error) { + console.error("Error in getMasterSlaveSummary:", error); + return reply.status(500).send({ error: "Internal Server Error" }); + } +}; + + + +// 🔍 Helper to get tankHeight from latest IotData record +async function getTankHeight(tankhardwareId) { + const iotData = await IotData.findOne({ 'tanks.tankhardwareId': tankhardwareId }) + .sort({ date: -1 }) + .lean(); + + if (!iotData) return null; + + const matchedTank = iotData.tanks.find(t => t.tankhardwareId === tankhardwareId); + return matchedTank?.tankHeight || null; +} + + +exports.getIotDataByCustomer = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.code(400).send({ error: "customerId is required" }); + } + + // ✅ Get all sensors for the customer + const sensors = await Insensors.find({ customerId }); + + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for this customer." }); + } + + // ✅ Filter master sensors + let masterSensors = sensors.filter(s => s.type === 'master'); + + // ✅ If hardwareId is provided, filter to that master only + // if (hardwareId) { + // masterSensors = masterSensors.filter(m => m.hardwareId?.trim() === hardwareId.trim()); + // } + + if (!masterSensors.length) { + return reply.code(404).send({ message: "No master found for the given hardwareId or customer." 
}); + } + + const masterHardwareIds = masterSensors.map(m => m.hardwareId?.trim()); + + // ✅ Build map of masterName/location from Order + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; + orders.forEach(order => { + order.master_connections.forEach(connection => { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null + }; + }); + }); + + // ✅ Enrich each master with latest IoT data + const enrichedMasters = await Promise.all(masterHardwareIds.map(async (hardwareId) => { + const latestRecord = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + + const orderInfo = orderMap[hardwareId] || {}; + + if (!latestRecord) { + return { + hardwareId, + message: "No IoT data found", + masterName: orderInfo.masterName ?? null, + location: orderInfo.location ?? null, + tanks: [] + }; + } + + // ✅ GSM connection status + const indiaTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + const gsmConnected = diffInMinutes <= 1; + const message = gsmConnected ? "GSM is connected" : "GSM is not connected"; + + // ✅ Find slaves connected to this master + const connectedSlaves = sensors.filter(sensor => sensor.connected_to?.trim() === hardwareId); + + // ✅ Enrich each slave/tank + const tanks = connectedSlaves.map(slave => { + const slaveId = slave.tankhardwareId?.trim(); + const matchedTank = latestRecord.tanks?.find(t => t.tankhardwareId === slaveId); + + let loraMessage = "LORA is not connected"; + if (matchedTank?.date && matchedTank.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + const loraDiff = now.diff(tankTime, "minutes"); + loraMessage = loraDiff <= 1 ? "LORA is connected" : "LORA is not connected"; + } + + return { + tankhardwareId: slaveId, + tankName: slave.tankName ?? null, + tankLocation: slave.tankLocation ?? 
null, + masterName: orderInfo.masterName ?? null, + location: orderInfo.location ?? null, + loraMessage, + latestTankData: matchedTank ?? null + }; + }); + + return { + hardwareId, + message, + masterName: orderInfo.masterName ?? null, + location: orderInfo.location ?? null, + tanks + }; + })); + + return reply.send({ + status_code: 200, + message: "Success", + data: enrichedMasters + }); + + } catch (err) { + console.error("Error fetching IoT data by customerId:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + + + +exports.getIotDataByCustomerAndHardwareId = async (req, reply) => { + try { + const { customerId, hardwareId } = req.params; + + if (!customerId || !hardwareId) { + return reply.code(400).send({ error: "Both customerId and hardwareId are required" }); + } + + // Step 1: Get all sensors + const sensors = await Insensors.find({ customerId }); + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for this customer." }); + } + // console.log("sensors",sensors) + // Step 2: Get latest IoT data + const latestRecord = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + if (!latestRecord) { + return reply.code(404).send({ + hardwareId, + message: "No IoT data found", + tanks: [] + }); + } + + // Step 3: Calculate GSM connection status + const indiaTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const now = moment.tz("Asia/Kolkata"); + const diffInMinutes = now.diff(indiaTime, "minutes"); + const gsmConnected = diffInMinutes <= 1; + const gsmMessage = gsmConnected ? 
"GSM is connected" : "GSM is not connected"; + + // Step 4: Get all connected slaves + const connectedSlaves = sensors.filter(sensor => sensor.connected_to?.trim() === hardwareId); + // console.log("connectedSlaves",connectedSlaves) + + // Step 5: Get orderMap for fallback master info + const orders = await Order.find({ customerId }).lean(); + // console.log("orders",orders) + + const orderMap = {}; + orders.forEach(order => { + order.master_connections?.forEach(connection => { + orderMap[connection.hardwareId] = { + masterName: connection.master_name || null, + location: connection.location || null + }; + }); + }); + + // Step 6: Fallback master info from orderMap + const fallbackMasterInfo = orderMap[hardwareId] || { masterName: null, location: null }; + console.log("fallbackMasterInfo",fallbackMasterInfo) + + // Step 7: Map tanks data + const tanks = connectedSlaves.map(slave => { + const slaveId = slave.tankhardwareId?.trim(); + const matchedTank = latestRecord.tanks?.find(tank => tank.tankhardwareId === slaveId); + + let loraConnected = false; + let loraMessage = "LORA is not connected"; + + if (matchedTank?.date && matchedTank.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + const loraDiff = now.diff(tankTime, "minutes"); + loraConnected = loraDiff <= 1; + loraMessage = loraConnected ? "LORA is connected" : "LORA is not connected"; + } + + return { + tankhardwareId: slaveId, + tankName: slave.tankName ?? null, + tankLocation: slave.tankLocation ?? null, + masterName: fallbackMasterInfo.masterName, + location: fallbackMasterInfo.location, + message: loraMessage, + latestTankData: matchedTank ?? 
null + }; + }); + + // Step 8: Send response + return reply.send({ + status_code: 200, + message: "Success", + data: { + hardwareId, + message: gsmMessage, + masterName: fallbackMasterInfo.masterName, + location: fallbackMasterInfo.location, + tanks + } + }); + + } catch (err) { + console.error("Error fetching IoT data by customerId and hardwareId:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + +const cron = require("node-cron"); +const Admin = require("../models/admin"); + + +// ⬇️ Include the function here or import it if it's in another file +const updateConnectedStatusOnly = async (customerId, hardwareId) => { + try { + const sensors = await Insensors.find({ customerId }); + if (!sensors.length) return; + + const now = moment.tz("Asia/Kolkata"); + const latestRecord = await IotData.findOne({ hardwareId }).sort({ date: -1 }).lean(); + if (!latestRecord) return; + + const gsmTime = moment.tz(latestRecord.date, "Asia/Kolkata"); + const gsmDiff = now.diff(gsmTime, "minutes"); + const gsmConnected = gsmDiff <= 1; + + const connectedSlaves = sensors.filter(s => s.connected_to?.trim() === hardwareId); + const tankMap = {}; + (latestRecord.tanks || []).forEach(t => { + if (t.tankhardwareId) { + tankMap[t.tankhardwareId.trim()] = t; + } + }); + + const allSlavesConnected = connectedSlaves.every(slave => { + const slaveId = slave.tankhardwareId?.trim(); + const tank = tankMap[slaveId]; + if (!tank || !tank.date || tank.tankHeight === "0") return false; + const loraTime = moment.tz(tank.date, "Asia/Kolkata"); + return now.diff(loraTime, "minutes") <= 1; + }); + + const masterStatus = gsmConnected && allSlavesConnected ? 
"connected" : "disconnected"; + await Insensors.updateOne({ hardwareId, customerId }, { $set: { connected_status: masterStatus } }); + + for (const slave of connectedSlaves) { + const slaveId = slave.tankhardwareId?.trim(); + const tank = tankMap[slaveId]; + + let status = "disconnected"; + if (tank && tank.date && tank.tankHeight !== "0") { + const loraTime = moment.tz(tank.date, "Asia/Kolkata"); + if (now.diff(loraTime, "minutes") <= 1) status = "connected"; + } + + await Insensors.updateOne({ hardwareId: slave.hardwareId }, { $set: { connected_status: status } }); + } + } catch (error) { + console.error("❌ updateConnectedStatusOnly error:", error); + } +}; + + +const generateTicketId = () => { + return "AWTKT" + Date.now(); // Or use UUID or nanoid +}; + + +const raiseATicketLikeLogic = async (supportRecord, masterHardwareId, slaveData = []) => { + const now = new Date(); + const formattedNow = new Date(now.getTime() + 19800000) // +05:30 IST offset + .toISOString() + .replace("T", " ") + .substring(0, 19); + + // ✅ If issue already exists anywhere for this hardwareId, SKIP + const issueAlreadyExists = [ + ...supportRecord.issues, + ...supportRecord.categorizedIssues, + //...supportRecord.resolvedIssues + ].some((issue) => issue.hardwareId === masterHardwareId); + + if (issueAlreadyExists) { + console.log(`⛔ Issue already exists for ${masterHardwareId}. 
Not raising again.`); + return; + } + + // ✅ Prepare slave hardwareIds and names + const slaveHardwareIds = slaveData.map((s) => s.tankhardwareId).sort(); + const slaveNames = slaveData.map((s) => s.sensorName || s.tankName || "").sort(); + + // ✅ Create new issue + const newIssue = { + type: "GSM or LoRa Disconnected", + masterHardwareId, + hardwareId: masterHardwareId, + hardwareIds: slaveHardwareIds, + slaveNames: slaveNames, + resolved: false, + movedToCategory: false, + lastTicketRaisedAt: formattedNow, + createdAt: formattedNow, + }; + + supportRecord.issues.push(newIssue); + supportRecord.lastTicketRaisedAt = formattedNow; + + await supportRecord.save(); + console.log(`✅ New ticket raised for ${masterHardwareId}`); +}; + + +cron.schedule("*/1 * * * *", async () => { + try { + console.log("🔁 Running auto-disconnect ticket check..."); + + // Step 1: Get all support profiles + const allSupportProfiles = await Support.find({}); + + for (const supportRecord of allSupportProfiles) { + const supportId = supportRecord.supportId; + if (!supportId) continue; + + // Step 2: Find all master sensors + const allMasters = await Insensors.find({ type: "master" }).lean(); + + for (const master of allMasters) { + const customerId = master.customerId; + const hardwareId = master.hardwareId; + + if (!customerId || !hardwareId) continue; + + // ✅ Update GSM and LoRa connection statuses + await updateConnectedStatusOnly(customerId, hardwareId); + + // 🔄 Re-fetch updated master and slaves + const updatedMaster = await Insensors.findOne({ hardwareId, customerId }).lean(); + const connectedSlaves = await Insensors.find({ + connected_to: hardwareId, + type: "slave" + }).lean(); + + // Step 3: Check disconnections + const disconnectedSlaves = connectedSlaves.filter( + (s) => s.connected_status === "disconnected" + ); + const masterIsDisconnected = updatedMaster.connected_status === "disconnected"; + + // Step 4: Raise ticket if needed + if (masterIsDisconnected || 
disconnectedSlaves.length > 0) { + await raiseATicketLikeLogic(supportRecord, hardwareId, disconnectedSlaves); + } + } + } + + console.log("✅ Auto ticket check completed."); + } catch (err) { + console.error("❌ Cron error:", err); + } +}); + + + +exports.raiseATicketBuildingDetails = async (req, reply) => { + try { + const { customerId, connected_to, installationId } = req.params; + + if (!customerId || !connected_to || !installationId) { + return reply.code(400).send({ error: "customerId, connected_to, and installationId are required" }); + } + + const customer = await User.findOne({ customerId, installationId }).lean(); + if (!customer) { + return reply.code(404).send({ message: "Customer not found." }); + } + + const sensors = await Insensors.find({ customerId }); + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for this customer." }); + } + + const masterSensor = sensors.find(s => (s.hardwareId?.trim() === connected_to.trim())); + if (!masterSensor) { + return reply.code(404).send({ message: "Master hardwareId not found." }); + } + + const latestMasterRecord = await IotData.findOne({ hardwareId: connected_to }).sort({ date: -1 }).lean(); + if (!latestMasterRecord) { + return reply.code(404).send({ message: "No IoT data found for this hardwareId." 
}); + } + + const indiaTime = moment.tz(latestMasterRecord.date, "Asia/Kolkata"); + const now = moment.tz("Asia/Kolkata"); + const formattedNow = now.format("YYYY-MM-DD HH:mm:ss"); + + const diffInMinutesMaster = now.diff(indiaTime, "minutes"); + + if (diffInMinutesMaster > 1) { + await Insensors.updateOne( + { hardwareId: connected_to }, + { $set: { lastTicketRaisedAt: formattedNow } } + ); + } + + const connectedSlaves = sensors.filter(sensor => sensor.connected_to?.trim() === connected_to.trim()); + + // ✅ Check if any slave is disconnected + const disconnectedSlave = connectedSlaves.find(slave => slave.connected_status === "disconnected"); + if (disconnectedSlave) { + return reply.code(400).send({ + error: `Slave device ${disconnectedSlave.hardwareId} is disconnected. Cannot raise ticket.` + }); + } + + for (const slave of connectedSlaves) { + const slaveId = slave.hardwareId?.trim(); + const matchedTank = latestMasterRecord.tanks.find(tank => tank.tankhardwareId === slaveId); + + if (matchedTank && matchedTank.date) { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + const loraDiffInMinutes = now.diff(tankTime, "minutes"); + + if (loraDiffInMinutes > 1) { + await Insensors.updateOne( + { hardwareId: slaveId }, + { $set: { lastTicketRaisedAt: formattedNow } } + ); + } + } + } + + await Support.updateOne( + { supportId: "AWHYSU64" }, + { $set: { lastTicketRaisedAt: formattedNow } } + ); + + // Fetch updated values + const updatedMasterSensor = await Insensors.findOne({ hardwareId: connected_to }).lean(); + const updatedSupport = await Support.findOne({ supportId: "AWHYSU64" }).lean(); + + return reply.send({ + status_code: 200, + customer, + lastTicketRaisedAt: { + masterSensor: updatedMasterSensor?.lastTicketRaisedAt || null, + support: updatedSupport?.lastTicketRaisedAt || null + } + }); + + } catch (error) { + console.error("Error raising ticket:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + 
+exports.raiseATicketSlave = async (req, reply) => { + try { + const { customerId, connected_to } = req.params; + + if (!customerId || !connected_to) { + return reply.code(400).send({ error: "customerId and connected_to are required" }); + } + + const sensors = await Insensors.find({ customerId }); + const masterSensor = sensors.find(s => s.hardwareId?.trim() === connected_to.trim()); + + if (!masterSensor) { + return reply.code(404).send({ message: "Master hardwareId not found." }); + } + + const latestMasterRecord = await IotData.findOne({ hardwareId: connected_to }).sort({ date: -1 }).lean(); + + if (!latestMasterRecord) { + return reply.code(404).send({ message: "No IoT data found for this hardwareId." }); + } + + const now = moment().tz("Asia/Kolkata"); + const masterTime = moment.tz(latestMasterRecord.date, "Asia/Kolkata"); + const diff = now.diff(masterTime, "minutes"); + + const gsm_connected_status = diff <= 1 ? "connected" : "disconnected"; + const gsmStatus = gsm_connected_status === "connected" ? "GSM Connected" : "GSM Disconnected"; + + const formattedTime = masterTime.format("HH:mm:ss"); + const formattedDate = masterTime.format("DD-MM-YYYY"); + + const tanks = (latestMasterRecord.tanks || []).map(tank => { + const tankTime = moment.tz(tank.date, "Asia/Kolkata"); + const timeDiff = now.diff(tankTime, "minutes"); + + return { + ...tank, + time: tankTime.format("HH:mm:ss"), + connected_status: timeDiff <= 1 ? 
"connected" : "disconnected" + }; + }); + + const responseData = { + hardwareId: connected_to, + gsm_connected_status, + gsmStatus, + connected_gsm_date: formattedDate, + connected_gsm_time: formattedTime, + gsm_last_check_time: now.format("DD-MM-YYYY HH:mm:ss"), + tanks, + date: latestMasterRecord.date, + time: formattedTime + }; + + return reply.send({ + status_code: 200, + message: "Success", + data: responseData + }); + + } catch (error) { + console.error("Error in raiseATicketSlave:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + + + + +exports.getDisconnectedIssuesBySupportId = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId and customerId" }); + } + + const allIssues = supportRecord.issues || []; + const hardwareSet = new Set(); + + for (const issue of allIssues) { + if (issue.hardwareId) hardwareSet.add(issue.hardwareId); + if (issue.masterHardwareId) hardwareSet.add(issue.masterHardwareId); + } + + const hardwareIds = [...hardwareSet]; + + const sensors = await Insensors.find({ + customerId, + $or: [ + { hardwareId: { $in: hardwareIds } }, + { tankhardwareId: { $in: hardwareIds } } + ] + }).lean(); + + const sensorMap = {}; + for (const sensor of sensors) { + if (sensor.hardwareId) sensorMap[sensor.hardwareId] = sensor; + if (sensor.tankhardwareId) sensorMap[sensor.tankhardwareId] = sensor; + } + + const orders = await Order.find({ customerId }).lean(); + + const orderMap = {}; + for (const order of orders) { + (order.master_connections || []).forEach(conn => { + if (conn.hardwareId) { + orderMap[conn.hardwareId] = { + masterName: conn.master_name || null, + location: conn.location || 
null + }; + } + }); + } + + const slaveOrderMap = {}; + for (const order of orders) { + (order.tank_connections || []).forEach(conn => { + if (conn.hardwareId) { + slaveOrderMap[conn.hardwareId] = { + location: conn.location || null, + typeOfWater: conn.typeOfWater || null + }; + } + }); + } + + const masterMap = {}; + const now = moment.tz("Asia/Kolkata"); + + for (const issue of allIssues) { + const masterId = issue.masterHardwareId || issue.hardwareId; + const masterSensor = sensorMap[masterId]; + if (!masterSensor || masterSensor.type !== "master") continue; + + const stillUnresolved = allIssues.some(i => + (i.hardwareId === masterSensor.hardwareId || i.masterHardwareId === masterSensor.hardwareId) && + !i.resolved && !i.movedToCategory + ); + + if (!stillUnresolved) continue; + + const latestMasterData = await IotData.findOne({ hardwareId: masterSensor.hardwareId }).sort({ date: -1 }).lean(); + + let gsmConnected = false; + if (latestMasterData?.date) { + const gsmTime = moment.tz(latestMasterData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + // Get latest lastTicketRaisedAt for this master + const relatedIssues = allIssues.filter( + i => + i.hardwareId === masterSensor.hardwareId || + i.masterHardwareId === masterSensor.hardwareId + ); + + const lastTicketRaisedAt = relatedIssues.reduce((latest, issue) => { + if (!issue.lastTicketRaisedAt) return latest; + const current = new Date(issue.lastTicketRaisedAt); + return !latest || current > new Date(latest) ? issue.lastTicketRaisedAt : latest; + }, null); + + if (!masterMap[masterSensor.hardwareId]) { + const enriched = orderMap[masterSensor.hardwareId] || {}; + masterMap[masterSensor.hardwareId] = { + hardwareId: masterSensor.hardwareId, + masterName: enriched.masterName || masterSensor.masterName || "", + location: enriched.location || masterSensor.location || "", + type: "master", + connected_status: gsmConnected ? 
"connected" : "disconnected", + gsm_last_check_time: masterSensor.gsm_last_check_time, + gsm_last_disconnect_time: masterSensor.gsm_last_disconnect_time, + connected_gsm_date: masterSensor.connected_gsm_date, + connected_gsm_time: masterSensor.connected_gsm_time, + connected_lora_date: masterSensor.connected_lora_date, + connected_lora_time: masterSensor.connected_lora_time, + support_gsm_last_check_time: masterSensor.support_gsm_last_check_time, + support_lora_last_check_time: masterSensor.support_lora_last_check_time, + team_member_support_gsm_last_check_time: masterSensor.team_member_support_gsm_last_check_time, + team_member_support_lora_last_check_time: masterSensor.team_member_support_lora_last_check_time, + outDoor_status: masterSensor.outDoor_status || "inprogress", + connected_slave_count: 0, + connected_slaves: [], + lastTicketRaisedAt: lastTicketRaisedAt + }; + } + + const master = masterMap[masterSensor.hardwareId]; + const connectedSlaves = await Insensors.find({ + connected_to: masterSensor.hardwareId, + type: "slave", + customerId + }).lean(); + + const slaveSet = new Set(master.connected_slaves.map(s => s.hardwareId)); + + for (const slave of connectedSlaves) { + const slaveHardwareId = slave.tankhardwareId || slave.hardwareId; + if (slaveSet.has(slaveHardwareId)) continue; + slaveSet.add(slaveHardwareId); + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + const slaveOrderInfo = slaveOrderMap[slaveHardwareId] || {}; + + const matchedTank = latestMasterData?.tanks?.find(t => t.tankhardwareId === slaveHardwareId); + let loraConnected = false; + if (matchedTank?.date && matchedTank.tankHeight !== "0") { + const loraTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(loraTime, "minutes") <= 1; + } + + // Get latest lastTicketRaisedAt for this slave + const slaveRelatedIssues = allIssues.filter( + i => i.hardwareId === slaveHardwareId 
+ ); + + const slaveLastTicketRaisedAt = slaveRelatedIssues.reduce((latest, issue) => { + if (!issue.lastTicketRaisedAt) return latest; + const current = new Date(issue.lastTicketRaisedAt); + return !latest || current > new Date(latest) ? issue.lastTicketRaisedAt : latest; + }, null); + + const slaveEnriched = { + hardwareId: slaveHardwareId, + tankName: slave.tankName || tankInfo?.tankName || "", + location: slave.location || tankInfo?.tankLocation || slaveOrderInfo.location || "", + connected_status: loraConnected ? "connected" : "disconnected", + connected_lora_time: slave.connected_lora_time, + connected_lora_date: slave.connected_lora_date, + lora_last_check_time: slave.lora_last_check_time, + lora_last_disconnect_time: slave.lora_last_disconnect_time, + connected_to: slave.connected_to, + masterName: master.masterName, + type: "slave", + typeOfWater: slave.typeOfWater || tankInfo?.typeOfWater || slaveOrderInfo.typeOfWater || "", + tankHeight: slave.tankHeight, + support_lora_last_check_time: slave.support_lora_last_check_time, + team_member_support_lora_last_check_time: slave.team_member_support_lora_last_check_time, + lastTicketRaisedAt: slaveLastTicketRaisedAt, + outDoor_status: slave.outDoor_status || "inprogress", + + }; + + master.connected_slaves.push(slaveEnriched); + master.connected_slave_count++; + } + } + + // Filter comments by customerId + const comments = (supportRecord.comments || []) + .filter(c => c.customerId === customerId) + .map(c => ({ + text: c.text, + call_status: c.call_status, + call_time: c.call_time, + commentsTime: moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") + })); + + for (const master of Object.values(masterMap)) { + master.comments = comments; + + // const masterCallRecords = (supportRecord.callRecord || []) + // .filter(record => + // record.customerId === customerId && + // record.hardwareId === master.hardwareId + // ) + // .map(record => ({ + // call_status: record.call_status, + // call_time: 
record.call_time, + // createdAt: record.createdAt + // ? moment(record.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") + // : null + // })); + + // master.callRecord = masterCallRecords; + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalMasters: Object.keys(masterMap).length, + disconnectedIssues: Object.values(masterMap) + }); + } catch (error) { + console.error("Error fetching disconnected issues:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + +exports.getResolvedIssuesBySupportId = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found" }); + } + + const resolvedIssues = (supportRecord.resolvedIssues || []).filter(issue => issue.currentlyResolved !== false); + const hardwareSet = new Set(); + + for (const issue of resolvedIssues) { + if (issue.hardwareId) hardwareSet.add(issue.hardwareId); + if (issue.masterHardwareId) hardwareSet.add(issue.masterHardwareId); + } + + const hardwareIds = [...hardwareSet]; + + const sensors = await Insensors.find({ + customerId, + $or: [ + { hardwareId: { $in: hardwareIds } }, + { tankhardwareId: { $in: hardwareIds } } + ] + }).lean(); + + const sensorMap = {}; + for (const sensor of sensors) { + if (sensor.hardwareId) sensorMap[sensor.hardwareId] = sensor; + if (sensor.tankhardwareId) sensorMap[sensor.tankhardwareId] = sensor; + } + + const orders = await Order.find({ customerId }).lean(); + + const orderMap = {}; + for (const order of orders) { + (order.master_connections || []).forEach(conn => { + if (conn.hardwareId) { + orderMap[conn.hardwareId] = { + masterName: conn.master_name || null, + location: conn.location || null + }; + } + }); + 
} + + const slaveOrderMap = {}; + for (const order of orders) { + (order.tank_connections || []).forEach(conn => { + if (conn.hardwareId) { + slaveOrderMap[conn.hardwareId] = { + location: conn.location || null, + typeOfWater: conn.typeOfWater || null + }; + } + }); + } + + const masterMap = {}; + + for (const issue of resolvedIssues) { + const masterId = issue.masterHardwareId || issue.hardwareId; + const masterSensor = sensorMap[masterId]; + if (!masterSensor || masterSensor.type !== "master") continue; + + const enriched = orderMap[masterId] || {}; + + const masterEntry = { + hardwareId: masterSensor.hardwareId, + masterName: enriched.masterName || masterSensor.masterName || "", + location: enriched.location || masterSensor.location || "", + type: "master", + connected_status: "connected", + connected_gsm_date: masterSensor.connected_gsm_date, + connected_gsm_time: masterSensor.connected_gsm_time, + connected_lora_date: masterSensor.connected_lora_date, + connected_lora_time: masterSensor.connected_lora_time, + gsm_last_check_time: masterSensor.gsm_last_check_time, + gsm_last_disconnect_time: masterSensor.gsm_last_disconnect_time, + support_gsm_last_check_time: masterSensor.support_gsm_last_check_time, + support_lora_last_check_time: masterSensor.support_lora_last_check_time, + team_member_support_gsm_last_check_time: masterSensor.team_member_support_gsm_last_check_time, + team_member_support_lora_last_check_time: masterSensor.team_member_support_lora_last_check_time, + connected_slave_count: 0, + connected_slaves: [], + resolvedAt: issue.resolvedAt, + originalMovedAt: issue.originalMovedAt, + reason: issue.reason + }; + + const connectedSlaves = await Insensors.find({ + connected_to: masterId, + type: "slave", + customerId + }).lean(); + + for (const slave of connectedSlaves) { + const slaveHardwareId = slave.tankhardwareId || slave.hardwareId; + + const tankInfo = await Tank.findOne({ + $or: [{ hardwareId: slaveHardwareId }, { tankhardwareId: slaveHardwareId 
}] + }).lean(); + + const slaveOrderInfo = slaveOrderMap[slaveHardwareId] || {}; + + const slaveEnriched = { + hardwareId: slaveHardwareId, + tankName: slave.tankName || tankInfo?.tankName || "", + location: slave.location || tankInfo?.tankLocation || slaveOrderInfo.location || "", + connected_status: "connected", + connected_lora_time: slave.connected_lora_time, + connected_lora_date: slave.connected_lora_date, + lora_last_check_time: slave.lora_last_check_time, + lora_last_disconnect_time: slave.lora_last_disconnect_time, + connected_to: slave.connected_to, + masterName: masterEntry.masterName, + type: "slave", + typeOfWater: slave.typeOfWater || tankInfo?.typeOfWater || slaveOrderInfo.typeOfWater || "", + tankHeight: slave.tankHeight, + support_lora_last_check_time: slave.support_lora_last_check_time, + team_member_support_lora_last_check_time: slave.team_member_support_lora_last_check_time + }; + + masterEntry.connected_slaves.push(slaveEnriched); + masterEntry.connected_slave_count++; + } + + masterMap[masterSensor.hardwareId] = masterEntry; + } + + // Format and attach comments + const comments = (supportRecord.comments || []).map(c => ({ + text: c.text, + commentsTime: moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") + })); + + for (const master of Object.values(masterMap)) { + master.comments = comments; + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalResolved: Object.keys(masterMap).length, + resolvedIssues: Object.values(masterMap) + }); + } catch (err) { + console.error("Error in getResolvedIssuesBySupportId:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + +exports.getRemoveConnectedMastersWithSlaves = async (req, reply) => { + try { + const { supportId, hardwareId } = req.params; + + if (!supportId || !hardwareId) { + return reply.code(400).send({ error: "supportId and hardwareId are required" }); + } + + // Step 1: Get support record + const supportRecord = await 
Support.findOne({ supportId }); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + // Step 2: Get master sensor + const masterSensor = await Insensors.findOne({ hardwareId, type: "master" }).lean(); + if (!masterSensor) { + return reply.code(404).send({ message: "Master not found in Insensors" }); + } + + // Step 3: Check master is connected + if (masterSensor.connected_status !== "connected") { + return reply.code(400).send({ message: "Master is not in connected status" }); + } + + // Step 4: Get connected slave sensors + const slaveSensors = await Insensors.find({ connected_to: hardwareId, type: "slave" }).lean(); + + const allSlavesConnected = slaveSensors.every(slave => slave.connected_status === "connected"); + + if (!allSlavesConnected) { + return reply.code(400).send({ message: "Not all connected slaves are in connected status" }); + } + + // Step 5: Move issues from issues → resolvedIssues if hardwareId or masterHardwareId matches + const currentIssues = supportRecord.issues || []; + const resolvedIssues = supportRecord.resolvedIssues || []; + + const { movedIssues, remainingIssues } = currentIssues.reduce( + (acc, issue) => { + if (issue.hardwareId === hardwareId || issue.masterHardwareId === hardwareId) { + issue.resolvedAt = new Date(); + acc.movedIssues.push(issue); + } else { + acc.remainingIssues.push(issue); + } + return acc; + }, + { movedIssues: [], remainingIssues: [] } + ); + + // Step 6: Save updates + await Support.updateOne( + { supportId }, + { + $set: { + issues: remainingIssues, + resolvedIssues: [...resolvedIssues, ...movedIssues] + } + } + ); + + return reply.send({ + status_code: 200, + message: `Master and all connected slaves are connected. 
${movedIssues.length} issue(s) moved to resolved.`, + movedHardwareId: hardwareId, + resolvedIssues: movedIssues + }); + + } catch (error) { + console.error("Error in getRemoveConnectedMastersWithSlaves:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + +exports.getDisconnectedCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + const unresolvedIssues = supportRecord.issues?.filter( + (issue) => issue.resolved === false && issue.movedToCategory === false + ) || []; + + const existingCategorizedHardwareIds = new Set(); + (supportRecord.categorizedIssues || []).forEach(issue => { + if (issue.hardwareId) existingCategorizedHardwareIds.add(issue.hardwareId.trim().toLowerCase()); + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(id => { + if (typeof id === "string") existingCategorizedHardwareIds.add(id.trim().toLowerCase()); + }); + } + }); + + const resolvedHardwareIds = new Set(); + (supportRecord.resolvedIssues || []).forEach(issue => { + if (issue.hardwareId) resolvedHardwareIds.add(issue.hardwareId.trim().toLowerCase()); + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(id => { + if (typeof id === "string") resolvedHardwareIds.add(id.trim().toLowerCase()); + }); + } + }); + + const hardwareIdsArray = new Set(); + unresolvedIssues.forEach((issue) => { + if (issue.hardwareId) hardwareIdsArray.add(issue.hardwareId.trim()); + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach((id) => { + if (typeof id === "string") hardwareIdsArray.add(id.trim()); + }); + } + }); + + const allHardwareIds = [...hardwareIdsArray]; + + const relevantSensorsRaw = await 
Insensors.find({ + $or: [ + { connected_to: { $in: allHardwareIds } }, + { hardwareId: { $in: allHardwareIds } }, + { tankhardwareId: { $in: allHardwareIds } } + ] + }).lean(); + + // ✅ Allow resolved hardwareIds that are part of new unresolved issues + const unresolvedHardwareIdSet = new Set(); + unresolvedIssues.forEach(issue => { + if (issue.hardwareId) unresolvedHardwareIdSet.add(issue.hardwareId.trim().toLowerCase()); + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(id => { + if (typeof id === "string") unresolvedHardwareIdSet.add(id.trim().toLowerCase()); + }); + } + }); + + const sensors = relevantSensorsRaw.filter(sensor => { + const ids = [ + sensor.hardwareId?.trim().toLowerCase(), + sensor.connected_to?.trim().toLowerCase(), + sensor.tankhardwareId?.trim().toLowerCase() + ]; + return !ids.some(id => + existingCategorizedHardwareIds.has(id) || + (resolvedHardwareIds.has(id) && !unresolvedHardwareIdSet.has(id)) + ); + }); + + const customerHardwareMap = {}; + for (const sensor of sensors) { + const custId = sensor.customerId; + if (!customerHardwareMap[custId]) { + customerHardwareMap[custId] = new Set(); + } + + const sensorHw = sensor.tankhardwareId?.trim().toLowerCase(); + const sensorConnected = sensor.connected_to?.trim().toLowerCase(); + const sensorHardwareId = sensor.hardwareId?.trim().toLowerCase(); + + for (const issue of unresolvedIssues) { + const allIssueHardwareIds = [ + ...(issue.hardwareIds?.map(id => id?.trim().toLowerCase()) || []), + issue.hardwareId?.trim().toLowerCase() + ]; + + const isCategorizedOrResolved = [sensorHw, sensorConnected, sensorHardwareId].some(id => + id && (existingCategorizedHardwareIds.has(id) || (resolvedHardwareIds.has(id) && !unresolvedHardwareIdSet.has(id))) + ); + if (isCategorizedOrResolved) continue; + + if ( + (sensorHw && allIssueHardwareIds.includes(sensorHw)) || + (sensorConnected && allIssueHardwareIds.includes(sensorConnected)) || + (sensorHardwareId && 
allIssueHardwareIds.includes(sensorHardwareId)) + ) { + for (const hw of allIssueHardwareIds) { + if (hw && !existingCategorizedHardwareIds.has(hw) && (!resolvedHardwareIds.has(hw) || unresolvedHardwareIdSet.has(hw))) { + customerHardwareMap[custId].add(hw); + } + } + } + } + } + + const customerDetails = await User.find({ + customerId: { $in: Object.keys(customerHardwareMap) } + }).lean(); + + const customerResults = customerDetails.map((customer) => { + const customerId = customer.customerId; + const affectedHardwareSet = customerHardwareMap[customerId] || new Set(); + const affectedLowerSet = new Set([...affectedHardwareSet].map(id => id.toLowerCase().trim())); + + const sensorsForCustomer = sensors.filter(s => s.customerId === customerId); + + const disconnectedSlaves = sensorsForCustomer.filter(s => s.type === 'slave' && s.connected_status === "disconnected"); + const disconnectedMasters = sensorsForCustomer.filter(s => s.type === 'master' && s.connected_status === "disconnected"); + + const uniqueDisconnectedHardwareIds = new Set([ + ...disconnectedMasters.map(s => s.hardwareId?.trim()), + ...disconnectedSlaves.map(s => s.tankhardwareId?.trim() || s.hardwareId?.trim()) + ].filter(Boolean)); + + const disconnectedCount = disconnectedMasters.length + disconnectedSlaves.length; + + const customerIssues = unresolvedIssues.filter(issue => { + const allIssueHardwareIds = [ + ...(issue.hardwareIds?.map(id => id?.trim().toLowerCase()) || []), + issue.hardwareId?.trim().toLowerCase() + ].filter(Boolean); + return allIssueHardwareIds.some(hw => affectedLowerSet.has(hw)); + }); + + const lastTicketRaisedAt = customerIssues.reduce((latest, issue) => { + const issueTime = new Date(issue.lastTicketRaisedAt); + if (!isNaN(issueTime)) { + return (!latest || issueTime > new Date(latest)) ? 
issue.lastTicketRaisedAt : latest; + } + return latest; + }, null); + + return { + customerId: customer.customerId, + buildingName: customer.buildingName || "", + location: customer.location || "", + username: customer.username || "", + firstName: customer.profile?.firstName || "", + lastName: customer.profile?.lastName || "", + phone: customer.phone || customer.profile?.contactNumber || "", + email: customer.emails?.[0]?.email || "", + phoneVerified: customer.phoneVerified || false, + address1: customer.profile?.address1 || "", + address2: customer.profile?.address2 || "", + city: customer.profile?.city || "", + latitude: customer.latitude, + longitude: customer.longitude, + totalHardwareIdsCount: uniqueDisconnectedHardwareIds.size, + hardwareIds: [...uniqueDisconnectedHardwareIds], + lastTicketRaisedAt: lastTicketRaisedAt || null, + disconnectedCount + }; + }); + + return reply.code(200).send({ + success: true, + totalCustomers: customerResults.length, + customers: customerResults + }); + + } catch (error) { + console.error("Error in getDisconnectedCustomerDetails:", error); + return reply.code(500).send({ + success: false, + message: "Internal Server Error" + }); + } +}; + + + + +exports.getDisconnectedCustomerDetailsByTeamMemberId = async (req, reply) => { + try { + const { support_teamMemberId } = req.params; + + if (!support_teamMemberId) { + return reply.code(400).send({ error: "support_teamMemberId is required" }); + } + + // Step 1: Get support record with categorized issues assigned to the team member + const supportRecord = await Support.findOne({ + "team_member.team_member.support_teamMemberId": support_teamMemberId + }).lean(); + + if (!supportRecord) { + return reply.code(404).send({ message: "Support record not found" }); + } + + // Step 2: Filter categorized issues assigned to this team member + const assignedIssues = (supportRecord.categorizedIssues || []).filter( + issue => issue.assignedTo?.support_teamMemberId === support_teamMemberId + ); + + 
// Step 3: Extract unique hardwareIds from assigned issues + const assignedHardwareIds = [ + ...new Set( + assignedIssues.map(issue => issue.hardwareId || issue.masterHardwareId).filter(Boolean) + ) + ]; + + if (assignedHardwareIds.length === 0) { + return reply.code(404).send({ message: "No categorized issues assigned to this team member" }); + } + + // Step 4: Find disconnected insensors (either masters or slaves) + const disconnectedDevices = await Insensors.find({ + $or: [ + { hardwareId: { $in: assignedHardwareIds } }, + { connected_to: { $in: assignedHardwareIds } } + ], + connected_status: "disconnected" + }).lean(); + + if (disconnectedDevices.length === 0) { + return reply.code(404).send({ message: "No disconnected devices assigned to this team member" }); + } + + // Step 5: Extract unique customerIds + const customerIds = [...new Set(disconnectedDevices.map(d => d.customerId))]; + + const users = await User.find({ customerId: { $in: customerIds } }).lean(); + console.log("users",users) + + // Step 6: Prepare final response + const response = users.map(user => ({ + customerId: user.customerId, + firstName: user.profile?.firstName || "", + lastName: user.profile?.lastName || "", + address1: user.profile?.address1 || "", + address2: user.profile?.address2 || "", + + phone: user.phone || user.profile?.contactNumber || "", + email: user.emails?.[0]?.email || "", + + latitude: user.latitude, + longitude: user.longitude, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string"), + installationId: user.installationId || "", + username: user.username || "", + buildingName: user.buildingName || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: 
user.notificationPreference || "never" + }, + createdAt: user.createdAt, + updatedAt: user.updatedAt + })); + + return reply.send({ status_code: 200, data: response }); + + } catch (error) { + console.error("Error in getDisconnectedCustomerDetailsByTeamMemberId:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.getDisconnectedMoveCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + // ✅ Filter only "Power Outage" or "OutDoor Escalation" issues + const validCategories = ["Power Outage", "OutDoor Escalation"]; + const categorizedHardwareIds = []; + + for (const issue of supportRecord.categorizedIssues || []) { + if (!validCategories.includes(issue.category)) continue; + + if (issue.hardwareId) categorizedHardwareIds.push(issue.hardwareId); + if (Array.isArray(issue.hardwareIds)) categorizedHardwareIds.push(...issue.hardwareIds); + } + + if (categorizedHardwareIds.length === 0) { + return reply.code(404).send({ message: "No hardware IDs in categorized issues with Power Outage or OutDoor Escalation" }); + } + + const allSensors = await Insensors.find({ + $or: [ + { hardwareId: { $in: categorizedHardwareIds } }, + { connected_to: { $in: categorizedHardwareIds } } + ] + }).lean(); + + if (allSensors.length === 0) { + return reply.code(404).send({ message: "No sensors found for categorized hardware IDs" }); + } + + // Determine connection status per customer + const customerStatusMap = {}; + + for (const sensor of allSensors) { + const cid = sensor.customerId; + if (!cid) continue; + + if (!customerStatusMap[cid]) { + customerStatusMap[cid] = { status: "unknown" }; + } + + if (sensor.connected_status === 
"disconnected") { + customerStatusMap[cid].status = "disconnected"; + } else if (sensor.connected_status === "connected" && customerStatusMap[cid].status !== "disconnected") { + customerStatusMap[cid].status = "connected"; + } + } + + const customerIds = Object.keys(customerStatusMap); + const users = await User.find({ customerId: { $in: customerIds } }).lean(); + + const combinedCustomerList = users.map(user => { + const cid = user.customerId; + return { + customer: { + customerId: cid, + connectionStatus: customerStatusMap[cid]?.status || "unknown", + username: user.username || "", + firstName: user.profile?.firstName || user.firstName || "", + lastName: user.profile?.lastName || user.lastName || "", + phone: user.phone || user.profile?.contactNumber || user.alternativeNumber || "", + email: user.emails?.[0]?.email || user.email || "", + phoneVerified: user.phoneVerified || false, + address1: user.profile?.address1 || user.address1 || "", + address2: user.profile?.address2 || user.address2 || "", + city: user.profile?.city || user.city || "", + state: user.profile?.state || user.state || "", + country: user.profile?.country || user.country || "", + zip: user.profile?.zip || "", + notes: user.profile?.notes || "", + latitude: user.latitude || 0, + longitude: user.longitude || 0, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string" && fcm.startsWith("d")), + installationId: user.installationId || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: user.notificationPreference || "never" + }, + surveyStatus: user.survey_status || "pending", + buildingName: user.buildingName || "", + stripePaymentStatus: user.stripePaymentStatus || false, + 
stripeSubscriptionStatus: user.stripeSubscriptionStatus || false, + createdAt: user.createdAt, + updatedAt: user.updatedAt + } + }; + }); + + return reply.send({ + status_code: 200, + data: combinedCustomerList + }); + + } catch (error) { + console.error("Error in getDisconnectedMoveCustomerDetails:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + +exports.getResolvedCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + console.log("✅ Support Record:", JSON.stringify(supportRecord, null, 2)); + + const resolvedIssues = supportRecord.resolvedIssues || []; + + if (!Array.isArray(resolvedIssues) || resolvedIssues.length === 0) { + return reply.code(404).send({ message: "No resolved issues to process" }); + } + + const resolvedHardwareIds = []; + for (const issue of resolvedIssues) { + if (issue.hardwareId) resolvedHardwareIds.push(issue.hardwareId); + if (Array.isArray(issue.hardwareIds)) resolvedHardwareIds.push(...issue.hardwareIds); + } + + if (resolvedHardwareIds.length === 0) { + return reply.code(404).send({ message: "No hardware IDs in resolved issues" }); + } + + const sensors = await Insensors.find({ + $or: [ + { hardwareId: { $in: resolvedHardwareIds } }, + { connected_to: { $in: resolvedHardwareIds } } + ] + }).lean(); + + if (sensors.length === 0) { + return reply.code(404).send({ message: "No sensors found for resolved hardware" }); + } + + const customerIds = [...new Set(sensors.map(s => s.customerId))]; + const customers = await User.find({ customerId: { $in: customerIds } }).lean(); + + const uniqueCustomerMap = {}; + +// Step 1: Build map of resolvedAt per hardwareId +const resolvedHardwareMap = 
{}; +for (const issue of resolvedIssues) { + const resolvedAt = issue.resolvedAt; + if (issue.hardwareId) resolvedHardwareMap[issue.hardwareId] = resolvedAt; + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(hid => { + resolvedHardwareMap[hid] = resolvedAt; + }); + } +} + + for (const user of customers) { + const cid = user.customerId; + if (!uniqueCustomerMap[cid]) { + // Find all matching resolvedAt timestamps for this user's hardwareIds + const customerSensorHardwareIds = sensors + .filter(s => s.customerId === cid) + .map(s => s.hardwareId || s.tankhardwareId || s.connected_to) + .filter(Boolean); + + const resolvedTimes = customerSensorHardwareIds + .map(hid => resolvedHardwareMap[hid]) + .filter(Boolean) + .sort((a, b) => new Date(b) - new Date(a)); // Get most recent + + uniqueCustomerMap[cid] = { + customer: { + customerId: cid, + username: user.username || "", + firstName: user.profile?.firstName || user.firstName || "", + lastName: user.profile?.lastName || user.lastName || "", + phone: user.phone || user.profile?.contactNumber || user.alternativeNumber || "", + email: user.emails?.[0]?.email || user.email || "", + phoneVerified: user.phoneVerified || false, + address1: user.profile?.address1 || user.address1 || "", + address2: user.profile?.address2 || user.address2 || "", + city: user.profile?.city || user.city || "", + state: user.profile?.state || user.state || "", + country: user.profile?.country || user.country || "", + zip: user.profile?.zip || "", + notes: user.profile?.notes || "", + latitude: user.latitude || 0, + longitude: user.longitude || 0, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string" && fcm.startsWith("d")), + installationId: user.installationId || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + 
criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: user.notificationPreference || "never" + }, + surveyStatus: user.survey_status || "pending", + buildingName: user.buildingName || "", + stripePaymentStatus: user.stripePaymentStatus || false, + stripeSubscriptionStatus: user.stripeSubscriptionStatus || false, + createdAt: user.createdAt, + updatedAt: user.updatedAt, + resolvedAt: resolvedTimes[0] || null // 🆕 Include latest resolved time if available + } + }; + } + } + + + return reply.send({ + status_code: 200, + data: Object.values(uniqueCustomerMap) + }); + + } catch (error) { + console.error("❌ Error fetching resolved customer details:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.getLongTermCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + const categorizedIssues = supportRecord.categorizedIssues || []; + const longTermIssues = categorizedIssues.filter(issue => issue.category === "LongTerm Issues"); + + if (!longTermIssues.length) { + return reply.code(404).send({ message: "No Long Term Issues to process" }); + } + + const hardwareIds = []; + for (const issue of longTermIssues) { + if (issue.hardwareId) hardwareIds.push(issue.hardwareId); + if (Array.isArray(issue.hardwareIds)) hardwareIds.push(...issue.hardwareIds); + } + + if (!hardwareIds.length) { + return reply.code(404).send({ message: "No hardware IDs in Long Term Issues" }); + } + + // Fetch related sensors to get customerId + const sensors = await Insensors.find({ + $or: [ + { hardwareId: { $in: hardwareIds } }, + { connected_to: { $in: hardwareIds } } + ] 
+ }).lean(); + + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for Long Term hardware" }); + } + + const customerIds = [...new Set(sensors.map(s => s.customerId))]; + + const customers = await User.find({ customerId: { $in: customerIds } }).lean(); + + // Build map of movedAt per hardwareId + const movedAtMap = {}; + for (const issue of longTermIssues) { + const movedAt = issue.movedAt; + if (issue.hardwareId) movedAtMap[issue.hardwareId] = movedAt; + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(hid => { + movedAtMap[hid] = movedAt; + }); + } + } + + const uniqueCustomerMap = {}; + + for (const user of customers) { + const cid = user.customerId; + + if (!uniqueCustomerMap[cid]) { + const customerSensorHardwareIds = sensors + .filter(s => s.customerId === cid) + .map(s => s.hardwareId || s.tankhardwareId || s.connected_to) + .filter(Boolean); + + const movedTimes = customerSensorHardwareIds + .map(hid => movedAtMap[hid]) + .filter(Boolean) + .sort((a, b) => new Date(b) - new Date(a)); // Descending + + uniqueCustomerMap[cid] = { + customer: { + customerId: cid, + username: user.username || "", + firstName: user.profile?.firstName || user.firstName || "", + lastName: user.profile?.lastName || user.lastName || "", + phone: user.phone || user.profile?.contactNumber || user.alternativeNumber || "", + email: user.emails?.[0]?.email || user.email || "", + phoneVerified: user.phoneVerified || false, + address1: user.profile?.address1 || user.address1 || "", + address2: user.profile?.address2 || user.address2 || "", + city: user.profile?.city || user.city || "", + state: user.profile?.state || user.state || "", + country: user.profile?.country || user.country || "", + zip: user.profile?.zip || "", + notes: user.profile?.notes || "", + latitude: user.latitude || 0, + longitude: user.longitude || 0, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string" && fcm.startsWith("d")), + installationId: 
user.installationId || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: user.notificationPreference || "never" + }, + surveyStatus: user.survey_status || "pending", + buildingName: user.buildingName || "", + stripePaymentStatus: user.stripePaymentStatus || false, + stripeSubscriptionStatus: user.stripeSubscriptionStatus || false, + createdAt: user.createdAt, + updatedAt: user.updatedAt, + movedAt: movedTimes[0] || null // ⏳ Latest movedAt + } + }; + } + } + + return reply.send({ + status_code: 200, + data: Object.values(uniqueCustomerMap) + }); + + } catch (error) { + console.error("❌ Error in getLongTermCustomerDetails:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.powerOutageCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + const categorizedIssues = supportRecord.categorizedIssues || []; + const longTermIssues = categorizedIssues.filter(issue => issue.category === "Power Outage"); + + if (!longTermIssues.length) { + return reply.code(404).send({ message: "No Long Term Issues to process" }); + } + + const hardwareIds = []; + for (const issue of longTermIssues) { + if (issue.hardwareId) hardwareIds.push(issue.hardwareId); + if (Array.isArray(issue.hardwareIds)) hardwareIds.push(...issue.hardwareIds); + } + + if (!hardwareIds.length) { + return reply.code(404).send({ message: "No hardware 
IDs in Long Term Issues" }); + } + + // Fetch related sensors to get customerId + const sensors = await Insensors.find({ + $or: [ + { hardwareId: { $in: hardwareIds } }, + { connected_to: { $in: hardwareIds } } + ] + }).lean(); + + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for Long Term hardware" }); + } + + const customerIds = [...new Set(sensors.map(s => s.customerId))]; + + const customers = await User.find({ customerId: { $in: customerIds } }).lean(); + + // Build map of movedAt per hardwareId + const movedAtMap = {}; + for (const issue of longTermIssues) { + const movedAt = issue.movedAt; + if (issue.hardwareId) movedAtMap[issue.hardwareId] = movedAt; + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(hid => { + movedAtMap[hid] = movedAt; + }); + } + } + + const uniqueCustomerMap = {}; + + for (const user of customers) { + const cid = user.customerId; + + if (!uniqueCustomerMap[cid]) { + const customerSensorHardwareIds = sensors + .filter(s => s.customerId === cid) + .map(s => s.hardwareId || s.tankhardwareId || s.connected_to) + .filter(Boolean); + + const movedTimes = customerSensorHardwareIds + .map(hid => movedAtMap[hid]) + .filter(Boolean) + .sort((a, b) => new Date(b) - new Date(a)); // Descending + + uniqueCustomerMap[cid] = { + customer: { + customerId: cid, + username: user.username || "", + firstName: user.profile?.firstName || user.firstName || "", + lastName: user.profile?.lastName || user.lastName || "", + phone: user.phone || user.profile?.contactNumber || user.alternativeNumber || "", + email: user.emails?.[0]?.email || user.email || "", + phoneVerified: user.phoneVerified || false, + address1: user.profile?.address1 || user.address1 || "", + address2: user.profile?.address2 || user.address2 || "", + city: user.profile?.city || user.city || "", + state: user.profile?.state || user.state || "", + country: user.profile?.country || user.country || "", + zip: user.profile?.zip || "", + 
notes: user.profile?.notes || "", + latitude: user.latitude || 0, + longitude: user.longitude || 0, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string" && fcm.startsWith("d")), + installationId: user.installationId || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: user.notificationPreference || "never" + }, + surveyStatus: user.survey_status || "pending", + buildingName: user.buildingName || "", + stripePaymentStatus: user.stripePaymentStatus || false, + stripeSubscriptionStatus: user.stripeSubscriptionStatus || false, + createdAt: user.createdAt, + updatedAt: user.updatedAt, + movedAt: movedTimes[0] || null // ⏳ Latest movedAt + } + }; + } + } + + return reply.send({ + status_code: 200, + data: Object.values(uniqueCustomerMap) + }); + + } catch (error) { + console.error("❌ Error in powerOutageCustomerDetails:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.outDoorEscalationCustomerDetails = async (req, reply) => { + try { + const { supportId } = req.params; + + if (!supportId) { + return reply.code(400).send({ error: "supportId is required" }); + } + + const supportRecord = await Support.findOne({ supportId }).lean(); + if (!supportRecord) { + return reply.code(404).send({ message: "No support record found for this supportId" }); + } + + const categorizedIssues = supportRecord.categorizedIssues || []; + const longTermIssues = categorizedIssues.filter(issue => issue.category === "OutDoor Escalation"); + + if (!longTermIssues.length) { + return reply.code(404).send({ message: "No Long Term Issues to process" }); + } + + const hardwareIds = []; + for (const issue of longTermIssues) { + if 
(issue.hardwareId) hardwareIds.push(issue.hardwareId); + if (Array.isArray(issue.hardwareIds)) hardwareIds.push(...issue.hardwareIds); + } + + if (!hardwareIds.length) { + return reply.code(404).send({ message: "No hardware IDs in Long Term Issues" }); + } + + // Fetch related sensors to get customerId + const sensors = await Insensors.find({ + $or: [ + { hardwareId: { $in: hardwareIds } }, + { connected_to: { $in: hardwareIds } } + ] + }).lean(); + + if (!sensors.length) { + return reply.code(404).send({ message: "No sensors found for Long Term hardware" }); + } + + const customerIds = [...new Set(sensors.map(s => s.customerId))]; + + const customers = await User.find({ customerId: { $in: customerIds } }).lean(); + + // Build map of movedAt per hardwareId + const movedAtMap = {}; + for (const issue of longTermIssues) { + const movedAt = issue.movedAt; + if (issue.hardwareId) movedAtMap[issue.hardwareId] = movedAt; + if (Array.isArray(issue.hardwareIds)) { + issue.hardwareIds.forEach(hid => { + movedAtMap[hid] = movedAt; + }); + } + } + + const uniqueCustomerMap = {}; + + for (const user of customers) { + const cid = user.customerId; + + if (!uniqueCustomerMap[cid]) { + const customerSensorHardwareIds = sensors + .filter(s => s.customerId === cid) + .map(s => s.hardwareId || s.tankhardwareId || s.connected_to) + .filter(Boolean); + + const movedTimes = customerSensorHardwareIds + .map(hid => movedAtMap[hid]) + .filter(Boolean) + .sort((a, b) => new Date(b) - new Date(a)); // Descending + + uniqueCustomerMap[cid] = { + customer: { + customerId: cid, + username: user.username || "", + firstName: user.profile?.firstName || user.firstName || "", + lastName: user.profile?.lastName || user.lastName || "", + phone: user.phone || user.profile?.contactNumber || user.alternativeNumber || "", + email: user.emails?.[0]?.email || user.email || "", + phoneVerified: user.phoneVerified || false, + address1: user.profile?.address1 || user.address1 || "", + address2: 
user.profile?.address2 || user.address2 || "", + city: user.profile?.city || user.city || "", + state: user.profile?.state || user.state || "", + country: user.profile?.country || user.country || "", + zip: user.profile?.zip || "", + notes: user.profile?.notes || "", + latitude: user.latitude || 0, + longitude: user.longitude || 0, + fcmIds: (user.fcmIds || []).filter(fcm => typeof fcm === "string" && fcm.startsWith("d")), + installationId: user.installationId || "", + notificationPreferences: { + allowNotifications: user.allowNotifications || false, + automaticStartAndStopNotify: user.automaticStartAndStopNotify || false, + manualStartAndStopNotify: user.manualStartAndStopNotify || false, + criticalLowWaterAlert: user.criticalLowWaterAlert || false, + lowWaterAlert: user.lowWaterAlert || false, + notificationPreference: user.notificationPreference || "never" + }, + surveyStatus: user.survey_status || "pending", + buildingName: user.buildingName || "", + stripePaymentStatus: user.stripePaymentStatus || false, + stripeSubscriptionStatus: user.stripeSubscriptionStatus || false, + createdAt: user.createdAt, + updatedAt: user.updatedAt, + movedAt: movedTimes[0] || null // ⏳ Latest movedAt + } + }; + } + } + + return reply.send({ + status_code: 200, + data: Object.values(uniqueCustomerMap) + }); + + } catch (error) { + console.error("❌ Error in outDoorEscalationCustomerDetails:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.getLongTermIssuesByCustomer = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required in path params" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ message: "Support record not found" }); + + const longTermIssues = (support.categorizedIssues || []).filter(i => i.category === "LongTerm Issues"); + if 
(!longTermIssues.length) return reply.code(404).send({ message: "No Long Term Issues found" }); + + const hardwareIds = [...new Set(longTermIssues.map(i => i.hardwareId).filter(Boolean))]; + const sensors = await Insensors.find({ + customerId, + hardwareId: { $in: hardwareIds } + }).lean(); + + if (!sensors.length) return reply.code(404).send({ message: "No sensors found for this customer" }); + + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; + for (const order of orders) { + for (const conn of order.master_connections || []) { + const trimmedId = (conn.hardwareId || "").trim(); + if (trimmedId) { + orderMap[trimmedId] = { + masterName: conn.master_name?.trim() || "", + location: conn.location?.trim() || "" + }; + } + } + } + + const issueMap = {}; + for (const issue of longTermIssues) { + issueMap[issue.hardwareId] = issue; + } + + const disconnectedIssues = []; + for (const master of sensors.filter(s => s.type === "master")) { + const slaves = await Insensors.find({ connected_to: master.hardwareId, customerId }).lean(); + const latestIotData = await IotData.findOne({ hardwareId: master.hardwareId }).sort({ date: -1 }).lean(); + const now = moment.tz("Asia/Kolkata"); + + let gsmConnected = false; + if (latestIotData?.date) { + const gsmTime = moment.tz(latestIotData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + const slaveDetails = await Promise.all(slaves.map(async (slave) => { + const slaveHardwareId = slave.tankhardwareId?.trim(); + const matchedTank = latestIotData?.tanks?.find(tank => tank.tankhardwareId === slaveHardwareId); + + let loraConnected = false; + if (matchedTank?.date && matchedTank?.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(tankTime, "minutes") <= 1; + } + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + const 
slaveComments = (support.comments || []).filter( + comment => comment.hardwareId === slave.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + return { + hardwareId: slave.tankhardwareId, + tankName: slave.tankName || "", + location: slave.tankLocation || "", + connected_status: loraConnected ? "connected" : "disconnected", + connected_to: slave.connected_to || "", + gsm_last_check_time: slave.gsm_last_check_time || null, + gsm_last_disconnect_time: slave.gsm_last_disconnect_time || null, + lora_last_disconnect_time: slave.lora_last_disconnect_time || null, + connected_gsm_date: slave.connected_gsm_date || "", + connected_gsm_time: slave.connected_gsm_time || "", + connected_lora_date: slave.connected_lora_date || "", + connected_lora_time: slave.connected_lora_time || "", + support_lora_last_check_time: slave.support_lora_last_check_time || null, + masterName: orderMap[master.hardwareId?.trim()]?.masterName || "", + type: "slave", + typeOfWater: tankInfo?.typeOfWater || "", + outDoor_status: slave.outDoor_status || "inprogress" + }; + })); + + const masterComments = (support.comments || []).filter( + comment => comment.hardwareId === master.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + const orderDetails = orderMap[master.hardwareId?.trim()] || {}; + const issue = issueMap[master.hardwareId]; + + disconnectedIssues.push({ + hardwareId: master.hardwareId, + masterName: orderDetails.masterName || "", + location: orderDetails.location || "", + type: "master", + connected_status: gsmConnected ? 
"connected" : "disconnected", + connected_slave_count: slaveDetails.length, + gsm_last_check_time: master.gsm_last_check_time || null, + gsm_last_disconnect_time: master.gsm_last_disconnect_time || null, + lora_last_disconnect_time: master.lora_last_disconnect_time || null, + connected_gsm_date: master.connected_gsm_date || "", + connected_gsm_time: master.connected_gsm_time || "", + connected_lora_date: master.connected_lora_date || "", + connected_lora_time: master.connected_lora_time || "", + support_gm_last_check_time: master.support_gsm_last_check_time || null, + connected_slaves: slaveDetails, + comments: masterComments, + outDoor_status: master.outDoor_status || "inprogress", + movedAt: issue?.movedAt || null, + resolvedAt: issue?.resolvedAt || null, + category: issue?.category || "Uncategorized", + hardwareList: master.hardwareList || {}, + assignedTo: issue?.assignedTo || null + }); + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (err) { + console.error("❌ Error in getLongTermIssuesByCustomer:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.getPowerOutageIssuesByCustomer = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required in path params" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ message: "Support record not found" }); + + const longTermIssues = (support.categorizedIssues || []).filter(i => i.category === "Power Outage"); + if (!longTermIssues.length) return reply.code(404).send({ message: "No Long Term Issues found" }); + + const hardwareIds = [...new Set(longTermIssues.map(i => i.hardwareId).filter(Boolean))]; + const sensors = await Insensors.find({ + customerId, + hardwareId: { $in: 
hardwareIds } + }).lean(); + + if (!sensors.length) return reply.code(404).send({ message: "No sensors found for this customer" }); + + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; + for (const order of orders) { + for (const conn of order.master_connections || []) { + const trimmedId = (conn.hardwareId || "").trim(); + if (trimmedId) { + orderMap[trimmedId] = { + masterName: conn.master_name?.trim() || "", + location: conn.location?.trim() || "" + }; + } + } + } + + const issueMap = {}; + for (const issue of longTermIssues) { + issueMap[issue.hardwareId] = issue; + } + + const disconnectedIssues = []; + for (const master of sensors.filter(s => s.type === "master")) { + const slaves = await Insensors.find({ connected_to: master.hardwareId, customerId }).lean(); + const latestIotData = await IotData.findOne({ hardwareId: master.hardwareId }).sort({ date: -1 }).lean(); + const now = moment.tz("Asia/Kolkata"); + + let gsmConnected = false; + if (latestIotData?.date) { + const gsmTime = moment.tz(latestIotData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + const slaveDetails = await Promise.all(slaves.map(async (slave) => { + const slaveHardwareId = slave.tankhardwareId?.trim(); + const matchedTank = latestIotData?.tanks?.find(tank => tank.tankhardwareId === slaveHardwareId); + + let loraConnected = false; + if (matchedTank?.date && matchedTank?.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(tankTime, "minutes") <= 1; + } + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + const slaveComments = (support.comments || []).filter( + comment => comment.hardwareId === slave.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? 
moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + return { + hardwareId: slave.tankhardwareId, + tankName: slave.tankName || "", + location: slave.tankLocation || "", + connected_status: loraConnected ? "connected" : "disconnected", + connected_to: slave.connected_to || "", + gsm_last_check_time: slave.gsm_last_check_time || null, + gsm_last_disconnect_time: slave.gsm_last_disconnect_time || null, + lora_last_disconnect_time: slave.lora_last_disconnect_time || null, + connected_gsm_date: slave.connected_gsm_date || "", + connected_gsm_time: slave.connected_gsm_time || "", + connected_lora_date: slave.connected_lora_date || "", + connected_lora_time: slave.connected_lora_time || "", + support_lora_last_check_time: slave.support_lora_last_check_time || null, + masterName: orderMap[master.hardwareId?.trim()]?.masterName || "", + type: "slave", + typeOfWater: tankInfo?.typeOfWater || "", + outDoor_status: slave.outDoor_status || "inprogress" + }; + })); + + const masterComments = (support.comments || []).filter( + comment => comment.hardwareId === master.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + const orderDetails = orderMap[master.hardwareId?.trim()] || {}; + const issue = issueMap[master.hardwareId]; + + disconnectedIssues.push({ + hardwareId: master.hardwareId, + masterName: orderDetails.masterName || "", + location: orderDetails.location || "", + type: "master", + connected_status: gsmConnected ? 
"connected" : "disconnected", + connected_slave_count: slaveDetails.length, + gsm_last_check_time: master.gsm_last_check_time || null, + gsm_last_disconnect_time: master.gsm_last_disconnect_time || null, + lora_last_disconnect_time: master.lora_last_disconnect_time || null, + connected_gsm_date: master.connected_gsm_date || "", + connected_gsm_time: master.connected_gsm_time || "", + connected_lora_date: master.connected_lora_date || "", + connected_lora_time: master.connected_lora_time || "", + support_gm_last_check_time: master.support_gsm_last_check_time || null, + connected_slaves: slaveDetails, + comments: masterComments, + outDoor_status: master.outDoor_status || "inprogress", + movedAt: issue?.movedAt || null, + resolvedAt: issue?.resolvedAt || null, + category: issue?.category || "Uncategorized", + hardwareList: master.hardwareList || {}, + assignedTo: issue?.assignedTo || null + }); + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (err) { + console.error("❌ Error in getLongTermIssuesByCustomer:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.getOutDoorEscalationIssuesByCustomer = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required in path params" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ message: "Support record not found" }); + + const longTermIssues = (support.categorizedIssues || []).filter(i => i.category === "OutDoor Escalation"); + if (!longTermIssues.length) return reply.code(404).send({ message: "No Long Term Issues found" }); + + const hardwareIds = [...new Set(longTermIssues.map(i => i.hardwareId).filter(Boolean))]; + const sensors = await Insensors.find({ + customerId, + 
hardwareId: { $in: hardwareIds } + }).lean(); + + if (!sensors.length) return reply.code(404).send({ message: "No sensors found for this customer" }); + + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; + for (const order of orders) { + for (const conn of order.master_connections || []) { + const trimmedId = (conn.hardwareId || "").trim(); + if (trimmedId) { + orderMap[trimmedId] = { + masterName: conn.master_name?.trim() || "", + location: conn.location?.trim() || "" + }; + } + } + } + + const issueMap = {}; + for (const issue of longTermIssues) { + issueMap[issue.hardwareId] = issue; + } + + const disconnectedIssues = []; + for (const master of sensors.filter(s => s.type === "master")) { + const slaves = await Insensors.find({ connected_to: master.hardwareId, customerId }).lean(); + const latestIotData = await IotData.findOne({ hardwareId: master.hardwareId }).sort({ date: -1 }).lean(); + const now = moment.tz("Asia/Kolkata"); + + let gsmConnected = false; + if (latestIotData?.date) { + const gsmTime = moment.tz(latestIotData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + const slaveDetails = await Promise.all(slaves.map(async (slave) => { + const slaveHardwareId = slave.tankhardwareId?.trim(); + const matchedTank = latestIotData?.tanks?.find(tank => tank.tankhardwareId === slaveHardwareId); + + let loraConnected = false; + if (matchedTank?.date && matchedTank?.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(tankTime, "minutes") <= 1; + } + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + const slaveComments = (support.comments || []).filter( + comment => comment.hardwareId === slave.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? 
moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + return { + hardwareId: slave.tankhardwareId, + tankName: slave.tankName || "", + location: slave.tankLocation || "", + connected_status: loraConnected ? "connected" : "disconnected", + connected_to: slave.connected_to || "", + gsm_last_check_time: slave.gsm_last_check_time || null, + gsm_last_disconnect_time: slave.gsm_last_disconnect_time || null, + lora_last_disconnect_time: slave.lora_last_disconnect_time || null, + connected_gsm_date: slave.connected_gsm_date || "", + connected_gsm_time: slave.connected_gsm_time || "", + connected_lora_date: slave.connected_lora_date || "", + connected_lora_time: slave.connected_lora_time || "", + support_lora_last_check_time: slave.support_lora_last_check_time || null, + masterName: orderMap[master.hardwareId?.trim()]?.masterName || "", + type: "slave", + typeOfWater: tankInfo?.typeOfWater || "", + outDoor_status: slave.outDoor_status || "inprogress" + }; + })); + + const masterComments = (support.comments || []).filter( + comment => comment.hardwareId === master.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + const orderDetails = orderMap[master.hardwareId?.trim()] || {}; + const issue = issueMap[master.hardwareId]; + + disconnectedIssues.push({ + hardwareId: master.hardwareId, + masterName: orderDetails.masterName || "", + location: orderDetails.location || "", + type: "master", + connected_status: gsmConnected ? 
"connected" : "disconnected", + connected_slave_count: slaveDetails.length, + gsm_last_check_time: master.gsm_last_check_time || null, + gsm_last_disconnect_time: master.gsm_last_disconnect_time || null, + lora_last_disconnect_time: master.lora_last_disconnect_time || null, + connected_gsm_date: master.connected_gsm_date || "", + connected_gsm_time: master.connected_gsm_time || "", + connected_lora_date: master.connected_lora_date || "", + connected_lora_time: master.connected_lora_time || "", + support_gm_last_check_time: master.support_gsm_last_check_time || null, + connected_slaves: slaveDetails, + comments: masterComments, + outDoor_status: master.outDoor_status || "inprogress", + movedAt: issue?.movedAt || null, + resolvedAt: issue?.resolvedAt || null, + category: issue?.category || "Uncategorized", + hardwareList: master.hardwareList || {}, + assignedTo: issue?.assignedTo || null + }); + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (err) { + console.error("❌ Error in getLongTermIssuesByCustomer:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +// const bcrypt = require("bcrypt"); + +exports.createTeamMemberSupport = async (req, reply) => { + try { + const { supportId } = req.params; + + const c_id = await generateTeamMemberId(); + const support_teamMemberId = `AWTMSU${c_id}`; + + const { + name, + phone, + installationTeamMemId, + password, + email, + alternativePhone, + status + } = req.body; + + if (!supportId || !support_teamMemberId || !name || !phone || !password) { + return reply.code(400).send({ error: "Missing required fields" }); + } + + const supportRecord = await Support.findOne({ supportId }); + + if (!supportRecord) { + return reply.code(404).send({ error: "Support record not found" }); + } + + const existingMember = supportRecord.team_member?.team_member.find( + member => member.phone === phone || 
member.support_teamMemberId === support_teamMemberId + ); + + if (existingMember) { + return reply.code(400).send({ error: "Team member with this phone or ID already exists" }); + } + + const hashedPassword = await bcrypt.hash(password, 10); + + const newTeamMember = { + support_teamMemberId, + name, + phone, + installationTeamMemId, + password: hashedPassword, + status: status || "active", + email: email || null, + alternativePhone: alternativePhone || null, + }; + + await Support.findOneAndUpdate( + { supportId }, + { + $push: { + "team_member.team_member": newTeamMember + }, + $set: { + updatedAt: new Date() + } + }, + { new: true } + ); + + return reply.send({ + status_code: 200, + message: "Team member added successfully", + teamMember: newTeamMember + }); + + } catch (error) { + console.error("Error adding team member:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + +exports.getAllTeamMembersListSupport = async (req, reply) => { + try { + const { supportId } = req.params; + + const support = await Support.findOne({ supportId }); + + if (!support) { + return reply.code(404).send({ error: "Support record not found" }); + } + + const teamMembers = support.team_member?.team_member || []; + + return reply.send({ + status_code: 200, + message: "Team members fetched successfully", + count: teamMembers.length, + teamMembers + }); + } catch (error) { + console.error("Error fetching team members:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +} +exports.updateTeamMemberSupport = async (req, reply) => { + try { + const { supportId, teamMemberId } = req.params; + const updateData = req.body; + + const support = await Support.findOne({ supportId }); + + if (!support) { + return reply.code(404).send({ error: "Support record not found" }); + } + + const teamMembers = support.team_member?.team_member || []; + const memberIndex = teamMembers.findIndex(m => m.support_teamMemberId === teamMemberId); + + 
if (memberIndex === -1) { + return reply.code(404).send({ error: "Team member not found" }); + } + + Object.assign(teamMembers[memberIndex], updateData); + + await Support.updateOne( + { supportId }, + { + $set: { + "team_member.team_member": teamMembers, + updatedAt: new Date() + } + } + ); + + return reply.send({ + status_code: 200, + message: "Team member updated successfully", + teamMember: teamMembers[memberIndex] + }); + } catch (error) { + console.error("Error updating team member:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + +exports.deleteTeamMemberSupport = async (req, reply)=> { + try { + const { supportId, teamMemberId } = req.params; + + const support = await Support.findOne({ supportId }); + + if (!support) { + return reply.code(404).send({ error: "Support record not found" }); + } + + const originalLength = support.team_member?.team_member.length || 0; + + const updatedTeam = support.team_member?.team_member.filter( + m => m.support_teamMemberId !== teamMemberId + ); + + if (originalLength === updatedTeam.length) { + return reply.code(404).send({ error: "Team member not found" }); + } + + await Support.updateOne( + { supportId }, + { + $set: { + "team_member.team_member": updatedTeam, + updatedAt: new Date() + } + } + ); + + return reply.send({ + status_code: 200, + message: "Team member deleted successfully" + }); + } catch (error) { + console.error("Error deleting team member:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +} + +exports.moveIssueToCategory = async (req, reply) => { + try { + const { supportId } = req.params; + const { category, hardwareId } = req.body; + + if (!supportId || !category || !hardwareId) { + return reply.code(400).send({ + message: "supportId (path), category and hardwareId (body) are required", + }); + } + + const support = await Support.findOne({ supportId }); + if (!support) { + return reply.code(404).send({ message: "Support record not 
found" }); + } + + let issueMoved = false; + const nowTime = moment().tz("Asia/Kolkata").format("YYYY-MM-DD HH:mm:ss"); + + // Ensure categorizedIssues exists + if (!Array.isArray(support.categorizedIssues)) { + support.categorizedIssues = []; + } + + // Step 1: Try moving from support.issues (original logic) + const index = support.issues.findIndex((issue) => { + if (issue.hardwareId === hardwareId) return true; + if (Array.isArray(issue.hardwareIds) && issue.hardwareIds.includes(hardwareId)) return true; + return false; + }); + + if (index !== -1) { + const issue = support.issues[index]; + + // Normalize type if needed + const normalizedType = + issue.type === "GSM or LoRa Disconnected" ? "GSM Disconnected" : issue.type; + + if (issue.hardwareId === hardwareId) { + // Master issue + issue.movedToCategory = true; + + support.categorizedIssues.push({ + type: normalizedType, + hardwareId: issue.hardwareId, + masterHardwareId: issue.masterHardwareId || issue.hardwareId, + category, + movedAt: nowTime, + movedToCategory: true, + }); + + support.issues.splice(index, 1); + issueMoved = true; + } else { + // Slave issue + const slaveIndex = issue.hardwareIds.indexOf(hardwareId); + if (slaveIndex !== -1) { + const slaveName = issue.slaveNames?.[slaveIndex] || "Unknown"; + + support.categorizedIssues.push({ + type: normalizedType, + hardwareId, + masterHardwareId: issue.masterHardwareId || issue.hardwareId, + slaveName, + category, + movedAt: nowTime, + movedToCategory: true, + }); + + issue.hardwareIds.splice(slaveIndex, 1); + issue.slaveNames.splice(slaveIndex, 1); + + if (issue.hardwareIds.length === 0) { + support.issues.splice(index, 1); + } + + issueMoved = true; + } + } + } + + // Step 2: If not found in support.issues, try to update existing categorized issue + if (!issueMoved) { + const categorized = support.categorizedIssues.find( + (i) => i.hardwareId === hardwareId + ); + + if (categorized) { + categorized.category = category; + categorized.movedAt = nowTime; + 
await support.save(); + return reply.send({ message: "Category updated for already categorized issue" }); + } + + return reply.code(404).send({ message: "No matching issue found to move or update" }); + } + + // Cleanup duplicates + support.issues = support.issues.filter((i) => { + const isMaster = i.hardwareId === hardwareId; + const isSlave = Array.isArray(i.hardwareIds) && i.hardwareIds.includes(hardwareId); + return !isMaster && !isSlave; + }); + + await support.save(); + return reply.send({ message: "Issue moved to category successfully" }); + } catch (err) { + console.error("Error moving issue:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + + +exports.particularCategory = async (req, reply) => { + try { + const { supportId, category } = req.params; + const { customerId: queryCustomerId } = req.query; + + if (!supportId || !category) { + return reply.code(400).send({ error: "supportId and category are required" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ message: "Support record not found" }); + + const issues = (category === "Resolved" ? 
support.resolvedIssues : support.categorizedIssues || []) + .filter(issue => issue.category === category); + if (!issues.length) return reply.code(404).send({ message: `No issues found for category: ${category}` }); + + const hardwareIds = [...new Set(issues.map(issue => issue.hardwareId).filter(Boolean))]; + if (!hardwareIds.length) return reply.code(404).send({ message: "No hardware IDs found for these issues" }); + + let customerId = queryCustomerId; + if (!customerId) { + const sensorDoc = await Insensors.findOne({ hardwareId: { $in: hardwareIds } }).lean(); + if (!sensorDoc || !sensorDoc.customerId) return reply.code(404).send({ message: "Customer ID not found" }); + customerId = sensorDoc.customerId; + } + + const allRelatedSensors = await Insensors.find({ + customerId, + hardwareId: { $in: hardwareIds } + }).lean(); + + if (!allRelatedSensors.length) return reply.code(404).send({ message: "No sensors found" }); + + //const orders = await Order.find({ customerId }).lean(); +let orders = await Order.find({ customerId }).lean(); +if (!orders.length) { + const fallbackOrder = await Order.findOne({ "master_connections.hardwareId": { $in: hardwareIds } }).lean(); + if (fallbackOrder) { + orders = [fallbackOrder]; + console.log("⚠️ Used fallback Order based on hardwareId match"); + } +} + + const orderMap = {}; + orders.forEach(order => { + if (!Array.isArray(order.master_connections)) return; + order.master_connections.forEach(conn => { + const trimmedId = (conn.hardwareId || "").trim(); + if (!trimmedId) return; + orderMap[trimmedId] = { + masterName: conn.master_name?.trim() || "", + location: conn.location?.trim() || "" + }; + }); + }); + + console.log("🧭 Mapped orderMap keys:", Object.keys(orderMap)); + + const issueMap = {}; + issues.forEach(issue => { + issueMap[issue.hardwareId] = issue; + }); + + const disconnectedIssues = []; + const allMasters = allRelatedSensors.filter(i => i.type === "master"); + + for (const master of allMasters) { + const slaves = 
await Insensors.find({ connected_to: master.hardwareId, customerId }).lean(); + const latestIotData = await IotData.findOne({ hardwareId: master.hardwareId }).sort({ date: -1 }).lean(); + + const now = moment.tz("Asia/Kolkata"); + let gsmConnected = false; + + if (latestIotData?.date) { + const gsmTime = moment.tz(latestIotData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + await Insensors.updateOne( + { hardwareId: master.hardwareId }, + { $set: { connected_status: gsmConnected ? "connected" : "disconnected" } } + ); + + const slaveDetails = await Promise.all(slaves.map(async (slave) => { + const slaveHardwareId = slave.tankhardwareId?.trim(); + const matchedTank = latestIotData?.tanks?.find(tank => tank.tankhardwareId === slaveHardwareId); + + let loraConnected = false; + if (matchedTank?.date && matchedTank?.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(tankTime, "minutes") <= 1; + } + + await Insensors.updateOne( + { tankhardwareId: slaveHardwareId }, + { $set: { connected_status: loraConnected ? "connected" : "disconnected" } } + ); + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + const slaveComments = (support.comments || []) + .filter(comment => + comment.hardwareId === slave.hardwareId && + comment.customerId === customerId + ) + .map(c => ({ + text: c.text, + commentsTime: c.createdAt + ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") + : null + })); + + return { + hardwareId: slave.tankhardwareId, + tankName: slave.tankName || "", + location: slave.tankLocation || "", + connected_status: loraConnected ? 
"connected" : "disconnected", + connected_to: slave.connected_to || "", + gsm_last_check_time: slave.gsm_last_check_time || null, + gsm_last_disconnect_time: slave.gsm_last_disconnect_time || null, + lora_last_disconnect_time: slave.lora_last_disconnect_time || null, + connected_gsm_date: slave.connected_gsm_date || "", + connected_gsm_time: slave.connected_gsm_time || "", + connected_lora_date: slave.connected_lora_date || "", + connected_lora_time: slave.connected_lora_time || "", + support_lora_last_check_time: slave.support_lora_last_check_time || null, + masterName: orderMap[master.hardwareId?.trim()]?.masterName || "", + type: "slave", + typeOfWater: tankInfo?.typeOfWater || "", + outDoor_status: slave.outDoor_status || "inprogress" + }; + })); + + const masterComments = (support.comments || []) + .filter(comment => + comment.hardwareId === master.hardwareId && + comment.customerId === customerId + ) + .map(c => ({ + text: c.text, + call_status: c.call_status, + call_time: c.call_time, + commentsTime: c.createdAt + ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") + : null + })); + + + + + const trimmedMasterId = (master.hardwareId || "").trim(); + const orderDetails = orderMap[trimmedMasterId] || {}; + console.log("📦 Resolved orderDetails for", trimmedMasterId, ":", orderDetails); + + const issue = issueMap[master.hardwareId]; + + disconnectedIssues.push({ + hardwareId: master.hardwareId, + masterName: orderDetails.masterName || "", + location: orderDetails.location || "", + type: "master", + connected_status: gsmConnected ? 
"connected" : "disconnected", + connected_slave_count: slaveDetails.length, + gsm_last_check_time: master.gsm_last_check_time || null, + gsm_last_disconnect_time: master.gsm_last_disconnect_time || null, + lora_last_disconnect_time: master.lora_last_disconnect_time || null, + connected_gsm_date: master.connected_gsm_date || "", + connected_gsm_time: master.connected_gsm_time || "", + connected_lora_date: master.connected_lora_date || "", + connected_lora_time: master.connected_lora_time || "", + support_gm_last_check_time: master.support_gsm_last_check_time || null, + connected_slaves: slaveDetails, + comments: masterComments, + // callRecord: masterCallRecords, + outDoor_status: master.outDoor_status || "inprogress", + movedAt: category !== "Resolved" ? (issue?.movedAt || null) : null, + resolvedAt: category === "Resolved" ? (issue?.resolvedAt || null) : null, + category: issue?.category || category, + hardwareList: master.hardwareList || {}, + assignedTo: issue?.assignedTo || null + }); + } + + return reply.send({ + status_code: 200, + supportId, + customerId, + totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (err) { + console.error("❌ Error in particularCategory:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + +exports.sendToStoreHardwareList = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + const { storeId, type } = req.body; + + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required in path params" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ message: "Support record not found" }); + + const issues = [...(support.categorizedIssues || []), ...(support.resolvedIssues || [])]; + if (!issues.length) return reply.code(404).send({ message: "No issues found for this support ID" }); + + const hardwareIds = [...new Set(issues.map(issue => 
issue.hardwareId).filter(Boolean))]; + if (!hardwareIds.length) return reply.code(404).send({ message: "No hardware IDs found for these issues" }); + + const allRelatedSensors = await Insensors.find({ + customerId, + hardwareId: { $in: hardwareIds } + }).lean(); + + if (!allRelatedSensors.length) return reply.code(404).send({ message: "No sensors found" }); + + let orders = []; + if (storeId) { + orders = await Order.find({ customerId, storeId }).lean(); + } else { + orders = await Order.find({ customerId }).lean(); + } + + if (!orders.length) { + const fallbackOrder = await Order.findOne({ "master_connections.hardwareId": { $in: hardwareIds } }).lean(); + if (fallbackOrder) orders = [fallbackOrder]; + } + + const orderMap = {}; + orders.forEach(order => { + (order.master_connections || []).forEach(conn => { + const trimmedId = (conn.hardwareId || "").trim(); + if (trimmedId) { + orderMap[trimmedId] = { + masterName: conn.master_name?.trim() || "", + location: conn.location?.trim() || "" + }; + } + }); + }); + + const issueMap = {}; + issues.forEach(issue => { + issueMap[issue.hardwareId] = issue; + }); + + const disconnectedIssues = []; + const allMasters = allRelatedSensors.filter(i => i.type === "master"); + + for (const master of allMasters) { + const slaves = await Insensors.find({ connected_to: master.hardwareId, customerId }).lean(); + const latestIotData = await IotData.findOne({ hardwareId: master.hardwareId }).sort({ date: -1 }).lean(); + const now = moment.tz("Asia/Kolkata"); + + let gsmConnected = false; + if (latestIotData?.date) { + const gsmTime = moment.tz(latestIotData.date, "Asia/Kolkata"); + gsmConnected = now.diff(gsmTime, "minutes") <= 1; + } + + await Insensors.updateOne( + { hardwareId: master.hardwareId.trim() }, + { + $set: { + connected_status: gsmConnected ? 
"connected" : "disconnected", + outDoor_status: "inprogress at store" + } + } + ); + + const updatedMaster = await Insensors.findOne({ hardwareId: master.hardwareId.trim() }).lean(); + + const slaveDetails = await Promise.all(slaves.map(async (slave) => { + const slaveHardwareId = slave.tankhardwareId?.trim(); + const matchedTank = latestIotData?.tanks?.find(tank => tank.tankhardwareId === slaveHardwareId); + + let loraConnected = false; + if (matchedTank?.date && matchedTank?.tankHeight !== "0") { + const tankTime = moment.tz(matchedTank.date, "Asia/Kolkata"); + loraConnected = now.diff(tankTime, "minutes") <= 1; + } + + await Insensors.updateOne( + { tankhardwareId: slaveHardwareId }, + { + $set: { + connected_status: loraConnected ? "connected" : "disconnected", + outDoor_status: "inprogress at store" + } + } + ); + + const updatedSlave = await Insensors.findOne({ tankhardwareId: slaveHardwareId }).lean(); + + const tankInfo = await Tank.findOne({ + $or: [ + { hardwareId: slaveHardwareId }, + { tankhardwareId: slaveHardwareId } + ] + }).lean(); + + return { + hardwareId: updatedSlave.tankhardwareId, + tankName: updatedSlave.tankName || "", + location: updatedSlave.tankLocation || "", + connected_status: updatedSlave.connected_status, + connected_to: updatedSlave.connected_to || "", + gsm_last_check_time: updatedSlave.gsm_last_check_time || null, + gsm_last_disconnect_time: updatedSlave.gsm_last_disconnect_time || null, + lora_last_disconnect_time: updatedSlave.lora_last_disconnect_time || null, + connected_gsm_date: updatedSlave.connected_gsm_date || "", + connected_gsm_time: updatedSlave.connected_gsm_time || "", + connected_lora_date: updatedSlave.connected_lora_date || "", + connected_lora_time: updatedSlave.connected_lora_time || "", + support_lora_last_check_time: updatedSlave.support_lora_last_check_time || null, + masterName: orderMap[master.hardwareId?.trim()]?.masterName || "", + type: "slave", + typeOfWater: tankInfo?.typeOfWater || "", + 
outDoor_status: updatedSlave.outDoor_status + }; + })); + + const masterComments = (support.comments || []).filter( + comment => comment.hardwareId === master.hardwareId && comment.customerId === customerId + ).map(c => ({ + text: c.text, + commentsTime: c.createdAt ? moment(c.createdAt).tz("Asia/Kolkata").format("DD-MM-YYYY HH:mm") : null + })); + + const trimmedMasterId = (master.hardwareId || "").trim(); + const orderDetails = orderMap[trimmedMasterId] || {}; + const issue = issueMap[master.hardwareId]; + + disconnectedIssues.push({ + hardwareId: updatedMaster.hardwareId, + masterName: orderDetails.masterName || "", + location: orderDetails.location || "", + type: "master", + connected_status: updatedMaster.connected_status, + connected_slave_count: slaveDetails.length, + gsm_last_check_time: updatedMaster.gsm_last_check_time || null, + gsm_last_disconnect_time: updatedMaster.gsm_last_disconnect_time || null, + lora_last_disconnect_time: updatedMaster.lora_last_disconnect_time || null, + connected_gsm_date: updatedMaster.connected_gsm_date || "", + connected_gsm_time: updatedMaster.connected_gsm_time || "", + connected_lora_date: updatedMaster.connected_lora_date || "", + connected_lora_time: updatedMaster.connected_lora_time || "", + support_gm_last_check_time: updatedMaster.support_gsm_last_check_time || null, + connected_slaves: slaveDetails, + comments: masterComments, + outDoor_status: updatedMaster.outDoor_status, + movedAt: issue?.movedAt || null, + resolvedAt: issue?.resolvedAt || null, + category: issue?.category || "Uncategorized", + hardwareList: updatedMaster.hardwareList || {}, + assignedTo: issue?.assignedTo || null + }); + } + + const updatedHardwareIds = disconnectedIssues.map(item => item.hardwareId); + await Order.updateMany( + { + customerId, + "master_connections.hardwareId": { $in: updatedHardwareIds } + }, + { + $set: { + storeId, + type: type + } + } + ); + + return reply.send({ + status_code: 200, + supportId, + customerId, + storeId, + 
totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (err) { + console.error("❌ Error in sendToStoreHardwareList:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + +exports.updateHardwareList = async (req, reply) => { + try { + const { supportId, customerId, hardwareId } = req.params; + const { hardwareList } = req.body; + + if (!supportId || !customerId || !hardwareId) { + return reply.code(400).send({ error: "supportId, customerId, and hardwareId are required" }); + } + + if (!Array.isArray(hardwareList)) { + return reply.code(400).send({ error: "hardwareList must be an array of objects" }); + } + + const support = await Support.findOne({ supportId }).lean(); + if (!support) return reply.code(404).send({ error: "Support record not found" }); + + const escalationIssue = (support.categorizedIssues || []).find(issue => + issue.category === "OutDoor Escalation" && issue.hardwareId === hardwareId + ); + + if (!escalationIssue) { + return reply.code(403).send({ + error: "Escalation issue not found. Cannot update hardware list." 
+ }); + } + + const sensor = await Insensors.findOne({ customerId, hardwareId }); + if (!sensor) { + return reply.code(404).send({ error: "Insensor not found" }); + } + + sensor.hardwareList = hardwareList; + sensor.markModified('hardwareList'); + await sensor.save(); + + return reply.send({ + status_code: 200, + message: "Hardware list updated successfully", + data: { + supportId, + customerId, + hardwareId, + hardwareList: sensor.hardwareList // ← array format + } +}); + + } catch (err) { + console.error("Error updating hardwareList:", err); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + + + + + + +exports.assignCategorizeIssue = async (request, reply) => { + const { supportId } = request.params; + const { support_teamMemberId, category, masterHardwareId } = request.body; + + if (!support_teamMemberId || !category || !masterHardwareId) { + return reply.code(400).send({ + error: 'support_teamMemberId, category, and masterHardwareId are required' + }); + } + + const support = await Support.findOne({ supportId }); + if (!support) { + return reply.code(404).send({ error: 'Support record not found' }); + } + + const teamMembers = support.team_member?.team_member || []; + const teamMember = teamMembers.find(m => m.support_teamMemberId === support_teamMemberId); + + if (!teamMember) { + return reply.code(400).send({ error: `Team member ID ${support_teamMemberId} not found` }); + } + + const assignedAt = moment().format("DD-MM-YYYY HH:mm:ss"); +const assignmentCode = Math.floor(100000 + Math.random() * 900000).toString(); // random 6-digit code + let assignedCount = 0; + + support.categorizedIssues.forEach(issue => { + if ( + issue.masterHardwareId === masterHardwareId && + issue.category === category + ) { + issue.assignedTo = { + name: teamMember.name, + support_teamMemberId: teamMember.support_teamMemberId, + phone: teamMember.phone, + email: teamMember.email, + assignedAt: assignedAt, + assignmentCode: assignmentCode // ← Add this 
field + }; + assignedCount++; + } + }); + + if (assignedCount === 0) { + return reply.code(404).send({ message: 'No matching issues found for assignment' }); + } + + await support.save(); + + return reply.send({ + message: `Assigned ${assignedCount} categorized issue(s) to ${teamMember.name}`, + assignmentCode: assignmentCode, // ← Return the code in response + assignedTo: { + support_teamMemberId: teamMember.support_teamMemberId, + name: teamMember.name, + phone: teamMember.phone, + email: teamMember.email, + assignedAt: assignedAt, + } + }); +}; + + + + + +exports.getCategorizedIssue = async (request, reply) => { + try { + const { support_teamMemberId, customerId } = request.params; + + if (!support_teamMemberId || !customerId) { + return reply.code(400).send({ error: "support_teamMemberId and customerId are required" }); + } + + const supportRecords = await Support.find({ + 'categorizedIssues.assignedTo.support_teamMemberId': support_teamMemberId + }).lean(); + + if (!supportRecords.length) { + return reply.code(404).send({ message: 'No categorized issues assigned to this team member.' }); + } + + const allIssues = supportRecords.flatMap(s => + s.categorizedIssues + .filter(i => i.assignedTo?.support_teamMemberId === support_teamMemberId) + .map(i => ({ ...i, supportId: s.supportId })) + ); + + if (!allIssues.length) { + return reply.code(404).send({ message: 'No categorized issues found for this team member.' }); + } + + const supportId = allIssues[0].supportId; + const hardwareIds = allIssues.map(i => i.hardwareId).filter(Boolean); + const masterHardwareIds = allIssues.map(i => i.masterHardwareId).filter(Boolean); + + const insensors = await Insensors.find({ + customerId, + $or: [ + { hardwareId: { $in: hardwareIds } }, + { hardwareId: { $in: masterHardwareIds } } + ], + connected_status: "disconnected" + }).lean(); + + if (!insensors.length) { + return reply.code(404).send({ message: "No disconnected devices found for this customer and team member." 
}); + } + + const orders = await Order.find({ customerId }).lean(); + const orderMap = {}; + orders.forEach(order => { + order.master_connections?.forEach(conn => { + orderMap[conn.hardwareId] = { + masterName: conn.master_name || null, + location: conn.location || null + }; + }); + }); + + const formatDateIfValid = (value) => { + const date = moment(value, "DD-MM-YYYY HH:mm:ss", true); + return date.isValid() ? date.format("DD-MM-YYYY HH:mm:ss") : null; + }; + + const formatDateIfValidShort = (date, time) => { + if (!date || !time) return null; + const dateTime = `${date} ${time}`; + const parsed = moment(dateTime, "DD-MM-YYYY HH:mm:ss", true); + return parsed.isValid() ? parsed.format("DD-MM-YYYY HH:mm:ss") : null; + }; + + const disconnectedIssues = []; + const masters = insensors.filter(d => d.type === "master"); + + for (const master of masters) { + const masterIssue = allIssues.find(i => i.masterHardwareId === master.hardwareId); + if (!masterIssue) continue; + + const slaves = await Insensors.find({ + customerId, + connected_to: master.hardwareId, + connected_status: "disconnected", + type: "slave" + }).lean(); + + const slaveDetails = []; + + for (const slave of slaves) { + const issue = allIssues.find(i => + i.hardwareId === slave.hardwareId || + (Array.isArray(i.hardwareIds) && i.hardwareIds.includes(slave.hardwareId)) + ); + + slaveDetails.push({ + hardwareId: slave.tankhardwareId, + tankName: slave.tankName || "", + location: slave.location || "", + connected_status: slave.connected_status, + team_member_support_lora_last_check_time: slave.team_member_support_lora_last_check_time || null, + lora_last_disconnect_time: formatDateIfValid(slave.lora_last_disconnect_time) || + formatDateIfValidShort(slave.connected_lora_date, slave.connected_lora_time), + connected_to: slave.connected_to || "", + masterName: orderMap[master.hardwareId]?.masterName || "", + type: "slave", + typeOfWater: "", + support_lora_last_check_time: null, + category: issue?.category || 
"", + startDate: issue?.assignedTo?.startDate + ? moment(issue.assignedTo.startDate).format("YYYY-MM-DD HH:mm:ss") + : null, + endDate: issue?.assignedTo?.endDate + ? moment(issue.assignedTo.endDate).format("YYYY-MM-DD HH:mm:ss") + : null + }); + } + + disconnectedIssues.push({ + hardwareId: master.hardwareId, + masterName: orderMap[master.hardwareId]?.masterName || "", + location: orderMap[master.hardwareId]?.location || "", + type: "master", + connected_status: master.connected_status, + gsm_last_disconnect_time: formatDateIfValid(master.gsm_last_disconnect_time) || + formatDateIfValidShort(master.connected_gsm_date, master.connected_gsm_time), + support_gsm_last_check_time: null, + team_member_support_gsm_last_check_time: master.team_member_support_gsm_last_check_time || null, + connected_slave_count: slaveDetails.length, + connected_slaves: slaveDetails, + category: masterIssue?.category || "", + startDate: masterIssue?.assignedTo?.startDate + ? moment(masterIssue.assignedTo.startDate).format("YYYY-MM-DD HH:mm:ss") + : null, + endDate: masterIssue?.assignedTo?.endDate + ? moment(masterIssue.assignedTo.endDate).format("YYYY-MM-DD HH:mm:ss") + : null + }); + } + + return reply.send({ + status_code: 200, + supportId, + totalMasters: disconnectedIssues.length, + disconnectedIssues + }); + + } catch (error) { + console.error("Error in getCategorizedIssue:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + +exports.StatusTeamMember = async (request, reply) => { + try { + const { support_teamMemberId } = request.params; + const { status } = request.body; + + if ( !support_teamMemberId || !status) { + return reply.code(400).send({ + success: false, + message: " support_teamMemberId, and status are required." 
+ }); + } + + const result = await Support.findOneAndUpdate( + { 'team_member.team_member.support_teamMemberId': support_teamMemberId }, + { $set: { 'team_member.team_member.$.status': status } }, + { new: true } + ); + + if (!result) { + return reply.code(404).send({ + success: false, + message: "Team member not found with given supportId and support_teamMemberId." + }); + } + + return reply.send({ + success: true, + message: "Team member status updated successfully.", + updatedMember: result.team_member.team_member.find( + tm => tm.support_teamMemberId === support_teamMemberId + ) + }); + + } catch (error) { + console.error("Error updating team member status:", error); + return reply.code(500).send({ + success: false, + message: "Internal Server Error" + }); + } +}; + + +exports.updateComments = async (req, reply) => { + try { + const { supportId } = req.params; + const { comments, customerId, hardwareId, call_status, call_time } = req.body; + + if (!supportId || !customerId || !hardwareId ) { + return reply.code(400).send({ error: "supportId, customerId, hardwareId are required" }); + } + + const trimmedComment = typeof comments === "string" ? 
comments.trim() : ""; + if (!trimmedComment) { + return reply.code(400).send({ error: "comments must be a non-empty string" }); + } + + // Step 1: Validate sensor + const sensor = await Insensors.findOne({ + customerId, + $or: [{ hardwareId }, { tankhardwareId: hardwareId }] + }).lean(); + + if (!sensor) { + return reply.code(404).send({ error: "No sensor found with this hardwareId for this customerId" }); + } + + // Step 2: Load support record + const supportRecord = await Support.findOne({ supportId }); + if (!supportRecord) { + return reply.code(404).send({ error: "Support record not found" }); + } + + // Step 3: Check whether the hardwareId exists in issues or categorizedIssues + const issueExists = + supportRecord.issues?.some( + (issue) => issue.hardwareId === hardwareId || issue.masterHardwareId === hardwareId + ) || + supportRecord.categorizedIssues?.some( + (issue) => issue.hardwareId === hardwareId || issue.masterHardwareId === hardwareId + ); + + if (!issueExists) { + return reply.code(404).send({ error: "HardwareId not found in issues or categorizedIssues for this support" }); + } + + // Step 4: Add comment + const commentObj = { + text: trimmedComment, + call_status: call_status?.trim() || "Call not tried", + call_time, + customerId, + hardwareId, + createdAt: new Date() + }; + + supportRecord.comments = supportRecord.comments || []; + supportRecord.comments.push(commentObj); + + + + // Save support record + await supportRecord.save(); + + return reply.send({ + message: "Comment and call record added successfully", + comment: { + ...commentObj, + createdAt: moment(commentObj.createdAt).format("DD-MM-YYYY HH:mm") + } + }); + } catch (error) { + console.error("Error updating comments/callRecord:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; + + + + +exports.resolvedIssuesForSupport = async (req, reply) => { + try { + const { supportId } = req.params; + const { category, hardwareId } = req.body; + + if (!supportId || 
!category || !hardwareId) { + return reply.code(400).send({ + message: "supportId (path), category and hardwareId (body) are required", + }); + } + + if (!["OutDoor Escalation", "LongTerm Issues"].includes(category)) { + return reply.code(400).send({ + message: "Invalid category. Only 'Escalation' or 'Pending' are allowed", + }); + } + + const support = await Support.findOne({ supportId }); + if (!support) { + return reply.code(404).send({ message: "Support record not found" }); + } + + if (!Array.isArray(support.categorizedIssues)) support.categorizedIssues = []; + if (!Array.isArray(support.resolvedIssues)) support.resolvedIssues = []; + + const nowTime = moment().tz("Asia/Kolkata").format("YYYY-MM-DD HH:mm:ss"); + + // Try to find issue in categorizedIssues + let categorizedIndex = support.categorizedIssues.findIndex( + (issue) => issue.hardwareId === hardwareId + ); + + // Try to find issue in resolvedIssues + let resolvedIndex = support.resolvedIssues.findIndex( + (issue) => issue.hardwareId === hardwareId + ); + + // Get the issue object from categorized or resolved + let issue = + categorizedIndex !== -1 + ? support.categorizedIssues[categorizedIndex] + : resolvedIndex !== -1 + ? 
support.resolvedIssues[resolvedIndex] + : null; + + if (!issue) { + return reply.code(404).send({ + message: `No issue found with hardwareId: ${hardwareId}`, + }); + } + + const masterHardwareId = issue.masterHardwareId || issue.hardwareId; + + // Fetch master and slaves + const master = await Insensors.findOne({ hardwareId: masterHardwareId }).lean(); + if (!master) { + return reply + .code(404) + .send({ message: `Master device not found with hardwareId ${masterHardwareId}` }); + } + + const slaves = await Insensors.find({ connected_to: masterHardwareId }).lean(); + + const allConnected = + master.connected_status === "connected" && + slaves.every((s) => s.connected_status === "connected"); + + if (allConnected) { + // If fully connected + + if (resolvedIndex === -1) { + // Add to resolvedIssues only if not already present + support.resolvedIssues.push({ + type: issue.type, + hardwareId: issue.hardwareId, + masterHardwareId, + //ticketId: issue.ticketId, + category: "Resolved", + resolvedAt: nowTime, + originalMovedAt: issue.movedAt || null, + movedToCategory: true, + slaveName: issue.slaveName || null, + currentlyResolved: true, + invalidatedAt: null, + }); + } else { + // If already in resolvedIssues, just mark as currently resolved again + support.resolvedIssues[resolvedIndex].currentlyResolved = true; + support.resolvedIssues[resolvedIndex].invalidatedAt = null; + support.resolvedIssues[resolvedIndex].resolvedAt = nowTime; + } + + // Remove from categorizedIssues if present + if (categorizedIndex !== -1) { + support.categorizedIssues.splice(categorizedIndex, 1); + } + + await support.save(); + return reply.send({ message: "Issue moved to resolved category successfully" }); + } else { + // One or more devices still disconnected + + // Mark in resolvedIssues as not currently resolved, don't remove it + if (resolvedIndex !== -1) { + support.resolvedIssues[resolvedIndex].currentlyResolved = false; + support.resolvedIssues[resolvedIndex].invalidatedAt = nowTime; + 
} + + // Move or update issue in categorizedIssues + if (categorizedIndex === -1) { + // Not in categorized — add with category from body + support.categorizedIssues.push({ + type: issue.type, + hardwareId: issue.hardwareId, + masterHardwareId, + //ticketId: issue.ticketId, + category: category, + movedAt: nowTime, + movedToCategory: true, + slaveName: issue.slaveName || null, + }); + } else { + // Already in categorized — update if category has changed + if (support.categorizedIssues[categorizedIndex].category !== category) { + support.categorizedIssues[categorizedIndex].category = category; + support.categorizedIssues[categorizedIndex].movedAt = nowTime; + support.categorizedIssues[categorizedIndex].movedToCategory = true; + } + } + + await support.save(); + return reply.send({ + message: `Master or some slaves are disconnected. Issue moved/kept in '${category}' category.`, + }); + } + } catch (err) { + console.error("Error in resolvedIssuesForSupport:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + +exports.resolveIssueIfAllConnected = async (req, reply) => { + try { + const { supportId } = req.params; + const { hardwareId, reason, category } = req.body; + + if (!supportId || !hardwareId || !reason) { + return reply.code(400).send({ + message: "supportId (path), hardwareId and reason (body) are required", + }); + } + + const support = await Support.findOne({ supportId }); + if (!support) { + return reply.code(404).send({ message: "Support record not found" }); + } + + if (!Array.isArray(support.categorizedIssues)) support.categorizedIssues = []; + if (!Array.isArray(support.resolvedIssues)) support.resolvedIssues = []; + + const categorizedIndex = support.categorizedIssues.findIndex( + (issue) => issue.hardwareId === hardwareId + ); + + if (categorizedIndex === -1) { + return reply.code(404).send({ + message: `No categorized issue found for hardwareId: ${hardwareId}`, + }); + } + + const issue = 
support.categorizedIssues[categorizedIndex]; + const masterHardwareId = issue.masterHardwareId || issue.hardwareId; + + const master = await Insensors.findOne({ hardwareId: masterHardwareId }).lean(); + if (!master) { + return reply + .code(404) + .send({ message: `Master device not found with hardwareId ${masterHardwareId}` }); + } + + const slaves = await Insensors.find({ connected_to: masterHardwareId }).lean(); + + const allConnected = + master.connected_status === "connected" && + slaves.every((s) => s.connected_status === "connected"); + + if (!allConnected) { + return reply.code(400).send({ + message: "Not all devices are connected. Cannot resolve issue.", + }); + } + + const nowTime = moment().tz("Asia/Kolkata").format("YYYY-MM-DD HH:mm:ss"); + + support.resolvedIssues.push({ + type: issue.type, + hardwareId: issue.hardwareId, + masterHardwareId, + category: category || "Resolved", // ✅ use passed category or fallback to "Resolved" + resolvedAt: nowTime, + originalMovedAt: issue.movedAt || null, + movedToCategory: true, + slaveName: issue.slaveName || null, + currentlyResolved: true, + invalidatedAt: null, + reason: reason, + }); + + support.categorizedIssues.splice(categorizedIndex, 1); + + await support.save(); + + return reply.send({ message: "Issue resolved and moved to resolvedIssues", resolvedAt: nowTime }); + + } catch (err) { + console.error("Error resolving issue:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + + +exports.createRepairOrder = async (req, reply) => { + try { + const { supportId, customerId } = req.params; + const { storeId, replacements, status } = req.body; + + // Validate required path params + if (!supportId || !customerId) { + return reply.code(400).send({ error: "supportId and customerId are required in path params" }); + } + + // Validate replacements + if (!Array.isArray(replacements) || replacements.length === 0) { + return reply.code(400).send({ error: "Replacements array is required in 
body and must contain at least one item" }); + } + + for (const r of replacements) { + if (!["master", "slave", "sensor"].includes(r.type) || !r.oldHardwareId || !r.newHardwareId) { + return reply.code(400).send({ error: "Each replacement must have valid type, oldHardwareId, and newHardwareId" }); + } + } + + + // Create the repair order + const newRepairOrder = await Repairorder.create({ + customerId, + supportId, + storeId, + status: status || "pending", + replacements + }); + + return reply.send({ + status_code: 201, + message: "Repair order created successfully", + repairOrder: newRepairOrder + }); + + } catch (error) { + console.error("Error creating repair order:", error); + return reply.code(500).send({ error: "Internal server error" }); + } +}; \ No newline at end of file diff --git a/src/controllers/storeController.js b/src/controllers/storeController.js index 9d35024b..bf2d80db 100644 --- a/src/controllers/storeController.js +++ b/src/controllers/storeController.js @@ -1,5 +1,7 @@ const boom = require("boom"); -const bcrypt = require('bcrypt'); +//const bcrypt = require('bcrypt'); +const bcrypt = require('bcryptjs'); + const jwt = require('jsonwebtoken'); const customJwtAuth = require("../customAuthJwt"); const fastify = require("fastify")({ @@ -10,7 +12,9 @@ const fastify = require("fastify")({ return uuidv4(); }, }); -const { Install, ProfilePictureInstall, SensorQuotation,generateinstallationId,Store,WaterLeverSensor,MotorSwitchSenso,Insensors,generatequatationId, HardwareCart, ServiceCart, Sales} = require("../models/store"); +const { Tankerbooking} = require("../models/tankers") + +const {Repairorder,SensorStock,Order,EstimationOrder,Iotprice, Install, ProfilePictureInstall, SensorQuotation,generateinstallationId,Store,WaterLeverSensor,MotorSwitchSenso,Insensors,generatequatationId, HardwareCart, ServiceCart, Sales} = require("../models/store"); const { User,Counter, generateBookingId,resetCounter,generateCustomerId,ProfilePicture} = 
require('../models/User') @@ -690,6 +694,14 @@ exports.editStore = async (request, reply) => { } } + const generatewaterlevelheightsensorId = async () => { + const result = await Counter.findOneAndUpdate( + { _id: 'waterlevelheightsensor_id' }, + { $inc: { seq: 1 } }, + { upsert: true, new: true } + ); + return result.seq; + }; @@ -714,50 +726,50 @@ exports.editStore = async (request, reply) => { }; const moment = require('moment'); - exports.createwaterlevelSensor = async (req, reply) => { - try { - const storeId = req.params.storeId - const { hardwareId,hardwareId_company, type, indate } = req.body; - var mater_seq_id = await generatewaterlevelsensorId(); - const date = moment().format('MM-DD'); - const prefix = 'AS' + date + 'MALOV1'; - var masterId = `${prefix}${mater_seq_id}`; - const newSensor = new WaterLeverSensor({ - storeId, - hardwareId, - masterId, - type, - indate - }); - const savedSensor = await newSensor.save(); - reply.code(200).send(savedSensor); - } catch (err) { - reply.code(500).send(err); - } - }; + // exports.createwaterlevelSensor = async (req, reply) => { + // try { + // const storeId = req.params.storeId + // const { hardwareId,hardwareId_company, type, indate } = req.body; + // var mater_seq_id = await generatewaterlevelsensorId(); + // const date = moment().format('MM-DD'); + // const prefix = 'AS' + date + 'MALOV1'; + // var masterId = `${prefix}${mater_seq_id}`; + // const newSensor = new WaterLeverSensor({ + // storeId, + // hardwareId, + // masterId, + // type, + // indate + // }); + // const savedSensor = await newSensor.save(); + // reply.code(200).send(savedSensor); + // } catch (err) { + // reply.code(500).send(err); + // } + // }; - exports.editWaterLevelSensor = async (req, reply) => { - try { - const { storeId } = req.params; - const updates = req.body; + // exports.editWaterLevelSensor = async (req, reply) => { + // try { + // const { storeId } = req.params; + // const updates = req.body; - const updatedSensor = await 
WaterLeverSensor.findOneAndUpdate( - { storeId:storeId,hardwareId: req.body.hardwareId }, - updates, - { new: true } - ); + // const updatedSensor = await WaterLeverSensor.findOneAndUpdate( + // { storeId:storeId,hardwareId: req.body.hardwareId }, + // updates, + // { new: true } + // ); - if (!updatedSensor) { - reply.code(404).send({ message: "Sensor not found" }); - } else { - reply.code(200).send(updatedSensor); - } - } catch (err) { - reply.code(500).send(err); - } - }; + // if (!updatedSensor) { + // reply.code(404).send({ message: "Sensor not found" }); + // } else { + // reply.code(200).send(updatedSensor); + // } + // } catch (err) { + // reply.code(500).send(err); + // } + // }; exports.deleteWaterLevelSensor = async (req, reply) => { try { @@ -801,28 +813,28 @@ exports.editStore = async (request, reply) => { } }; - exports.qccheckwaterlevelSensor = async (request, reply) => { - try { - const { hardwareId } = request.params; - const updateData = request.body; + // exports.qccheckwaterlevelSensor = async (request, reply) => { + // try { + // const { hardwareId } = request.params; + // const updateData = request.body; - // Find the document by hardwareId and update it with the fields received in the body - const updatedSensor = await WaterLeverSensor.findOneAndUpdate( - { hardwareId: hardwareId }, - { $set: updateData }, - { new: true } // Return the updated document - ); + // // Find the document by hardwareId and update it with the fields received in the body + // const updatedSensor = await WaterLeverSensor.findOneAndUpdate( + // { hardwareId: hardwareId }, + // { $set: updateData }, + // { new: true } // Return the updated document + // ); - if (!updatedSensor) { - return reply.status(404).send({ error: 'Sensor not found' }); - } + // if (!updatedSensor) { + // return reply.status(404).send({ error: 'Sensor not found' }); + // } - return reply.status(200).send(updatedSensor); - } catch (error) { - console.error(error); - return reply.status(500).send({ 
error: 'An error occurred while updating the sensor' }); - } - }; + // return reply.status(200).send(updatedSensor); + // } catch (error) { + // console.error(error); + // return reply.status(500).send({ error: 'An error occurred while updating the sensor' }); + // } + // }; exports.getHardware = async (req, reply) => { @@ -922,39 +934,39 @@ exports.editStore = async (request, reply) => { -exports.editSlave = async (req, reply) => { - try { - const { hardwareId } = req.params; - const updates = req.body; +// exports.editSlave = async (req, reply) => { +// try { +// const { hardwareId } = req.params; +// const updates = req.body; - const mainHardware = await WaterLeverSensor.findOne({ hardwareId }); +// const mainHardware = await WaterLeverSensor.findOne({ hardwareId }); - if (!mainHardware) { - reply.code(404).send({ message: "Main hardware not found" }); - return; - } +// if (!mainHardware) { +// reply.code(404).send({ message: "Main hardware not found" }); +// return; +// } - const slaveIndex = mainHardware.slaves.tankhardware.findIndex( - (slave) => slave.tankhardwareId === req.body.tankhardwareId - ); +// const slaveIndex = mainHardware.slaves.tankhardware.findIndex( +// (slave) => slave.tankhardwareId === req.body.tankhardwareId +// ); - if (slaveIndex === -1) { - reply.code(404).send({ message: "Slave not found" }); - return; - } +// if (slaveIndex === -1) { +// reply.code(404).send({ message: "Slave not found" }); +// return; +// } - mainHardware.slaves.tankhardware[slaveIndex] = { - ...mainHardware.slaves.tankhardware[slaveIndex], - ...updates, - }; +// mainHardware.slaves.tankhardware[slaveIndex] = { +// ...mainHardware.slaves.tankhardware[slaveIndex], +// ...updates, +// }; - const updatedHardware = await mainHardware.save(); +// const updatedHardware = await mainHardware.save(); - reply.code(200).send(updatedHardware); - } catch (err) { - reply.code(500).send(err); - } -}; +// reply.code(200).send(updatedHardware); +// } catch (err) { +// 
reply.code(500).send(err); +// } +// }; exports.deleteSlave = async (req, reply) => { @@ -1163,6 +1175,231 @@ exports.installmotorswitch = async (request, reply) => { } }; + +exports.generateHardwareMasterId = async (req, reply) => { + try { + const storeId = req.params.storeId + const { from, to, type, quantity } = req.body; + const sensorType = type.toLowerCase(); + const fromInt = parseInt(from, 10); + const toInt = parseInt(to, 10); + + if (isNaN(fromInt) || isNaN(toInt) || fromInt > toInt) { + return reply.code(400).send({ message: 'Invalid from/to values' }); + } + + // Fetch pending sensors of the given type and storeId whose hardwareId is null + const pendingSensors = await Insensors.find({ status: 'pending', type: sensorType, storeId, hardwareId: null }); + + if (!pendingSensors.length) { + return reply.code(404).send({ message: 'No pending sensors found for the given type and storeId' }); + } + + if (quantity > pendingSensors.length) { + return reply.code(400).send({ message: `Available quantity is less than requested: ${pendingSensors.length} available.` }); + } + + let hardwareIdSequence = fromInt; + const date = moment().format('MM-DD'); + + for (let i = 0; i < quantity && i < pendingSensors.length; i++) { + let sensor = pendingSensors[i]; + if (hardwareIdSequence > toInt) break; + + sensor.hardwareId = hardwareIdSequence.toString().padStart(8, '0'); + + let mater_seq_id; + if (sensorType === 'master') { + mater_seq_id = await generatewaterlevelsensorId(); + sensor.masterId = `AS${date}MALOV1${mater_seq_id}`; + } else if (sensorType === 'slave') { + mater_seq_id = await generatewaterlevelslavesensorId(); + sensor.masterId = `AS${date}SLLOV1${mater_seq_id}`; + } else if (sensorType === 'sensor') { + mater_seq_id = await generatewaterlevelheightsensorId(); + sensor.masterId = `AS${date}SELOV1${mater_seq_id}`; + } + + await sensor.save(); // Save updated sensor in the database + hardwareIdSequence++; + } + + return reply.code(200).send({ message: 
'HardwareId and MasterId assigned successfully' }); + } catch (error) { + console.error('Error generating IDs:', error); + return reply.code(500).send({ message: 'Internal Server Error' }); + } +}; + +exports.getSensorByHardwareId = async (req, reply) => { + try { + + const { storeId } = req.params; + const { hardwareId } = req.body; + + if (!hardwareId) { + return reply.code(400).send({ message: 'hardwareId is required' }); + } + + const sensor = await Insensors.findOne({ storeId, hardwareId }); + + if (!sensor) { + return reply.code(404).send({ message: 'Sensor not found' }); + } + + return reply.code(200).send(sensor); + } catch (error) { + console.error('Error fetching sensor by hardwareId:', error); + return reply.code(500).send({ message: 'Internal Server Error' }); + } +}; + +exports.updateSensorById = async (req, reply) => { + try { + const { _id } = req.params; + let updateData = req.body; + + const allowedFields = ["model", "type", "hardwareId_company", "hardwareId","masterId"]; + + // Filter out unwanted fields and convert type to lowercase if present + const filteredUpdateData = Object.keys(updateData) + .filter((key) => allowedFields.includes(key)) + .reduce((obj, key) => { + obj[key] = key === "type" ? 
updateData[key].toLowerCase() : updateData[key]; + return obj; + }, {}); + + const updatedSensor = await Insensors.findByIdAndUpdate(_id, filteredUpdateData, { new: true }); + + if (!updatedSensor) { + return reply.code(404).send({ message: "Sensor not found" }); + } + + return reply.code(200).send(updatedSensor); + } catch (error) { + console.error("Error updating sensor:", error); + return reply.code(500).send({ message: "Internal Server Error" }); + } +}; + +exports.deleteSensorById = async (req, reply) => { + try { + const { _id } = req.params; + + const deletedSensor = await Insensors.findByIdAndDelete(_id); + + if (!deletedSensor) { + return reply.code(404).send({ message: "Sensor not found" }); + } + + return reply.code(200).send({ message: "Sensor deleted successfully" }); + } catch (error) { + console.error("Error deleting sensor:", error); + return reply.code(500).send({ message: "Internal Server Error" }); + } +}; + + + + +exports.updateSensorQC = async (req, reply) => { + try { + const { hardwareId } = req.params; + let updateData = req.body; + + const allowedFields = ['qccheck', 'qcby', 'comment', 'status', 'quality_check_details']; + + // Filter only allowed fields + const filteredUpdateData = Object.keys(updateData) + .filter((key) => allowedFields.includes(key)) + .reduce((obj, key) => { + obj[key] = updateData[key]; + return obj; + }, {}); + + // Ensure qccheck is handled properly + if (filteredUpdateData.qccheck) { + const qccheckLower = filteredUpdateData.qccheck.toLowerCase(); + filteredUpdateData.status = qccheckLower === 'pass' ? 
'available' : 'qcfailed'; + } + + // Update qccheckdate with the current date in "DD-MMM-YYYY - HH:MM" format + filteredUpdateData.qccheckdate = moment().format('DD-MMM-YYYY - HH:mm'); + + // Find the sensor by ID + const updatedSensor = await Insensors.findOneAndUpdate( + { hardwareId }, // correct query filter + filteredUpdateData, + { new: true } + ); + + if (!updatedSensor) { + return reply.code(409).send({ message: 'Sensor not found' }); + } + + // Update stock based on QC result + const stockRecord = await SensorStock.findOne({ storeId: updatedSensor.storeId, type: updatedSensor.type }); + + if (stockRecord) { + if (filteredUpdateData.qccheck && filteredUpdateData.qccheck.toLowerCase() === 'ok') { + // If QC is "ok", move 1 from total_count_before_qc to total_available + await SensorStock.updateOne( + { storeId: updatedSensor.storeId, type: updatedSensor.type }, + { $inc: { total_count_before_qc: -1, total_available: 1 } } + ); + } else { + // If QC is failed, move 1 from total_count_before_qc to total_repair + await SensorStock.updateOne( + { storeId: updatedSensor.storeId, type: updatedSensor.type }, + { $inc: { total_count_before_qc: -1, total_repair: 1 } } + ); + } + } + + return reply.code(200).send(updatedSensor); + } catch (error) { + console.error('Error updating QC fields:', error); + return reply.code(500).send({ message: 'Internal Server Error' }); + } +}; + + + + + +exports.getSensorsByStatus = async (req, reply) => { + try { + const { storeId } = req.params; + + const statuses = ["pending", "available", "rejected","blocked"]; + let result = {}; + + for (const status of statuses) { + result[status] = await Insensors.find({ storeId, status }); + } + const sensorStock = await SensorStock.find({ storeId }).lean(); + return reply.code(200).send({ + status_code: 200, + message: "Sensors and stock fetched successfully", + data: { + sensors: result, + stock: sensorStock + } + }); + } catch (error) { + console.error("Error fetching sensors:", error); + 
return reply.code(500).send({ message: "Internal Server Error" }); + } +}; + + + + + + + + + exports.getpumpswitchqc = async (req, reply) => { try { await MotorSwitchSensor.find({storeId: req.params.storeId,motorId:req.body.motorId}) @@ -1204,19 +1441,17 @@ const generateBatchNo = (type, hardwareIdCompany) => { }; exports.createSensor = async (req, reply) => { - try { + try { const storeId = req.params.storeId; const { indate, batchno, hardwareId_company, quantity, model, type } = req.body; - + const sensorType = type.toLowerCase(); let finalBatchNo = batchno; - if (batchno === 'New') { + // Generate unique batch number if 'New' is received + if (batchno === "New") { let isUnique = false; - while (!isUnique) { finalBatchNo = generateBatchNo(type, hardwareId_company); - - // Check for uniqueness const existingBatchNo = await Insensors.findOne({ batchno: finalBatchNo }); if (!existingBatchNo) { isUnique = true; @@ -1224,23 +1459,44 @@ exports.createSensor = async (req, reply) => { } } - const date = moment().format('MM-DD'); let entries = []; - for (let i = 0; i < quantity; i++) { - const newSensor = { + entries.push({ storeId, model, batchno: finalBatchNo, - type, + type: sensorType, indate, - hardwareId_company - }; - - entries.push(newSensor); + hardwareId_company, + }); } + // Insert new sensors into Insensors collection const savedSensors = await Insensors.insertMany(entries); + + // Update stock information in SensorStock + const stockRecord = await SensorStock.findOne({ storeId, type: sensorType }); + + if (stockRecord) { + // If stock record exists, update total_count + await SensorStock.updateOne( + { storeId, type: sensorType }, + { $inc: { total_count: quantity, total_count_before_qc: quantity } } + ); + } else { + // If no stock record exists, create a new one + await SensorStock.create({ + storeId, + type: sensorType, + total_count: quantity, + total_available: 0, + total_blocked: 0, + total_repair: 0, + total_installed: 0, + 
total_count_before_qc:quantity + }); + } + reply.code(200).send(savedSensors); } catch (err) { reply.code(500).send(err); @@ -1290,10 +1546,33 @@ exports.getbatchnumbers = async (req, reply) => { }; +exports.getbatquotationsforparticularstore = async (req, reply) => { + try { + const storeId = req.params.storeId; + + + let query = { storeId: storeId }; + + + + // Fetch data based on the query + const result = await SensorQuotation.find(query); + + if (!result ) { + return reply.send({ status_code: 404, error: "not found" }); + } + + reply.send({ status_code: 200, data: result }); + } catch (err) { + throw boom.boomify(err); + } +}; + + exports.getiots = async (req, reply) => { try { const storeId = req.params.storeId; - let type = req.params.type ? req.params.type.toUpperCase() : null; // Convert type to uppercase + let type = req.params.type ? req.params.type.toLowerCase() : null; // Convert type to uppercase let query = { storeId: storeId }; @@ -1343,8 +1622,9 @@ exports.createquotationforSensor = async (req, reply) => { try { const i_id = await generatequatationId(); const quatationId = `AWQU${i_id}`; - const { installationId } = req.params; - const { customerId, masters, slaves, sensors, motor_switches, electricals } = req.body; + const { surveyId } = req.params; + const { customerId, masters, slaves, sensors, motor_switches, electricals,master_connections } = req.body; + // Format electricals field const formattedElectricals = electricals.map((item) => ({ @@ -1353,6 +1633,56 @@ exports.createquotationforSensor = async (req, reply) => { switch: item.switch || "", text: item.text || "", })); + const formattedMasterDetails = master_connections.map((item) => ({ + master_name: item.master_name || "", + slaves: item.slaves || "", + location: item.location || "", + googleLocation: item.googleLocation || "", + latitude: item.latitude || "", + longitude: item.longitude || "", + tanks: Array.isArray(item.tanks) + ? 
item.tanks.map(tank => ({ + tankName: tank.tankName || "", + tankLocation: tank.tankLocation || "" + })) + : [], + motor_switches: Array.isArray(item.motor_switches) + ? item.motor_switches.map(motor_switch => ({ + + from_tank: motor_switch.from_tank || "", + from_location: motor_switch.from_location || "", + to_tank: motor_switch.to_tank || "", + to_location: motor_switch.to_location || "", + + })) + : [], + + })); + + // Fetch pricing data from Iotprice database + const getPrice = async (name, type) => { + const priceData = await Iotprice.findOne({ name, type }); + return priceData ? priceData.cost : 0; // Default to 0 if not found + }; + + // Calculate price for masters, slaves, sensors, motor switches + const masterPrice = await getPrice("master", "master"); + const slavePrice = await getPrice("slave", "slave"); + const sensorPrice = await getPrice("sensor", "sensor"); + const motorSwitchPrice = await getPrice("motor_switch", "motor_switches"); + + // Calculate price for electricals + let electricalPrice = 0; + for (const item of formattedElectricals) { + if (item.type === "cable") { + electricalPrice += await getPrice("cable", item.wire); // wire field is type + } else if (item.type === "switch") { + electricalPrice += await getPrice("switch", item.switch); // switch field is type + } + } + + // Calculate total estimation price + const totalEstimatedPrice = masterPrice + slavePrice + sensorPrice + motorSwitchPrice + electricalPrice; // Format current date and time in IST const formattedDateTime = dayjs().tz("Asia/Kolkata").format('DD-MMM-YYYY - HH:mm'); @@ -1360,15 +1690,17 @@ exports.createquotationforSensor = async (req, reply) => { // Create a new SensorQuotation document const newQuotation = new SensorQuotation({ quatationId, - customerId: customerId, - installationId: installationId, - quote_status: "sentfrominstaller", + customerId, + master_connections: formattedMasterDetails, + surveyId, + quote_status: "sentfromsurvey", masters, slaves, sensors, 
motor_switches, electricals: formattedElectricals, - datetime: formattedDateTime, // Add the formatted IST date and time + datetime: formattedDateTime, + estimated_price: totalEstimatedPrice, // Store estimated price }); const savedQuotation = await newQuotation.save(); @@ -1378,6 +1710,7 @@ exports.createquotationforSensor = async (req, reply) => { message: 'Quotation for sensors created successfully.', data: savedQuotation, }); + } catch (error) { console.error('Error creating quotation:', error); reply.code(500).send({ @@ -1389,10 +1722,13 @@ exports.createquotationforSensor = async (req, reply) => { }; + + + exports.editQuotationForSensor = async (req, reply) => { try { const { quatationId } = req.params; // Get the ID of the quotation to edit - const { masters, slaves, sensors, motor_switches, electricals } = req.body; + const { masters, slaves, sensors, motor_switches, electricals, master_connections } = req.body; // Format electricals field const formattedElectricals = electricals.map((item) => ({ @@ -1402,6 +1738,21 @@ exports.editQuotationForSensor = async (req, reply) => { text: item.text || "", })); + // Format master_connections field + const formattedMasterDetails = master_connections.map((item) => ({ + master_name: item.master_name || "", + slaves: item.slaves || "", + location: item.location || "", + tanks: Array.isArray(item.tanks) + ? 
item.tanks.map(tank => ({ + tankName: tank.tankName || "", + tankLocation: tank.tankLocation || "" + })) + : [] + })); + + console.log("Formatted Master Connections:", formattedMasterDetails); // Debugging + // Find and update the quotation const updatedQuotation = await SensorQuotation.findOneAndUpdate( { quatationId }, @@ -1411,6 +1762,7 @@ exports.editQuotationForSensor = async (req, reply) => { sensors, motor_switches, electricals: formattedElectricals, + master_connections: formattedMasterDetails, // <- Ensure it's included updated_at: dayjs().tz("Asia/Kolkata").format('DD-MMM-YYYY - HH:mm'), }, { new: true } // Return the updated document @@ -1439,43 +1791,114 @@ exports.editQuotationForSensor = async (req, reply) => { }; - -exports.getallquotationdata = async (req, reply) => { +exports.createEstimationPrice = async (req, reply) => { try { - await SensorQuotation.find({}) - .exec() - .then((docs) => { - reply.send({ status_code: 200, data: docs, count: docs.length }); - }) - .catch((err) => { - console.log(err); - reply.send({ error: err }); - }); - } catch (err) { - throw boom.boomify(err); - } -}; + const { customerId,items } = req.body; + const user = await User.findOne({ customerId }); + if (!Array.isArray(items) || items.length === 0) { + return reply.code(400).send({ message: "Items array is required and cannot be empty" }); + } + let totalEstimation = 0; + const itemDetails = []; -exports.saveQuotationData = async (req, reply) => { - try { - const { quotationId } = req.params; // Retrieve the quotationId from the request parameters + for (const item of items) { + const { name, type, quantity } = item; - // Fetch the quotation data from the database - const quotation = await SensorQuotation.findOne({ quatationId: quotationId }); - console.log(quotation) - if (!quotation) { - return reply.code(404).send({ - success: false, - message: 'Quotation not found.' 
+ // Fetch the unit price from IotPrice collection + const priceEntry = await Iotprice.findOne({ name, type }); + + if (!priceEntry) { + itemDetails.push({ + name, + type, + quantity, + unitPrice: null, + totalCost: null, + message: "Price not found" + }); + continue; + } + + const unitPrice = priceEntry.cost; + const totalCost = unitPrice * quantity; + totalEstimation += totalCost; + + itemDetails.push({ + name, + type, + quantity, + unitPrice, + totalCost }); } - // Extract the price-per-unit and total price values from the request body - const { - masters_quantity_price, - masters_total_price, + return reply.code(200).send({ + items: itemDetails, + estimatedTotal: totalEstimation, + userDetails: user || null // Include user details in the response + }); + + } catch (error) { + console.error("Error calculating estimation:", error); + return reply.code(500).send({ message: "Failed to calculate estimation" }); + } +}; + + +exports.getallquotationdata = async (req, reply) => { + try { + const quotations = await SensorQuotation.find({}).lean(); // Use lean() for better performance + + // Extract unique customerIds from quotations + const customerIds = [...new Set(quotations.map((q) => q.customerId).filter(Boolean))]; + + // Fetch customer details for all unique customerIds + const customers = await User.find({ customerId: { $in: customerIds } }).lean(); + + // Convert customer array to a dictionary for quick lookup + const customerMap = customers.reduce((acc, customer) => { + acc[customer.customerId] = customer; + return acc; + }, {}); + + + // Attach customer details to quotations + const enrichedQuotations = quotations.map((quotation) => ({ + ...quotation, + customerDetails: customerMap[quotation.customerId] || null, // Attach customer details if found + })); + + reply.send({ status_code: 200, data: enrichedQuotations, count: enrichedQuotations.length }); + } catch (err) { + console.error(err); + reply.send({ error: err.message }); + } +}; + + + + + 
+exports.saveQuotationData = async (req, reply) => { + try { + const { quotationId } = req.params; // Retrieve the quotationId from the request parameters + + // Fetch the quotation data from the database + const quotation = await SensorQuotation.findOne({ quatationId: quotationId }); + console.log(quotation) + if (!quotation) { + return reply.code(404).send({ + success: false, + message: 'Quotation not found.' + }); + } + + // Extract the price-per-unit and total price values from the request body + const { + masters_quantity_price, + masters_total_price, slaves_quantity_price, slaves_total_price, motor_switches_quantity_price, @@ -1620,12 +2043,12 @@ exports.addToCartService = async (req, reply) => { exports.getquotationofinstalleranduser = async (req, reply) => { try { - const installationId = req.params.installationId; + const surveyId = req.params.surveyId; const customerId = req.body.customerId; // Find the specific tank const result = await SensorQuotation.find({ - installationId: installationId, + surveyId: surveyId, customerId: customerId, }); @@ -1639,4 +2062,1967 @@ exports.getquotationofinstalleranduser = async (req, reply) => { } catch (err) { throw boom.boomify(err); } -}; \ No newline at end of file +}; + + +exports.updateInstallationId = async (req, reply) => { + try { + const { _id } = req.params; + const { installationId } = req.body; + + if (!_id || !installationId) { + return reply.status(400).send({ error: "orderId and installationId are required" }); + } + + // Update the order with the new installationId + const updatedOrder = await Order.findByIdAndUpdate( + _id, + { + installationId, + status: "installer_assigned", // Updating the status + updated_at: new Date().toISOString(), // Updating timestamp + }, + { new: true } + ); + + if (!updatedOrder) { + return reply.status(404).send({ error: "Order not found" }); + } + + return reply.send({ + status_code: 200, + message: "Installation ID updated successfully", + data: updatedOrder, + }); + } 
catch (err) { + console.error("Error updating installationId:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + +exports.getPendingOrders = async (req, reply) => { + try { + const pendingOrders = await Order.find({ status: "pending" }); + + if (!pendingOrders.length) { + return reply.send({ + status_code: 200, + message: "No pending orders found", + data: [], + }); + } + + return reply.send({ + status_code: 200, + message: "Pending orders fetched successfully", + data: pendingOrders, + }); + } catch (err) { + console.error("Error fetching pending orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + +exports.handleEstimation = async (req, reply) => { + try { + const { customerId, items, estimatedTotal, action } = req.body; + + if (!customerId) { + return reply.code(400).send({ message: "customerId is required" }); + } + if (!Array.isArray(items) || items.length === 0) { + return reply.code(400).send({ message: "Items array is required and cannot be empty" }); + } + if (!estimatedTotal) { + return reply.code(400).send({ message: "Estimated total is required" }); + } + if (!["accept", "reject"].includes(action)) { + return reply.code(400).send({ message: "Invalid action, must be 'accept' or 'reject'" }); + } + + // If rejected, return a response without creating an order + if (action === "reject") { + return reply.code(200).send({ message: "Estimation rejected" }); + } + + // If accepted, generate unique Order ID + const lastOrder = await EstimationOrder.findOne().sort({ createdAt: -1 }); + let orderId = "AWS001"; + if (lastOrder) { + const lastNumber = parseInt(lastOrder.orderId.replace("AWS", ""), 10) + 1; + orderId = `AWS${String(lastNumber).padStart(3, "0")}`; + } + + // Create a new order in the database + const newOrder = new EstimationOrder({ + orderId, + customerId, + items, + estimatedTotal, + status: "pending" + }); + + await newOrder.save(); + const user = await 
User.findOne({ customerId }); + return reply.code(200).send({ + message: "Order created successfully", + orderId, + customerId, + estimatedTotal, + items, + status: "pending", + userDetails: user || null + }); + + } catch (error) { + console.error("Error handling estimation:", error); + return reply.code(500).send({ message: "Failed to process estimation" }); + } +}; + +exports.editOrder = async (req, reply) => { + try { + const { orderId, customerId, items, estimatedTotal } = req.body; + + if (!orderId) { + return reply.code(400).send({ message: "orderId is required" }); + } + + // Find the existing order + const existingOrder = await EstimationOrder.findOne({ orderId }); + + if (!existingOrder) { + return reply.code(404).send({ message: "Order not found" }); + } + + // Update the order in the database + existingOrder.customerId = customerId; + existingOrder.items = items; + existingOrder.estimatedTotal = estimatedTotal; + + await existingOrder.save(); + + return reply.code(200).send({ + message: "Order updated successfully", + orderId, + updatedItems: items, + estimatedTotal + }); + + } catch (error) { + console.error("Error updating order:", error); + return reply.code(500).send({ message: "Failed to update order" }); + } +}; + +exports.getOrdersByCustomer = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.code(400).send({ message: "customerId is required" }); + } + + // Fetch orders with status 'pending' or 'accepted' for the given customer + const orders = await EstimationOrder.find({ + customerId, + status: { $in: ["pending", "accepted"] } + }); + + if (orders.length === 0) { + return reply.code(404).send({ message: "No orders found for this customer" }); + } + + return reply.code(200).send({ + message: "Orders retrieved successfully", + customerId, + orders + }); + + } catch (error) { + console.error("Error fetching orders:", error); + return reply.code(500).send({ message: "Failed to fetch orders" 
}); + } +}; + + + +exports.acceptQuotation = async (req, reply) => { + try { + const { quotationId } = req.params; + let { action, storeId } = req.body; + + action = action.toLowerCase(); + + const quotation = await SensorQuotation.findOne({ quatationId: quotationId }); + if (!quotation) { + return reply.status(404).send({ error: "Quotation not found" }); + } + + if (action === "reject") { + await SensorQuotation.updateOne({ quatationId: quotationId }, { $set: { status: "rejected" } }); + + return reply.send({ + status_code: 200, + message: "Quotation rejected successfully", + }); + } else if (action === "accept") { + const { customerId, masters, slaves, sensors, master_connections } = quotation; + + // Step 1: Block Masters + let blockedMasters = []; + if (parseInt(masters) > 0) { + const availableMasters = await Insensors.find({ storeId, type: "master", status: "available" }) + .limit(parseInt(masters)) + .lean(); + + const masterIds = availableMasters.map(master => master._id); + blockedMasters = availableMasters.map(master => ({ _id: master._id, hardwareId: master.hardwareId })); + + if (masterIds.length > 0) { + await Insensors.updateMany( + { _id: { $in: masterIds } }, + { $set: { status: "blocked", customerId } } + ); + } + } + + // Step 2: Assign Slaves to Masters + let blockedSlaveIds = []; + let blockedSlaves = []; + + for (let i = 0; i < master_connections.length; i++) { + const masterData = master_connections[i]; + if (i >= blockedMasters.length) break; + + const masterHardwareId = blockedMasters[i].hardwareId; + const masterId = blockedMasters[i]._id; + const slaveCount = parseInt(masterData.slaves) || 0; + + // Assign Slaves + if (slaveCount > 0) { + const availableSlaves = await Insensors.find({ storeId, type: "slave", status: "available" }) + .limit(slaveCount) + .lean(); + + const slaveIds = availableSlaves.map(slave => slave._id); + blockedSlaveIds.push(...slaveIds); + blockedSlaves.push(...availableSlaves.map(slave => ({ _id: slave._id, 
hardwareId: slave.hardwareId }))); + + for (let j = 0; j < availableSlaves.length; j++) { + const slave = availableSlaves[j]; + const tank = masterData.tanks?.[j] || {}; + + await Insensors.updateOne( + { _id: slave._id }, + { + $set: { + status: "blocked", + customerId, + connected_to: masterHardwareId, + tankhardwareId: `tank-${j + 1}`, + hardwareId: slave.hardwareId, + tankName: tank.tankName || "", + tankLocation: tank.tankLocation || "", + }, + } + ); + } + } + + // Update tanks and motor switches for master + await Insensors.updateOne( + { _id: masterId }, + { + $set: { + hardwareId: masterHardwareId, + tanks: (masterData.tanks || []).map(tank => ({ + tankName: tank.tankName || "", + tankLocation: tank.tankLocation || "", + })), + motor_switches: masterData.motor_switches || [], + }, + } + ); + } + + // Step 2.5: Update master_connections.hardwareId + const updatedMasterConnections = quotation.master_connections.map((conn, index) => { + const plain = conn.toObject ? conn.toObject() : conn; + return { + ...plain, + hardwareId: blockedMasters[index]?.hardwareId || null, + }; + }); + + // Step 3: Assign Sensors to Slaves + if (parseInt(sensors) > 0 && blockedSlaves.length > 0) { + const availableSensors = await Insensors.find({ storeId, type: "sensor", status: "available" }) + .limit(parseInt(sensors)) + .lean(); + + const sensorIds = availableSensors.map(sensor => sensor._id); + + for (let i = 0; i < sensorIds.length; i++) { + const assignedSlave = blockedSlaves[i % blockedSlaves.length]; + + await Insensors.updateOne( + { _id: sensorIds[i] }, + { + $set: { + status: "blocked", + customerId, + connected_to: assignedSlave.hardwareId, + }, + } + ); + } + } + + // Step 4: Update Sensor Stock + const sensorTypes = [ + { type: "master", count: parseInt(masters || 0) }, + { type: "slave", count: parseInt(slaves || 0) }, + { type: "sensor", count: parseInt(sensors || 0) }, + ]; + + for (const sensor of sensorTypes) { + if (sensor.count > 0) { + const stock = await 
SensorStock.findOne({ storeId, type: sensor.type }); + + if (stock) { + let available = stock.total_available || 0; + let needed = sensor.count; + let toBlock = Math.min(available, needed); + let excessNeeded = needed - toBlock; + + if (toBlock > 0) { + const availableSensors = await Insensors.find({ storeId, type: sensor.type, status: "available" }) + .limit(toBlock) + .lean(); + + const sensorIds = availableSensors.map(sensor => sensor._id); + + if (sensorIds.length > 0) { + await Insensors.updateMany( + { _id: { $in: sensorIds } }, + { $set: { status: "blocked", customerId } } + ); + } + } + + await SensorStock.updateOne( + { storeId, type: sensor.type }, + { + $inc: { + total_available: -toBlock, + total_blocked: toBlock, + excess_needed: excessNeeded > 0 ? excessNeeded : 0, + }, + } + ); + } + } + } + + // Step 5: Create Order + const plainQuotation = quotation.toObject(); + plainQuotation.master_connections = updatedMasterConnections; + + const newOrder = new Order({ + ...plainQuotation, + storeId, + status: "pending", + }); + + await newOrder.save(); + + // Step 6: Delete Quotation + await SensorQuotation.deleteOne({ quatationId: quotationId }); + + return reply.send({ + status_code: 200, + message: "Quotation accepted, sensors blocked, and moved to Orders", + data: newOrder, + }); + } else { + return reply.status(400).send({ error: "Invalid action" }); + } + } catch (err) { + console.error("Error processing quotation:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + + +exports.getOrdersByStoreId = async (req, reply) => { + try { + const { storeId } = req.params; + + if (!storeId) { + return reply.status(400).send({ error: "storeId is required" }); + } + + // Fetch orders with the matching storeId + const orders = await Order.find({ storeId }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this store", + data: [], + }); + } + + // Fetch customer details & 
allocated sensors for each order + const ordersWithDetails = await Promise.all( + orders.map(async (order) => { + // Fetch customer details + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + // Fetch allocated sensors for this customer + const allocatedSensors = await Insensors.find({ + storeId, + customerId: order.customerId, // Match only sensors allocated to this customer + status: "blocked", // Only fetch sensors that are allocated (blocked) + }).lean(); + + return { + ...order.toObject(), + customer: customer || null, // Include customer details or null if not found + allocated_sensors: allocatedSensors, // List of allocated sensors + }; + }) + ); + + return reply.send({ + status_code: 200, + message: "Orders fetched successfully", + data: ordersWithDetails, + }); + } catch (err) { + console.error("Error fetching orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +// exports.getOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Fetch all orders by installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: { +// customers: [], +// orders: [] +// } +// }); +// } + +// // Get unique customerIds from orders +// const uniqueCustomerIds = [...new Set(orders.map(order => order.customerId))]; + +// // Fetch all customers in a single query +// const customers = await User.find({ customerId: { $in: uniqueCustomerIds } }).lean(); + +// // Map customerId -> customer object +// const customerMap = {}; +// customers.forEach(c => { +// customerMap[c.customerId] = c; +// }); + +// // For each order, attach allocated sensors only +// const ordersWithSensors = await Promise.all( +// 
orders.map(async (order) => { +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked" +// }).lean(); + +// return { +// ...order.toObject(), +// allocated_sensors: allocatedSensors +// }; +// }) +// ); + +// return reply.send({ +// status_code: 200, +// message: "Orders fetched successfully", +// data: customers +// // data: { +// // customers, +// // // orders: ordersWithSensors +// // } +// }); + +// } catch (err) { +// console.error("Error fetching orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +// exports.getOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Fetch orders with the matching installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// const uniqueCustomersMap = new Map(); + +// // Build unique customerId-based map +// for (const order of orders) { +// if (!uniqueCustomersMap.has(order.customerId)) { +// uniqueCustomersMap.set(order.customerId, order); +// } +// } + +// // Only keep one order per customerId +// const uniqueOrders = Array.from(uniqueCustomersMap.values()); + +// // Enrich with customer and sensor info +// const ordersWithDetails = await Promise.all( +// uniqueOrders.map(async (order) => { +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// return { +// ...order.toObject(), +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }; +// }) +// ); + +// 
return reply.send({ +// status_code: 200, +// message: "Orders fetched successfully", +// data: ordersWithDetails, +// }); + +// } catch (err) { +// console.error("Error fetching orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +// exports.getOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Step 1: Fetch orders by installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// const ordersWithActiveMasters = []; + +// // Step 2: Process each order +// for (const order of orders) { +// // Filter master_connections to keep only those where work_status is 'active' or empty/null +// const activeMasters = (order.master_connections || []).filter(mc => +// mc.work_status === 'active' || mc.work_status === '' || mc.work_status == null +// ); + +// if (activeMasters.length) { +// // Fetch customer details +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// // Build response object +// ordersWithActiveMasters.push({ +// ...order.toObject(), +// master_connections: activeMasters, // only active master_connections +// customer: customer || null, +// }); +// } +// } + +// // Step 3: Return response +// return reply.send({ +// status_code: 200, +// message: "Orders with active master connections fetched successfully", +// data: ordersWithActiveMasters, +// }); + +// } catch (err) { +// console.error("Error fetching orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +exports.getOrdersByInstallationId = async (req, reply) => { + try { + const { installationId } = req.params; + + if (!installationId) { + 
return reply.status(400).send({ error: "installationId is required" }); + } + + // Step 1: Fetch orders by installationId + const orders = await Order.find({ installationId }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation", + data: [], + }); + } + + const ordersWithDetails = []; + + // Fetch installation document once (contains all team members) + const installationDoc = await Install.findOne({ installationId }).lean(); + + // Step 2: Process each order + for (const order of orders) { + // Filter master_connections to keep active ones + const activeMasters = (order.master_connections || []).filter( + (mc) => + mc.work_status === "active" || + mc.work_status === "" || + mc.work_status == null + ); + + if (activeMasters.length) { + // Fetch customer details + const customer = await User.findOne({ + customerId: order.customerId, + }).lean(); + + // Find assigned team members from installation.team_member.team_member + let assignedTeamMembersDetails = []; + if ( + installationDoc && + installationDoc.team_member && + Array.isArray(installationDoc.team_member.team_member) && + order.assignedTeamMembers && + order.assignedTeamMembers.length > 0 + ) { + assignedTeamMembersDetails = + installationDoc.team_member.team_member.filter((tm) => + order.assignedTeamMembers.includes(tm.teamMemberId) + ); + } + + // Extract work_status from first active master + const work_status = + activeMasters.length > 0 ? 
activeMasters[0].work_status : null; + + // Build response + ordersWithDetails.push({ + ...order.toObject(), + master_connections: activeMasters, + work_status, // 👈 placed above customer + customer: customer || null, + assignedTeamMembersDetails, + }); + } + } + + return reply.send({ + status_code: 200, + message: "Orders with active master connections fetched successfully", + data: ordersWithDetails, + }); + } catch (err) { + console.error("Error fetching orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + + + + + +exports.getPendingOrdersByInstallationAndTeamMember = async (req, reply) => { + try { + const { installationId, teamMemberId } = req.params; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + if (!teamMemberId) { + return reply.status(400).send({ error: "teamMemberId is required" }); + } + + // Fetch orders matching installationId and assignedTeamMembers + const orders = await Order.find({ + installationId, + assignedTeamMembers: teamMemberId + }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation and team member", + data: [], + }); + } + + const uniqueCustomersMap = new Map(); + + // Build unique customerId-based map + for (const order of orders) { + if (!uniqueCustomersMap.has(order.customerId)) { + uniqueCustomersMap.set(order.customerId, order); + } + } + + let uniqueOrders = Array.from(uniqueCustomersMap.values()); + + // ✅ Filter orders that have at least one pending master_connection + uniqueOrders = uniqueOrders.filter(order => + Array.isArray(order.master_connections) && + order.master_connections.some(mc => mc.work_status === 'pending') + ); + + if (!uniqueOrders.length) { + return reply.send({ + status_code: 200, + message: "No pending orders found for this installation and team member", + data: [], + }); + } + + // Enrich and also filter master_connections inside each 
order + const ordersWithDetails = await Promise.all( + uniqueOrders.map(async (order) => { + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Keep only master_connections with work_status === 'pending' + const pendingMasters = order.master_connections.filter(mc => mc.work_status === 'pending'); + + const pendingWorkStatuses = pendingMasters.map(mc => mc.work_status); + + return { + ...order.toObject(), + master_connections: pendingMasters, + work_status: pendingMasters.length > 0 ? pendingMasters[0].work_status : null, + customer: customer || null, + allocated_sensors: allocatedSensors, + }; + }) + ); + + return reply.send({ + status_code: 200, + message: "Pending orders fetched successfully", + data: ordersWithDetails, + }); + + } catch (err) { + console.error("Error fetching pending orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +exports.getWaitingOrdersByInstallationAndTeamMember = async (req, reply) => { + try { + const { installationId, teamMemberId } = req.params; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + if (!teamMemberId) { + return reply.status(400).send({ error: "teamMemberId is required" }); + } + + // Fetch orders matching installationId and assignedTeamMembers + const orders = await Order.find({ + installationId, + assignedTeamMembers: teamMemberId + }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation and team member", + data: [], + }); + } + + const uniqueCustomersMap = new Map(); + + // Build unique customerId-based map + for (const order of orders) { + if (!uniqueCustomersMap.has(order.customerId)) { + uniqueCustomersMap.set(order.customerId, order); + } + } + + let uniqueOrders = 
Array.from(uniqueCustomersMap.values()); + + // ✅ Filter orders that have at least one pending master_connection + uniqueOrders = uniqueOrders.filter(order => + Array.isArray(order.master_connections) && + order.master_connections.some(mc => mc.work_status === 'waiting') + ); + + if (!uniqueOrders.length) { + return reply.send({ + status_code: 200, + message: "No pending orders found for this installation and team member", + data: [], + }); + } + + // Enrich and also filter master_connections inside each order + const ordersWithDetails = await Promise.all( + uniqueOrders.map(async (order) => { + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Keep only master_connections with work_status === 'pending' + const pendingMasters = order.master_connections.filter(mc => mc.work_status === 'waiting'); + + return { + ...order.toObject(), + master_connections: pendingMasters, + customer: customer || null, + allocated_sensors: allocatedSensors, + }; + }) + ); + + return reply.send({ + status_code: 200, + message: "Pending orders fetched successfully", + data: ordersWithDetails, + }); + + } catch (err) { + console.error("Error fetching pending orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +// exports.getCompleteOrdersByInstallationAndTeamMember = async (req, reply) => { +// try { +// const { installationId, teamMemberId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } +// if (!teamMemberId) { +// return reply.status(400).send({ error: "teamMemberId is required" }); +// } + +// // Fetch orders matching installationId and assignedTeamMembers +// const orders = await Order.find({ +// installationId, +// assignedTeamMembers: teamMemberId +// }); + +// if (!orders.length) { +// 
return reply.send({ +// status_code: 200, +// message: "No orders found for this installation and team member", +// data: [], +// }); +// } + +// const uniqueCustomersMap = new Map(); + +// // Build unique customerId-based map +// for (const order of orders) { +// if (!uniqueCustomersMap.has(order.customerId)) { +// uniqueCustomersMap.set(order.customerId, order); +// } +// } + +// let uniqueOrders = Array.from(uniqueCustomersMap.values()); + +// // ✅ Filter orders that have at least one pending master_connection +// uniqueOrders = uniqueOrders.filter(order => +// Array.isArray(order.master_connections) && +// order.master_connections.some(mc => mc.work_status === 'complete') +// ); + +// if (!uniqueOrders.length) { +// return reply.send({ +// status_code: 200, +// message: "No pending orders found for this installation and team member", +// data: [], +// }); +// } + +// // Enrich and also filter master_connections inside each order +// const ordersWithDetails = await Promise.all( +// uniqueOrders.map(async (order) => { +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// // Keep only master_connections with work_status === 'pending' +// const pendingMasters = order.master_connections.filter(mc => mc.work_status === 'complete'); + +// return { +// ...order.toObject(), +// master_connections: pendingMasters, +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }; +// }) +// ); + +// return reply.send({ +// status_code: 200, +// message: "Pending orders fetched successfully", +// data: ordersWithDetails, +// }); + +// } catch (err) { +// console.error("Error fetching pending orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +exports.getCompleteOrdersByInstallationAndTeamMember = async (req, reply) => { + 
try { + const { installationId, teamMemberId } = req.params; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + if (!teamMemberId) { + return reply.status(400).send({ error: "teamMemberId is required" }); + } + + // Fetch orders matching installationId and assignedTeamMembers + const orders = await Order.find({ + installationId, + assignedTeamMembers: teamMemberId + }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation and team member", + data: [], + }); + } + + const uniqueCustomersMap = new Map(); + + // Build unique customerId-based map + for (const order of orders) { + if (!uniqueCustomersMap.has(order.customerId)) { + uniqueCustomersMap.set(order.customerId, order); + } + } + + let uniqueOrders = Array.from(uniqueCustomersMap.values()); + + // ✅ Filter orders that have at least one master_connection with work_status === 'complete' + uniqueOrders = uniqueOrders.filter(order => + Array.isArray(order.master_connections) && + order.master_connections.some(mc => mc.work_status === 'complete') + ); + + if (!uniqueOrders.length) { + return reply.send({ + status_code: 200, + message: "No complete orders found for this installation and team member", + data: [], + }); + } + + // Enrich orders and add work_status at top level + const ordersWithDetails = await Promise.all( + uniqueOrders.map(async (order) => { + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Keep only master_connections with work_status === 'complete' + const completeMasters = order.master_connections.filter(mc => mc.work_status === 'complete'); + + // Take work_status from the first matched master (which will be 'complete') + const work_status = completeMasters[0]?.work_status || null; + + return { + 
...order.toObject(), + master_connections: completeMasters, + work_status, // add at top level + customer: customer || null, + allocated_sensors: allocatedSensors, + }; + }) + ); + + return reply.send({ + status_code: 200, + message: "Complete orders fetched successfully", + data: ordersWithDetails, + }); + + } catch (err) { + console.error("Error fetching complete orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + +// exports.getManagerPendingOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Step 1: Fetch orders matching installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// // Step 2: Filter orders to keep only those having at least one master_connection with work_status === 'pending' +// const ordersWithPendingMasters = []; + +// for (const order of orders) { +// const pendingMasters = (order.master_connections || []).filter(mc => mc.work_status === 'pending'); + +// if (pendingMasters.length) { +// // Fetch customer details +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// // Fetch allocated sensors (status blocked) +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// // Build response object +// ordersWithPendingMasters.push({ +// ...order.toObject(), +// master_connections: pendingMasters, // keep only pending masters +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }); +// } +// } + +// if (!ordersWithPendingMasters.length) { +// return reply.send({ +// status_code: 200, +// message: "No pending 
master connections found for this installation", +// data: [], +// }); +// } + +// return reply.send({ +// status_code: 200, +// message: "Pending orders fetched successfully", +// data: ordersWithPendingMasters, +// }); + +// } catch (err) { +// console.error("Error fetching pending orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +exports.getManagerPendingOrdersByInstallationId = async (req, reply) => { + try { + const { installationId } = req.params; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + + // Step 1: Fetch orders matching installationId + const orders = await Order.find({ installationId }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation", + data: [], + }); + } + + const ordersWithPendingMasters = []; + + for (const order of orders) { + const pendingMasters = (order.master_connections || []).filter(mc => mc.work_status === 'pending'); + + if (pendingMasters.length) { + // Fetch customer details + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + // Fetch allocated sensors (status blocked) + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Take work_status from the first pending master (since they all have 'pending') + const work_status = pendingMasters[0].work_status; + + // Build the order object and add work_status beside surveyId + ordersWithPendingMasters.push({ + ...order.toObject(), + master_connections: pendingMasters, // keep only pending masters + work_status, // add work_status beside surveyId (top-level field) + customer: customer || null, + allocated_sensors: allocatedSensors, + }); + } + } + + if (!ordersWithPendingMasters.length) { + return reply.send({ + status_code: 200, + message: "No pending master connections found for this 
installation", + data: [], + }); + } + + return reply.send({ + status_code: 200, + message: "Pending orders fetched successfully", + data: ordersWithPendingMasters, + }); + + } catch (err) { + console.error("Error fetching pending orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +// exports.getWaitingManagerPendingOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Step 1: Fetch orders matching installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// // Step 2: Filter orders to keep only those having at least one master_connection with work_status === 'pending' +// const ordersWithPendingMasters = []; + +// for (const order of orders) { +// const pendingMasters = (order.master_connections || []).filter(mc => mc.work_status === 'waiting'); + +// if (pendingMasters.length) { +// // Fetch customer details +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// // Fetch allocated sensors (status blocked) +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// // Build response object +// ordersWithPendingMasters.push({ +// ...order.toObject(), +// master_connections: pendingMasters, // keep only pending masters +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }); +// } +// } + +// if (!ordersWithPendingMasters.length) { +// return reply.send({ +// status_code: 200, +// message: "No pending master connections found for this installation", +// data: [], +// }); +// } + +// return reply.send({ +// status_code: 200, +// message: 
"Pending orders fetched successfully", +// data: ordersWithPendingMasters, +// }); + +// } catch (err) { +// console.error("Error fetching pending orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +// exports.getCompleteOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// // Fetch orders with the matching installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// const uniqueCustomersMap = new Map(); + +// // Build unique customerId-based map +// for (const order of orders) { +// if (!uniqueCustomersMap.has(order.customerId)) { +// uniqueCustomersMap.set(order.customerId, order); +// } +// } + +// // Only keep one order per customerId +// let uniqueOrders = Array.from(uniqueCustomersMap.values()); + +// // ✅ Filter: keep only those where work_status is "pending" +// uniqueOrders = uniqueOrders.filter(order => order.work_status === 'complete'); + +// // If nothing left after filtering, return empty +// if (!uniqueOrders.length) { +// return reply.send({ +// status_code: 200, +// message: "No pending orders found for this installation", +// data: [], +// }); +// } + +// // Enrich with customer and sensor info +// const ordersWithDetails = await Promise.all( +// uniqueOrders.map(async (order) => { +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// return { +// ...order.toObject(), +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }; +// }) +// ); + +// return 
reply.send({ +// status_code: 200, +// message: "Complete orders fetched successfully", +// data: ordersWithDetails, +// }); + +// } catch (err) { +// console.error("Error fetching orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +// exports.getOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + + + +// // Build query — do NOT filter by work_status yet +// const query = { installationId }; + +// const orders = await Order.find(query); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation and customer", +// data: [], +// }); +// } + +// const ordersWithDetails = []; + +// for (const order of orders) { +// // Ensure work_status is set +// if (!order.work_status || order.work_status.trim() === "") { +// order.work_status = "active"; +// await order.save(); +// } + +// // ✅ Only push if work_status is "active" +// if (order.work_status === "active") { +// const customer = await User.findOne({ customerId: order.customerId }).lean(); +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// ordersWithDetails.push({ +// ...order.toObject(), +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }); +// } +// } + +// return reply.send({ +// status_code: 200, +// message: "Orders fetched successfully", +// data: ordersWithDetails, +// }); + +// } catch (err) { +// console.error("Error fetching orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + + + + +// exports.updateWorkStatusByInstallationId = async (req, reply) => { +// try { +// const { installationId, customerId } = req.params; +// const { work_status } = req.body; 
// 🟢 pass in body + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// if (!customerId) { +// return reply.status(400).send({ error: "customerId is required" }); +// } + +// if (!work_status || !['active', 'pending', 'complete'].includes(work_status)) { +// return reply.status(400).send({ error: "Valid work_status is required: active, pending or complete" }); +// } + +// // Find all orders for this installation + customer +// const orders = await Order.find({ installationId, customerId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation and customer", +// data: [], +// }); +// } + +// // Update all found orders to new work_status +// for (const order of orders) { +// order.work_status = work_status; +// await order.save(); +// } + +// // After update, fetch all with updated work_status only +// const updatedOrders = await Order.find({ installationId, customerId, work_status }); + +// const ordersWithDetails = await Promise.all( +// updatedOrders.map(async (order) => { +// const customer = await User.findOne({ customerId: order.customerId }).lean(); +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// return { +// ...order.toObject(), +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }; +// }) +// ); + +// return reply.send({ +// status_code: 200, +// message: `Orders updated to work_status '${work_status}' successfully`, +// data: ordersWithDetails, +// }); + +// } catch (err) { +// console.error("Error updating work_status:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +exports.getWaitingManagerPendingOrdersByInstallationId = async (req, reply) => { + try { + const { installationId } = req.params; + + if (!installationId) { + return 
reply.status(400).send({ error: "installationId is required" }); + } + + // Step 1: Fetch orders matching installationId + const orders = await Order.find({ installationId }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation", + data: [], + }); + } + + const ordersWithPendingMasters = []; + + for (const order of orders) { + // Filter master_connections with work_status === 'waiting' + const pendingMasters = (order.master_connections || []).filter(mc => mc.work_status === 'waiting'); + + if (pendingMasters.length) { + // Fetch customer details + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + // Fetch allocated sensors (status blocked) + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Take work_status from the first pending master + const work_status = pendingMasters[0].work_status; + + // Build the response object + ordersWithPendingMasters.push({ + ...order.toObject(), + master_connections: pendingMasters, // keep only pending masters + work_status, // add at the top level + customer: customer || null, + allocated_sensors: allocatedSensors, + }); + } + } + + if (!ordersWithPendingMasters.length) { + return reply.send({ + status_code: 200, + message: "No pending master connections found for this installation", + data: [], + }); + } + + return reply.send({ + status_code: 200, + message: "Pending orders fetched successfully", + data: ordersWithPendingMasters, + }); + + } catch (err) { + console.error("Error fetching pending orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +// exports.getCompleteManagerPendingOrdersByInstallationId = async (req, reply) => { +// try { +// const { installationId } = req.params; + +// if (!installationId) { +// return reply.status(400).send({ error: "installationId is required" }); +// } + +// 
// Step 1: Fetch orders matching installationId +// const orders = await Order.find({ installationId }); + +// if (!orders.length) { +// return reply.send({ +// status_code: 200, +// message: "No orders found for this installation", +// data: [], +// }); +// } + +// // Step 2: Filter orders to keep only those having at least one master_connection with work_status === 'pending' +// const ordersWithPendingMasters = []; + +// for (const order of orders) { +// const pendingMasters = (order.master_connections || []).filter(mc => mc.work_status === 'complete'); + +// if (pendingMasters.length) { +// // Fetch customer details +// const customer = await User.findOne({ customerId: order.customerId }).lean(); + +// // Fetch allocated sensors (status blocked) +// const allocatedSensors = await Insensors.find({ +// storeId: order.storeId, +// customerId: order.customerId, +// status: "blocked", +// }).lean(); + +// // Build response object +// ordersWithPendingMasters.push({ +// ...order.toObject(), +// master_connections: pendingMasters, // keep only pending masters +// customer: customer || null, +// allocated_sensors: allocatedSensors, +// }); +// } +// } + +// if (!ordersWithPendingMasters.length) { +// return reply.send({ +// status_code: 200, +// message: "No pending master connections found for this installation", +// data: [], +// }); +// } + +// return reply.send({ +// status_code: 200, +// message: "Complete orders fetched successfully", +// data: ordersWithPendingMasters, +// }); + +// } catch (err) { +// console.error("Error fetching pending orders:", err); +// return reply.status(500).send({ error: "Internal server error" }); +// } +// }; + +exports.getCompleteManagerPendingOrdersByInstallationId = async (req, reply) => { + try { + const { installationId } = req.params; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + + // Step 1: Fetch orders matching installationId + const orders = await Order.find({ 
installationId }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation", + data: [], + }); + } + + // Step 2: Filter orders to keep only those having at least one master_connection with work_status === 'complete' + const ordersWithPendingMasters = []; + + for (const order of orders) { + const completeMasters = (order.master_connections || []).filter(mc => mc.work_status === 'complete'); + + if (completeMasters.length) { + // Fetch customer details + const customer = await User.findOne({ customerId: order.customerId }).lean(); + + // Fetch allocated sensors (status blocked) + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + // Add work_status at top level (from the first complete master) + const work_status = completeMasters[0]?.work_status || null; + + // Build response object + ordersWithPendingMasters.push({ + ...order.toObject(), + master_connections: completeMasters, // keep only complete masters + work_status, // add top-level work_status + customer: customer || null, + allocated_sensors: allocatedSensors, + }); + } + } + + if (!ordersWithPendingMasters.length) { + return reply.send({ + status_code: 200, + message: "No complete master connections found for this installation", + data: [], + }); + } + + return reply.send({ + status_code: 200, + message: "Complete orders fetched successfully", + data: ordersWithPendingMasters, + }); + + } catch (err) { + console.error("Error fetching complete orders:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + +exports.updateWorkStatusByInstallationId = async (req, reply) => { + try { + const { installationId, customerId, teamMemberId } = req.params; + const { work_status, hardwareId } = req.body; + + if (!installationId) { + return reply.status(400).send({ error: "installationId is required" }); + } + if (!customerId) { + return 
reply.status(400).send({ error: "customerId is required" }); + } + if (!teamMemberId) { + return reply.status(400).send({ error: "teamMemberId is required" }); + } + if (!hardwareId) { + return reply.status(400).send({ error: "hardwareId is required" }); + } + if (!work_status || !['active', 'pending', 'complete'].includes(work_status)) { + return reply.status(400).send({ error: "Valid work_status is required: active, pending or complete" }); + } + + // ✅ Find orders that match installationId, customerId, assignedTeamMembers, and master_connections.hardwareId + const orders = await Order.find({ + installationId, + customerId, + assignedTeamMembers: teamMemberId, + 'master_connections.hardwareId': hardwareId + }); + + if (!orders.length) { + return reply.send({ + status_code: 200, + message: "No orders found for this installation, customer, team member, and hardwareId", + data: [], + }); + } + + // 🔧 Update work_status in master_connections inside each order + for (const order of orders) { + let modified = false; + if (Array.isArray(order.master_connections)) { + for (const mc of order.master_connections) { + if (mc.hardwareId === hardwareId) { + mc.work_status = work_status; + modified = true; + } + } + } + if (modified) { + await order.save(); + } + } + + // ✅ Fetch updated orders to return + const updatedOrders = await Order.find({ + installationId, + customerId, + assignedTeamMembers: teamMemberId, + 'master_connections.hardwareId': hardwareId + }).lean(); + + // 🔹 Enrich each order with customer and allocated sensors + const ordersWithDetails = await Promise.all( + updatedOrders.map(async (order) => { + const customer = await User.findOne({ customerId: order.customerId }).lean(); + const allocatedSensors = await Insensors.find({ + storeId: order.storeId, + customerId: order.customerId, + status: "blocked", + }).lean(); + + return { + ...order, + customer: customer || null, + allocated_sensors: allocatedSensors, + }; + }) + ); + + return reply.send({ + 
status_code: 200, + message: `Orders updated: master_connections with hardwareId '${hardwareId}' now has work_status '${work_status}'`, + data: ordersWithDetails, + }); + + } catch (err) { + console.error("Error updating work_status:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + + + + + +exports.getallocatedsensorstouser= async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.status(400).send({ error: "customerId is required" }); + } + + // Fetch orders with the matching storeId + const allocated_iots = await Insensors.find({ customerId }); + + if (!allocated_iots.length) { + return reply.send({ + status_code: 200, + message: "No sensors found for this store", + data: [], + }); + } + + + + return reply.send({ + status_code: 200, + message: "iots fetched successfully", + data: allocated_iots, + }); + } catch (err) { + console.error("Error fetching iots:", err); + return reply.status(500).send({ error: "Internal server error" }); + } +}; + + + +const crypto = require("crypto"); + + + +exports.replaceAndRepair = async (req, reply) => { + try { + const { customerId } = req.params; + const { items } = req.body; + + if (!customerId || !Array.isArray(items) || items.length === 0) { + return reply.code(400).send({ error: "customerId and items[] are required" }); + } + + const replacements = []; + + for (const item of items) { + const { type, hardwareId } = item; + console.log(`🔍 Processing ${type} with hardwareId: ${hardwareId}`); + + const existing = await Insensors.findOne({ hardwareId, type, customerId }); + + if (!existing) { + console.warn(`⚠️ No existing ${type} found for hardwareId: ${hardwareId}`); + continue; + } + + // Mark existing as repair + await Insensors.updateOne( + { _id: existing._id }, + { $set: { status: "repair", outforrepairdate: new Date().toISOString() } } + ); + console.log(`🛠 Marked old ${type} ${hardwareId} as repair`); + + // Find replacement + 
const replacement = await Insensors.findOne({ + type, + status: "available", + storeId: existing.storeId + }); + + if (!replacement) { + console.warn(`⚠️ No available replacement found for ${type} at store ${existing.storeId}`); + continue; + } + + console.log(`✅ Found replacement ${replacement.hardwareId} for ${type}`); + + const updateData = { + status: "blocked", + customerId: existing.customerId, + tankName: existing.tankName, + tankLocation: existing.tankLocation, + storeId: existing.storeId, + model: existing.model, + tankhardwareId: existing.tankhardwareId, + masterId: existing.masterId, + masterName: existing.masterName, + location: existing.location + }; + + await Insensors.updateOne({ _id: replacement._id }, { $set: updateData }); + console.log(`✅ Updated replacement ${type} ${replacement.hardwareId} with previous config`); + + // === Cascade updates === + if (type === "master") { + const connectedSlaves = await Insensors.find({ + type: "slave", + connected_to: hardwareId, + customerId + }); + + for (const slave of connectedSlaves) { + await Insensors.updateOne( + { _id: slave._id }, + { $set: { connected_to: replacement.hardwareId } } + ); + console.log(`🔁 Updated slave ${slave.hardwareId} → connected_to: ${replacement.hardwareId}`); + } + } + + if (type === "slave") { + await Insensors.updateOne( + { _id: replacement._id }, + { $set: { connected_to: existing.connected_to } } + ); + console.log(`🔁 New slave ${replacement.hardwareId} → connected_to: ${existing.connected_to}`); + + const connectedSensors = await Insensors.find({ + type: "sensor", + connected_to: hardwareId, + customerId + }); + + for (const sensor of connectedSensors) { + await Insensors.updateOne( + { _id: sensor._id }, + { $set: { connected_to: replacement.hardwareId } } + ); + console.log(`🔁 Updated sensor ${sensor.hardwareId} → connected_to: ${replacement.hardwareId}`); + } + } + + if (type === "sensor") { + await Insensors.updateOne( + { _id: replacement._id }, + { $set: { 
connected_to: existing.connected_to } } + ); + console.log(`🔁 Sensor ${replacement.hardwareId} connected to same slave: ${existing.connected_to}`); + } + + // Log replacement + replacements.push({ + type, + oldHardwareId: hardwareId, + newHardwareId: replacement.hardwareId + }); + + console.log(`📦 Logged replacement: ${type} ${hardwareId} ➜ ${replacement.hardwareId}`); + } + + console.log("🧾 Final replacements:", replacements); + + // Create repair log + const packageId = "PKG-" + crypto.randomBytes(4).toString("hex").toUpperCase(); + const otp = Math.floor(100000 + Math.random() * 900000).toString(); + + const repairLog = new Repairorder({ + customerId, + packageId, + otp, + replacements, + createdAt: new Date() + }); + + await repairLog.save(); + console.log("✅ RepairLog saved"); + + return reply.send({ + status_code: 200, + message: "Repaired and replaced successfully", + data: { + packageId, + otp, + replacements, + createdAt: repairLog.createdAt + } + }); + } catch (err) { + console.error("❌ Error during replacement:", err); + return reply.code(500).send({ error: "Internal Server Error" }); + } +}; + + + + diff --git a/src/controllers/supplierController.js b/src/controllers/supplierController.js index 884fbf67..5cec9c0a 100644 --- a/src/controllers/supplierController.js +++ b/src/controllers/supplierController.js @@ -4,15 +4,20 @@ const fastify = require("fastify")({ const boom = require("boom"); const customJwtAuth = require("../customAuthJwt"); -const bcrypt = require("bcrypt"); +//const bcrypt = require("bcrypt"); +const bcrypt = require('bcryptjs'); + const saltRounds = 10; //Get the data models -const { Supplier ,ProfilePicture, generateSupplierId, DeliveryBoy} = require('../models/supplier'); +const { RecurringRequestedBooking,RequestedBooking,Supplier , generateSupplierId, DeliveryBoy} = require('../models/supplier'); +const { Tankerbooking} = require("../models/tankers") +// Get Data Models +const { User,Counter, 
generateBookingId,resetCounter,generateCustomerId,ProfilePicture, AddTeamMembers,Cart} = require('../models/User') async function bcryptPassword(password) { encryptedPwd = bcrypt.hash(password, saltRounds); return encryptedPwd; @@ -92,6 +97,9 @@ exports.loginDeliveryBoy = async (req) => { password: req.body.password, phone: req.body.phone, description: req.body.description, + bussinessname: req.body.description, + registration_number: req.body.description, + years_in_business: req.body.description, profile: { firstName: req.body.firstName, lastName: req.body.lastName, @@ -244,4 +252,611 @@ exports.loginDeliveryBoy = async (req) => { throw boom.boomify(err); } }; - \ No newline at end of file + + + + +const mongoose = require('mongoose'); + +exports.respondToRequestedBooking = async (req, reply) => { + const { _id } = req.params; + const { action, supplierId } = req.body; + + if (!mongoose.Types.ObjectId.isValid(_id)) { + return reply.code(400).send({ message: "Invalid requested booking ID" }); + } + + if (!["accept", "reject"].includes(action)) { + return reply.code(400).send({ message: "Action must be 'accept' or 'reject'" }); + } + + try { + const booking = await RequestedBooking.findById(_id); + + if (!booking) { + return reply.code(404).send({ message: "Requested booking not found" }); + } + + const supplierEntry = booking.requested_suppliers.find(s => s.supplierId === supplierId); + + if (!supplierEntry) { + return reply.code(404).send({ message: "Supplier not found in this booking" }); + } + + // Update custom_field (status) for that supplier + supplierEntry.status = action === "accept" ? 
"accepted_by_supplier" : "rejected_by_supplier"; + + await booking.save(); + + return reply.code(200).send({ + status_code: 200, + message: `Booking ${action}ed by supplier successfully`, + data: booking + }); + + } catch (err) { + console.error(err); + throw boom.internal("Failed to update supplier response", err); + } +}; + + +// controllers/supplier.controller.js +// const boom = require("@hapi/boom"); +// const mongoose = require("mongoose"); + +// // MODELS (adjust paths/names to your project) +// const RecurringRequestedBooking = require("../models/recurringRequestedBooking.model"); +// const TankerBooking = require("../models/tankerBooking.model"); + +// // Common party models you likely have in your DB: +// const Customer = require("../models/customer.model"); // e.g., { customerId, name, phone, address, latitude, longitude } +// const Supplier = require("../models/supplier.model"); // e.g., { supplierId, name, phone, tankerName, address, latitude, longitude } + +// const parseNumber = (v, def = 0) => { +// if (v === null || v === undefined) return def; +// const n = parseFloat(String(v).replace(/,/g, "")); +// return Number.isFinite(n) ? 
n : def; +// }; +// const mkBookingId = (prefix = "RBK") => { +// const ts = new Date().toISOString().replace(/[-:TZ.]/g, "").slice(0, 14); +// const rnd = Math.floor(Math.random() * 1e6).toString().padStart(6, "0"); +// return `${prefix}-${ts}-${rnd}`; +// }; +// const isIsoYMD = (s) => /^\d{4}-\d{2}-\d{2}$/.test(s); + +// // "2025-10-21" -> "21-Oct-2025" (to match your old saved sample) +// const formatDDMonYYYY = (isoYmd) => { +// if (!isIsoYMD(isoYmd)) return isoYmd; +// const [y, m, d] = isoYmd.split("-").map(Number); +// const mon = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"][(m || 1) - 1]; +// return `${String(d).padStart(2,"0")}-${mon}-${y}`; +// }; + +// exports.respondToRecurringRequestedBooking = async (req, reply) => { +// const { _id } = req.params; +// const { action, supplierId } = req.body; + +// if (!mongoose.Types.ObjectId.isValid(_id)) { +// return reply.code(400).send({ message: "Invalid recurring requested booking ID" }); +// } +// if (!["accept", "reject"].includes(action)) { +// return reply.code(400).send({ message: "Action must be 'accept' or 'reject'" }); +// } + +// try { +// const booking = await RecurringRequestedBooking.findById(_id); +// if (!booking) { +// return reply.code(404).send({ message: "Recurring requested booking not found" }); +// } + +// const supplierEntry = booking.requested_suppliers.find((s) => s.supplierId === supplierId); +// if (!supplierEntry) { +// return reply.code(404).send({ message: "Supplier not found in this booking" }); +// } + +// // Update supplier response on the recurring request +// supplierEntry.status = action === "accept" ? 
"accepted" : "rejected"; +// await booking.save(); + +// if (action === "reject") { +// return reply.code(200).send({ +// status_code: 200, +// message: "Recurring booking rejected by supplier successfully", +// data: booking, +// }); +// } + +// // ACCEPT: build per-date TankerBooking docs with rich fields +// const allDates = Array.isArray(booking.dates) ? booking.dates.filter(isIsoYMD) : []; +// if (!allDates.length) { +// return reply.code(400).send({ message: "No valid ISO dates found in booking.dates" }); +// } + +// // Preload related party info to fill address/phones/names +// const [customerDoc, supplierDoc] = await Promise.all([ +// User.findOne({ customerId: booking.customerId }).lean(), +// Supplier.findOne({ supplierId: supplierId }).lean(), +// ]); + +// // Pull commonly needed values (tolerant defaults to match legacy) +// const customerPhone = customerDoc?.phone ?? null; +// const supplierPhone = supplierDoc?.phone ?? null; + +// const customerName = customerDoc?.username ?? customerDoc?.displayName ?? ""; +// const supplierName = supplierDoc?.suppliername ?? supplierDoc?.companyName ?? ""; + +// const tankerName = supplierDoc?.tankerName ?? null; +// const tankName = null; // if you associate a tank per-customer, populate from your Tank model here +// const tankLocation = null; // same as above + +// // prefer customer address (your legacy sample stores a geocoded customer address) +// const address = customerDoc?.profile.address1 ?? null; +// const latitude = customerDoc?.latitude ?? null; +// const longitude = customerDoc?.longitude ?? null; + +// // price: from supplier’s quoted_amount in this request (fallback null) +// const price = (supplierEntry?.quoted_amount ?? null) !== null +// ? 
String(supplierEntry.quoted_amount) +// : null; + +// // numeric fields +// const numericCapacity = parseNumber(booking.capacity); +// const numericQuantity = parseNumber(booking.quantity); +// const totalRequired = Number.isFinite(booking.total_required_capacity) +// ? booking.total_required_capacity +// : numericCapacity * numericQuantity; + +// // dedupe check +// const existing = await Tankerbooking.find( +// { +// customerId: booking.customerId, +// supplierId: supplierId, +// date: { $in: allDates }, +// time: booking.time, // keep your stored time format intact +// }, +// { date: 1 } +// ).lean(); + +// const existingSet = new Set((existing || []).map(e => e.date)); +// const newDates = allDates.filter(d => !existingSet.has(d)); +// if (!newDates.length) { +// return reply.code(200).send({ +// status_code: 200, +// message: "All dates already have bookings; nothing to create.", +// data: { created: 0, skippedExistingDates: allDates }, +// }); +// } + +// // ---------- BUILD ENRICHED DOCS (matches your legacy example fields) ---------- +// const todayIso = new Date().toISOString().slice(0, 10); + +// const docs = newDates.map((d) => ({ +// // Required/IDs +// customerId: booking.customerId, +// supplierId: supplierId, +// bookingid: mkBookingId("RBK"), + +// // Legacy display & logistics +// tankName, // null (fill if you link tank per-customer) +// tankLocation, // null (fill if available) +// tankerName, // from Supplier if present + +// // Dates/times (kept both ISO & legacy formats as you showed) +// dateOfOrder: todayIso, // "2025-09-10" +// expectedDateOfDelivery: formatDDMonYYYY(d), // "21-Oct-2025" style (legacy sample) +// date: d, // keep ISO in `date` too +// time: booking.time, // keep your request time as-is + +// // Water & capacity +// type_of_water: booking.type_of_water, +// typeofwater: booking.type_of_water, // legacy field name kept too +// capacity: booking.capacity, // e.g., "100" or "10,000 L" +// quantity: booking.quantity, // string 
+// total_required_capacity: totalRequired, + +// // Money / status +// price: price, // from quoted_amount (string) or null +// payment_status: "due", +// orderStatus: "accepted", + +// // Contacts & names +// address: address, // from customer +// customerPhone: customerPhone, +// supplierPhone: supplierPhone, +// customerName: customerName, +// supplierName: supplierName, + +// // Delivery defaults (match your legacy doc) +// delivery_agent: "null", +// delivery_agent_mobile: "null", +// delivery_agent_alternative_mobile: "null", + +// // Metering defaults +// initial_water_level: "null", +// final_water_level: "null", +// start_time: "null", +// stop_time: "null", +// quantityDelivered: null, + +// // Accounting defaults +// amount_paid: null, +// amount_due: null, +// distrubance_price: "none", +// amount_difference: "none", +// payment_mode: null, +// remarks: null, + +// // Device/geo defaults +// tankerRunningStatus: "0", +// latitude: latitude ?? undefined, // keep same field names as your legacy doc +// longitude: longitude ?? undefined, // if not available, omit field + +// // Misc you already store +// frequency: booking.frequency, +// weekly_count: booking.weekly_count ?? 1, +// deliveredDate: null, +// distrubance_status: "0", +// })); +// // --------------------------------------------------------------------------- + +// // Insert without transactions, tolerate duplicates if unique index exists +// let insertedCount = 0; +// let duplicateErrors = 0; +// try { +// const res = await Tankerbooking.collection.insertMany(docs, { ordered: false }); +// insertedCount = res.insertedCount || 0; +// } catch (e) { +// if (e && e.writeErrors && Array.isArray(e.writeErrors)) { +// insertedCount = e.result?.nInserted ?? 0; +// duplicateErrors = e.writeErrors.length; +// } else { +// throw e; +// } +// } + +// return reply.code(200).send({ +// status_code: 200, +// message: `Recurring booking accepted. 
Created ${insertedCount} tanker booking(s).`, +// data: { +// createdDates: newDates.slice(0, insertedCount), +// skippedExistingDates: allDates.filter(d => existingSet.has(d)), +// duplicateConflicts: duplicateErrors, +// }, +// }); +// } catch (err) { +// console.error(err); +// throw boom.internal("Failed to update recurring supplier response", err); +// } +// }; + + + + +// controllers/supplier.controller.js (only the changed parts shown for brevity) +// const boom = require("@hapi/boom"); +// const mongoose = require("mongoose"); + +// const RecurringRequestedBooking = require("../models/recurringRequestedBooking.model"); +// const TankerBooking = require("../models/tankerBooking.model"); +// const Customer = require("../models/customer.model"); +// const Supplier = require("../models/supplier.model"); + +// ---------- helpers (IST + formatting) ---------- +const parseNumber = (v, def = 0) => { + if (v === null || v === undefined) return def; + const n = parseFloat(String(v).replace(/,/g, "")); + return Number.isFinite(n) ? 
n : def; +}; +const mkBookingId = (prefix = "RBK") => { + const ts = new Date().toISOString().replace(/[-:TZ.]/g, "").slice(0, 14); + const rnd = Math.floor(Math.random() * 1e6).toString().padStart(6, "0"); + return `${prefix}-${ts}-${rnd}`; +}; +const isIsoYMD = (s) => /^\d{4}-\d{2}-\d{2}$/.test(s); +const MON = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]; + +// Return a Date adjusted to IST (UTC+5:30) without changing the original instant +const toIST = (d = new Date()) => { + const utc = d.getTime() + (d.getTimezoneOffset() * 60000); + // IST = UTC + 5:30 + return new Date(utc + (5 * 60 + 30) * 60000); +}; + +// Format Date -> "DD-Mon-YYYY - HH:MM" in IST +const fmtDDMonYYYY_HHMM = (dateObj) => { + const dd = String(dateObj.getDate()).padStart(2, "0"); + const mon = MON[dateObj.getMonth()]; + const yyyy = dateObj.getFullYear(); + const hh = String(dateObj.getHours()).padStart(2, "0"); + const mm = String(dateObj.getMinutes()).padStart(2, "0"); + return `${dd}-${mon}-${yyyy} - ${hh}:${mm}`; +}; + +// Extract "HH:MM" (24h) from arbitrary string like "21-Nov-2025 - 14:37" or "4:00 PM to 6:00 PM" +// If not found, default "00:00" +const extractHHMM = (timeStr) => { + if (!timeStr) return "00:00"; + // First try 24h "14:37" + const m24 = timeStr.match(/\b(\d{1,2}):(\d{2})\b/); + if (m24) { + const h = String(Math.min(23, parseInt(m24[1],10))).padStart(2,"0"); + const m = m24[2]; + return `${h}:${m}`; + } + // crude parse for "4:00 PM" → 16:00 + const ampm = timeStr.match(/(\d{1,2}):(\d{2})\s*(AM|PM)/i); + if (ampm) { + let h = parseInt(ampm[1],10); + const m = ampm[2]; + const p = ampm[3].toUpperCase(); + if (p === "PM" && h !== 12) h += 12; + if (p === "AM" && h === 12) h = 0; + return `${String(h).padStart(2,"0")}:${m}`; + } + return "00:00"; +}; + +// Combine ISO date "YYYY-MM-DD" + a time string → IST "DD-Mon-YYYY - HH:MM" +const fmtFromISOAndTime = (isoYmd, timeStr) => { + if (!isIsoYMD(isoYmd)) return isoYmd; // fallback + const 
[y, m, d] = isoYmd.split("-").map(Number); + const [hh, mm] = extractHHMM(timeStr).split(":").map(Number); + // Construct a Date in UTC for that local time, then convert to IST display + const dt = new Date(Date.UTC(y, (m - 1), d, hh, mm, 0)); + // We only care about display in IST: + return fmtDDMonYYYY_HHMM(toIST(dt)); +}; +// ---- +// -------------------------------------------- + +// ---------- add these helpers near your other helpers ---------- +const to12h = (h24, m) => { + let h = h24 % 12 || 12; + const ampm = h24 >= 12 ? "PM" : "AM"; + return `${h}:${String(m).padStart(2,"0")} ${ampm}`; +}; + +// Extract first time from any string: supports "14:37", "4:00 PM", "21-Nov-2025 - 14:37", "4:00 PM to 6:00 PM" +const parseFirstTimeToHM = (timeStr) => { + if (!timeStr) return { h: 0, m: 0 }; + // Try explicit 12h with AM/PM + const ampm = timeStr.match(/(\d{1,2}):(\d{2})\s*(AM|PM)/i); + if (ampm) { + let h = parseInt(ampm[1], 10); + const m = parseInt(ampm[2], 10); + const p = ampm[3].toUpperCase(); + if (p === "PM" && h !== 12) h += 12; + if (p === "AM" && h === 12) h = 0; + return { h, m }; + } + // Try any 24h HH:MM in the string + const m24 = timeStr.match(/\b(\d{1,2}):(\d{2})\b/); + if (m24) { + const h = Math.max(0, Math.min(23, parseInt(m24[1], 10))); + const m = Math.max(0, Math.min(59, parseInt(m24[2], 10))); + return { h, m }; + } + return { h: 0, m: 0 }; +}; + +// If already "X to Y" (case-insensitive), keep it. Otherwise, make a slot of `windowHours` starting at first parsed time. 
+const normalizeTimeForLegacy = (timeStr, windowHours = 2) => { + if (typeof timeStr === "string" && / to /i.test(timeStr)) { + return timeStr; // already in "4:00 PM to 6:00 PM" + } + const { h, m } = parseFirstTimeToHM(timeStr); + const start = to12h(h, m); + const endDate = new Date(Date.UTC(2000, 0, 1, h, m)); // dummy date, add hours + endDate.setUTCHours(endDate.getUTCHours() + windowHours); + const end = to12h(endDate.getUTCHours(), endDate.getUTCMinutes()); + return `${start} to ${end}`; +}; + +// ---- Booking ID helpers (IST-based) ---- +// Get YYYYMMDD in IST (UTC+05:30) +const getISTDatePart = (d = new Date()) => { + const utcMs = d.getTime() + d.getTimezoneOffset() * 60000; + const ist = new Date(utcMs + (5 * 60 + 30) * 60000); + const y = ist.getFullYear(); + const m = String(ist.getMonth() + 1).padStart(2, "0"); + const day = String(ist.getDate()).padStart(2, "0"); + return `${y}${m}${day}`; // YYYYMMDD +}; + +// Make ARM id with one random digit (0-9) +const mkArmBookingIdIST = () => `ARM${getISTDatePart()}${Math.floor(Math.random() * 10)}`; + +// Ensure uniqueness in DB (recommended since you may create many in one day) +const generateUniqueArmId = async () => { + // up to 20 tries with single digit; then fall back to two digits + for (let i = 0; i < 20; i++) { + const id = mkArmBookingIdIST(); + const exists = await Tankerbooking.exists({ bookingid: id }); + if (!exists) return id; + } + // fallback with two random digits to avoid collisions + const fallback = `ARM${getISTDatePart()}${Math.floor(Math.random()*10)}${Math.floor(Math.random()*10)}`; + return fallback; +}; + +exports.respondToRecurringRequestedBooking = async (req, reply) => { + const { _id } = req.params; + const { action, supplierId } = req.body; + if (!mongoose.Types.ObjectId.isValid(_id)) { + return reply.code(400).send({ message: "Invalid recurring requested booking ID" }); + } + if (!["accept", "reject"].includes(action)) { + return reply.code(400).send({ message: "Action must be 
'accept' or 'reject'" }); + } + + try { + const booking = await RecurringRequestedBooking.findById(_id); + if (!booking) return reply.code(404).send({ message: "Recurring requested booking not found" }); + + const supplierEntry = booking.requested_suppliers.find(s => s.supplierId === supplierId); + if (!supplierEntry) { + return reply.code(404).send({ message: "Supplier not found in this booking" }); + } + + supplierEntry.status = action === "accept" ? "accepted" : "rejected"; + await booking.save(); + // 🔽 ADD THIS + const allAccepted = booking.requested_suppliers.every(s => s.status === "accepted"); + if (allAccepted) { + booking.status = "processed"; + await booking.save(); + } + + if (action === "reject") { + return reply.code(200).send({ + status_code: 200, + message: "Recurring booking rejected by supplier successfully", + data: booking, + }); + } + + const allDates = Array.isArray(booking.dates) ? booking.dates.filter(isIsoYMD) : []; + if (!allDates.length) { + return reply.code(400).send({ message: "No valid ISO dates found in booking.dates" }); + } + + const [customerDoc, supplierDoc] = await Promise.all([ + User.findOne({ customerId: booking.customerId }).lean(), + Supplier.findOne({ supplierId }).lean(), + ]); + + const customerPhone = customerDoc?.phone ?? null; + const supplierPhone = supplierDoc?.phone ?? null; + const customerName = customerDoc?.username ?? customerDoc?.displayName ?? ""; + const supplierName = supplierDoc?.suppliername ?? supplierDoc?.companyName ?? ""; + const tankerName = supplierDoc?.tankerName ?? null; + const address = customerDoc?.profile.address1 ?? null; + const latitude = customerDoc?.latitude ?? undefined; + const longitude = customerDoc?.longitude ?? undefined; + + const price = (supplierEntry?.quoted_amount ?? null) !== null + ? 
String(supplierEntry.quoted_amount) + : null; + + const numericCapacity = parseNumber(booking.capacity); + const numericQuantity = parseNumber(booking.quantity); + const totalRequired = Number.isFinite(booking.total_required_capacity) + ? booking.total_required_capacity + : numericCapacity * numericQuantity; + + // dedupe by (customerId, supplierId, date, time) + const existing = await Tankerbooking.find( + { customerId: booking.customerId, supplierId, date: { $in: allDates }, time: booking.time }, + { date: 1 } + ).lean(); + const existingSet = new Set((existing || []).map(e => e.date)); + const newDates = allDates.filter(d => !existingSet.has(d)); + if (!newDates.length) { + return reply.code(200).send({ + status_code: 200, + message: "All dates already have bookings; nothing to create.", + data: { created: 0, skippedExistingDates: allDates }, + }); + } + const legacyTime = normalizeTimeForLegacy(booking.time); + // --- FORMAT THESE THREE FIELDS EXACTLY AS LEGACY EXPECTS --- + const nowIST = toIST(new Date()); + const dateOfOrderFmt = fmtDDMonYYYY_HHMM(nowIST); // "DD-Mon-YYYY - HH:MM" + const today = new Date(); +const datePart = today.toISOString().slice(0, 10).replace(/-/g, ''); // YYYYMMDD +const randomDigit = Math.floor(Math.random() * 10); // 0–9 +const bookingId = `ARM${datePart}${randomDigit}`; + // ADD: pre-generate unique booking IDs (one per date) +const bookingIds = await Promise.all(newDates.map(() => generateUniqueArmId())); + + // ----------------------------------------------------------- + + const docs = newDates.map((d,i) => ({ + // IDs + customerId: booking.customerId, + supplierId, + bookingid: bookingIds[i], + + // Legacy fields (match your old document) + tankName: null, + tankLocation: null, + tankerName, + + dateOfOrder: dateOfOrderFmt, // e.g., "03-Sep-2025 - 13:25" + expectedDateOfDelivery: fmtFromISOAndTime(d, booking.time), // "DD-Mon-YYYY - HH:MM" + date: d, // keep ISO for backend logic + time: legacyTime, // keep whatever UI sent + 
+ type_of_water: booking.type_of_water, + typeofwater: booking.type_of_water, + capacity: booking.capacity, + quantity: booking.quantity, + total_required_capacity: totalRequired, + + price, + payment_status: "due", + orderStatus: "accepted", + + address, + customerPhone, + supplierPhone, + customerName, + supplierName, + + delivery_agent: "null", + delivery_agent_mobile: "null", + delivery_agent_alternative_mobile: "null", + + initial_water_level: "null", + final_water_level: "null", + start_time: "null", + stop_time: "null", + quantityDelivered: null, + + amount_paid: null, + amount_due: null, + distrubance_price: "none", + amount_difference: "none", + payment_mode: null, + remarks: null, + + tankerRunningStatus: "0", + latitude, + longitude, + + frequency: booking.frequency, + weekly_count: booking.weekly_count ?? 1, + deliveredDate: null, // new bookings: not delivered yet + distrubance_status: "0", + })); + + // insert (no transactions, ordered:false) + let insertedCount = 0; + let duplicateErrors = 0; + try { + const res = await Tankerbooking.collection.insertMany(docs, { ordered: false }); + insertedCount = res.insertedCount || 0; + } catch (e) { + if (e && e.writeErrors && Array.isArray(e.writeErrors)) { + insertedCount = e.result?.nInserted ?? 0; + duplicateErrors = e.writeErrors.length; + } else { + throw e; + } + } + + return reply.code(200).send({ + status_code: 200, + message: `Recurring booking accepted. 
Created ${insertedCount} tanker booking(s).`, + data: { + createdDates: newDates.slice(0, insertedCount), + skippedExistingDates: allDates.filter(d => existingSet.has(d)), + duplicateConflicts: duplicateErrors, + }, + }); + } catch (err) { + console.error(err); + throw boom.internal("Failed to update recurring supplier response", err); + } +}; diff --git a/src/controllers/supplierOrderController.js b/src/controllers/supplierOrderController.js index b771017c..d342dc92 100644 --- a/src/controllers/supplierOrderController.js +++ b/src/controllers/supplierOrderController.js @@ -619,7 +619,7 @@ exports.getAllOrderaccepted = async (req, reply) => { const customerId = req.params.customerId; try { - const orders = await Tankerbooking.find({ customerId: customerId, orderStatus: "accepted" }) + const orders = await Tankerbooking.find({ customerId: customerId, orderStatus: "pending" }) .limit(limit) .skip(startindex) .exec(); @@ -666,9 +666,10 @@ exports.getAllOrderreject = async (req, reply) => { const limit = parseInt(req.query.limit) || 100; const page = parseInt(req.query.page) || 1; const startindex = (page - 1) * limit; + const customerId = req.params.customerId //const customerId = req.params.customerId try { - await Tankerbooking.find({ orderStatus: ["rejected"]}) + await Tankerbooking.find({ customerId: customerId,orderStatus: ["rejected"]}) .limit(limit) .skip(startindex) .exec() @@ -680,6 +681,46 @@ exports.getAllOrderreject = async (req, reply) => { } }; + +exports.getAllOrdersoutfordelivery = async (req, reply) => { + const limit = parseInt(req.query.limit) || 100; + const page = parseInt(req.query.page) || 1; + const startindex = (page - 1) * limit; + const customerId = req.params.customerId + //const customerId = req.params.customerId + try { + await Tankerbooking.find({ customerId: customerId,orderStatus: ["out_for_delivery"]}) + .limit(limit) + .skip(startindex) + .exec() + .then((docs) => { + reply.send({ status_code: 200, data: docs, count: docs.length }); 
+ }) + } catch (err) { + reply.status(400).send({ message: err.message }); + } +}; + +exports.getAllOrdersdeliveryboyasigned = async (req, reply) => { + const limit = parseInt(req.query.limit) || 100; + const page = parseInt(req.query.page) || 1; + const startindex = (page - 1) * limit; + const customerId = req.params.customerId + //const customerId = req.params.customerId + try { + await Tankerbooking.find({ customerId: customerId,orderStatus: ["delivery_boy_assigned"]}) + .limit(limit) + .skip(startindex) + .exec() + .then((docs) => { + reply.send({ status_code: 200, data: docs, count: docs.length }); + }) + } catch (err) { + reply.status(400).send({ message: err.message }); + } +}; + + exports.getCustomerOrderreject = async (req, reply) => { const limit = parseInt(req.query.limit) || 100; const page = parseInt(req.query.page) || 1; @@ -782,3 +823,21 @@ exports.medicine = async (req, reply) => { } }; + + +exports.getOrdersByCustomerId = async (req, reply) => { + const { customerId } = req.query; + + if (!customerId) { + return reply.status(400).send({ message: "customerId is required" }); + } + + try { + const orders = await Tankerbooking.find({ customerId }).exec(); + reply.send({ status_code: 200, data: orders, count: orders.length }); + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; + + diff --git a/src/controllers/tanksController.js b/src/controllers/tanksController.js index 8aa0519a..de49f069 100644 --- a/src/controllers/tanksController.js +++ b/src/controllers/tanksController.js @@ -1,5 +1,5 @@ //const Tank = require("../models/tanks"); -const { Tank, MotorData, IotData,MotorIot,TankWaterLevel,TankConsumptionSchema,TankConsumptionOriginalSchema } = require('../models/tanks') +const { Tank, MotorData, IotData,MotorIot,TankWaterLevel,TankConsumptionSchema,TankConsumptionOriginalSchema,CustomerAutoPercentages } = require('../models/tanks') const {User} = require("../models/User"); const boom = require("boom"); @@ -11,6 +11,8 @@ 
const cron = require('node-cron'); const moment = require('moment'); const EventEmitter = require('events'); +EventEmitter.defaultMaxListeners = 50; // Increase listener limit + const eventEmitter = new EventEmitter(); async function deleteOldRecords() { const SEVEN_DAYS_IN_MILLISECONDS = 7 * 24 * 60 * 60 * 1000; @@ -160,23 +162,53 @@ exports.updateTanksInfo = async (req, reply) => { } }; -//delete selected tank + + + + exports.deleteTanksInfo = async (req, reply) => { try { - const customerId = req.params.customerId; - - const tankName = req.query.tankName; + const { customerId } = req.params; + const { tankName } = req.query; const tankLocation = req.body.tankLocation.toLowerCase(); - const tank = await Tank.findOneAndDelete({ tankName: tankName,customerId:customerId,tankLocation:tankLocation }); + if (!tankName || !tankLocation) { + return reply.code(400).send({ message: "Tank name and location are required" }); + } - reply.send({ status_code: 200, data: tank}); - // return tank; - } catch (err) { - throw boom.boomify(err); + // Convert tankLocation to lowercase (for case-insensitive match) + const normalizedTankLocation = tankLocation.toLowerCase(); + + // Find and delete the main tank + const deletedTank = await Tank.findOneAndDelete({ + customerId, + tankName, + tankLocation: normalizedTankLocation + }); + + if (!deletedTank) { + return reply.code(404).send({ message: "Tank not found" }); + } + + // Remove the deleted tank from inputConnections and outputConnections in all other tanks + await Tank.updateMany( + { customerId }, + { + $pull: { + "connections.inputConnections": { inputConnections: tankName }, + "connections.outputConnections": { outputConnections: tankName } + } + } + ); + + return reply.send({ message: "Tank deleted successfully" }); + } catch (error) { + console.error("Error deleting tank:", error); + return reply.code(500).send({ message: "Internal Server Error" }); } }; + exports.getConnectionsInfoOfParticularTank = async (req, reply) => { 
try { const customerId = req.params.customerId; @@ -208,55 +240,95 @@ exports.getConnectionsInfoOfParticularTank = async (req, reply) => { //get tanks data by passing username exports.getTank = async (req, reply) => { try { - await Tank.find({ customerId: req.query.customerId }) - .exec() - .then((docs) => { - let totalSwitchCount = 0; - let totalSensorCount = 0; + const customerId = req.query.customerId; + if (!customerId) { + return reply.send({ status_code: 400, error: "Missing customerId" }); + } - const transformedDocs = docs.map((tank) => { - const inputConnections = tank.connections?.inputConnections || []; - const switchCount = inputConnections.reduce((count, connection) => { - return count + (connection.inputismotor === true ? 1 : 0); - }, 0); + // Use .lean() to get plain JavaScript objects, which are easier to modify + const tanks = await Tank.find({ customerId }).lean().exec(); - totalSwitchCount += switchCount; // Accumulate the switch_count + let totalSwitchCount = 0; + let totalSensorCount = 0; + let allSwitchConnectionsDetailed = []; // Array to hold the final consolidated list - // Check if the tank has need_sensor set to "yes" - if (tank.need_sensor?.toLowerCase() === "yes") { - totalSensorCount++; - } + const transformedDocs = tanks.map((tank) => { + const inputConnections = tank.connections?.inputConnections || []; + const currentToTank = tank.tankName || null; // Destination Tank Name + const currentToLocation = tank.tankLocation || null; // Destination Tank Location - // Add the switch_count field inside connections - return { - ...tank.toObject(), // Convert Mongoose document to plain object - connections: { - ...tank.connections, - switch_count: switchCount, - }, - }; - }); + // Filter only motor switch connections (raw data) + const switchConnectionsRaw = inputConnections.filter(conn => conn.inputismotor === true); + + // Count switches for this tank and add to total + const switchCount = switchConnectionsRaw.length; + totalSwitchCount += 
switchCount; + + // Count sensor requirement + if (String(tank.need_sensor).toLowerCase() === "yes") { + totalSensorCount++; + } + + // --- Create the detailed switch connection objects for the consolidated list --- + const switchConnectionsForThisTankDetailed = switchConnectionsRaw.map(conn => ({ + // Spread all original properties from the switch connection + from_tank: conn.inputConnections || null, // Get source name from the connection itself + from_location: conn.input_type || null, // Get source type/location from the connection itself + to_tank: currentToTank, // Add destination tank name (parent object) + to_location: currentToLocation // Add destination location (parent object) + })); + // Add the detailed connections from *this* tank to the overall list + allSwitchConnectionsDetailed.push(...switchConnectionsForThisTankDetailed); + // --- End detailed switch connection creation --- + + + // Check if any motor is running (motor_status === "2") - keep as is + const isAnyMotorRunning = inputConnections.some(conn => conn.motor_status === "2"); + + // Map original from_connections for display within the tank object (optional, but your original code did this) + const fromConnections = inputConnections.map(conn => ({ + from_tank: conn.inputConnections || null, + from_type: conn.input_type || null, + })); + + // Return the transformed tank document for the 'data' array + return { + ...tank, // Include original tank fields + connections: { // Keep the connections object structure + ...tank.connections, // Include original connection details (source, input/output arrays) + switch_count: switchCount, // Keep the count specific to this tank + // Decide if you want to keep the raw switch_connections here or remove it + // Keeping it might be useful for context within the specific tank object + switch_connections: switchConnectionsRaw, // Keep the raw list here + from_connections: fromConnections, // Keep this derived list here + }, + all_motor_status: 
isAnyMotorRunning, // Keep this status flag + // You might not need these 'to_tank'/'to_location' fields at the root of the tank object anymore + // as they are now part of the consolidated switch_connections list, but keeping them doesn't hurt. + to_tank: currentToTank, + to_location: currentToLocation, + }; + }); + + // Send the final response with the consolidated list + reply.send({ + status_code: 200, + data: transformedDocs, // Array of processed tank objects + count: transformedDocs.length, + total_switch_count: totalSwitchCount, + total_sensor_count: totalSensorCount, + switch_connections: allSwitchConnectionsDetailed // The new consolidated list + }); - reply.send({ - status_code: 200, - data: transformedDocs, - count: transformedDocs.length, - total_switch_count: totalSwitchCount, // Add the total switch count - total_sensor_count: totalSensorCount, // Add the total sensor count - }); - }) - .catch((err) => { - console.error(err); - reply.send({ status_code: 500, error: err.message }); - }); } catch (err) { - console.error(err); + console.error("getTank error:", err); reply.send({ status_code: 500, error: "Internal Server Error" }); } }; + exports.getTanksensorcount = async (req, reply) => { try { const { customerId } = req.params; @@ -344,24 +416,77 @@ exports.getTanksofParticularInstaller = async (req, reply) => { // throw boom.boomify(err); // } //}; +// const boom = require("@hapi/boom"); // Assuming you are using boom for error handling + + exports.getTankmotordata = async (req, reply) => { try { - await MotorData.find({customerId: req.query.customerId}) - .exec() - .then((docs) => { - reply.send({ status_code: 200, data: docs, count: docs.length }); - }) - .catch((err) => { - console.log(err); - reply.send({ error: err }); + const { startDate, stopDate,pumps,users } = req.body; + const { customerId } = req.params; + + // Validate and format the input dates + if (!moment(startDate, "DD-MMM-YYYY - HH:mm", true).isValid() || !moment(stopDate, 
"DD-MMM-YYYY - HH:mm", true).isValid()) { + return reply.send({ status_code: 400, message: "Invalid date format" }); + } + + // Convert input dates to ISO 8601 format for Date comparison + const start = moment(startDate, "DD-MMM-YYYY - HH:mm").toDate(); + const end = moment(stopDate, "DD-MMM-YYYY - HH:mm").endOf('day').toDate(); + + + // Convert input dates to string format for string-based comparison + + + // Fetch the username based on customerId + const user = await User.findOne({ customerId }).select("username"); + + if (user) { + const userName = user.username || "N/A"; + + let query = { customerId }; + + if (pumps !== "All") { + query.motor_id = pumps; + } + + if (users !== "All") { + query.started_by = users; + } + + // Fetch motor data with applied filters + const motordatas = await MotorData.find(query); + + + + + const filtereddatas = motordatas.filter((record) => { + const recordTime = moment(record.startTime, "DD-MMM-YYYY - HH:mm").toDate(); + return recordTime >= start && recordTime <= end; + }); + + + reply.send({ + status_code: 200, + data: filtereddatas, + count: filtereddatas.length, + customerName: userName, }); + } else { + reply.send({ status_code: 404, message: "User not found" }); + } } catch (err) { + console.error("Error in getTankmotordata:", err); throw boom.boomify(err); } }; + + + + + exports.updateTanklevels = async (req, reply) => { try { const customerId = req.params.customerId; @@ -371,7 +496,7 @@ exports.updateTanklevels = async (req, reply) => { for (const tank of tanks) { const tankId = tank._id; - const tank_name = tank.tankName + const tank_name = tank.tankName let capacity = parseInt(tank.capacity.replace(/,/g, ''), 10); //let waterLevel = parseInt(tank.waterlevel.replace(/,/g, ''), 10); @@ -417,6 +542,11 @@ exports.getTanklevels = async (req, reply) => { try { const customerId = req.params.customerId; let sumSumpDrinkingWater = 0; + let totalavailableDrinkingwater = 0; + let totalDrinkingcapacity = 0; + let 
totalavailableBorewater = 0; + let totalBorewatercapacity = 0; + let sumOverheadDrinkingWater = 0; let sumSumpBoreWater = 0; let sumOverheadBoreWater = 0; @@ -425,8 +555,9 @@ exports.getTanklevels = async (req, reply) => { let sumSumpBoreWaterCapacity = 0; let sumOverheadBoreWaterCapacity = 0; - const updated_data = await Tank.find({ customerId: customerId }); - console.log("updated_data", updated_data); + // Fetch only active tanks + const updated_data = await Tank.find({ customerId: customerId, status: "active" }); + console.log("Active Tanks Data:", updated_data); updated_data.forEach((tank) => { const waterlevel = parseInt(tank.waterlevel ? tank.waterlevel.replace(/,/g, '') : '0', 10); @@ -434,52 +565,53 @@ exports.getTanklevels = async (req, reply) => { const waterlevelPercentage = ((waterlevel / capacity) * 100).toFixed(2); tank.waterlevelPercentage = waterlevelPercentage; // Add water level percentage to each tank object console.log(`Processing tank: ${tank.tankName}`); - console.log(`Type of Water: ${tank.typeOfWater}, Location: ${tank.tankLocation}, Waterlevel: ${waterlevel}, Capacity: ${capacity}, Waterlevel Percentage: ${waterlevelPercentage}%`); let totalInputPercentage = 0; let inputCount = 0; let totalOutputPercentage = 0; let outputCount = 0; - // Calculate and add water level percentages for inputConnections + // Process input connections if (tank.connections.inputConnections) { tank.connections.inputConnections.forEach(inputConnection => { - const inputWaterLevel = inputConnection.water_level ? parseInt(inputConnection.water_level.replace(/,/g, ''), 10) : 0; - const inputCapacity = inputConnection.capacity ? 
parseInt(inputConnection.capacity.replace(/,/g, ''), 10) : 0; - - if (inputCapacity > 0) { - inputConnection.waterlevelPercentage = ((inputWaterLevel / inputCapacity) * 100).toFixed(2); - totalInputPercentage += parseFloat(inputConnection.waterlevelPercentage); - inputCount++; - } else { - inputConnection.waterlevelPercentage = null; + if (inputConnection.status === "active") { // Process only active connections + const inputWaterLevel = inputConnection.water_level ? parseInt(inputConnection.water_level.replace(/,/g, ''), 10) : 0; + const inputCapacity = inputConnection.capacity ? parseInt(inputConnection.capacity.replace(/,/g, ''), 10) : 0; + + if (inputCapacity > 0) { + inputConnection.waterlevelPercentage = ((inputWaterLevel / inputCapacity) * 100).toFixed(2); + totalInputPercentage += parseFloat(inputConnection.waterlevelPercentage); + inputCount++; + } else { + inputConnection.waterlevelPercentage = null; + } } }); - // Add the average input water level percentage to the tank's connections object tank.connections.inputWaterlevelPercentage = inputCount > 0 ? (totalInputPercentage / inputCount).toFixed(2) : null; } - // Calculate and add water level percentages for outputConnections + // Process output connections if (tank.connections.outputConnections) { tank.connections.outputConnections.forEach(outputConnection => { - const outputWaterLevel = outputConnection.water_level ? parseInt(outputConnection.water_level.replace(/,/g, ''), 10) : 0; - const outputCapacity = outputConnection.capacity ? 
parseInt(outputConnection.capacity.replace(/,/g, ''), 10) : 0; - - if (outputCapacity > 0) { - outputConnection.waterlevelPercentage = ((outputWaterLevel / outputCapacity) * 100).toFixed(2); - totalOutputPercentage += parseFloat(outputConnection.waterlevelPercentage); - outputCount++; - } else { - outputConnection.waterlevelPercentage = null; + if (outputConnection.status === "active") { // Process only active connections + const outputWaterLevel = outputConnection.water_level ? parseInt(outputConnection.water_level.replace(/,/g, ''), 10) : 0; + const outputCapacity = outputConnection.capacity ? parseInt(outputConnection.capacity.replace(/,/g, ''), 10) : 0; + + if (outputCapacity > 0) { + outputConnection.waterlevelPercentage = ((outputWaterLevel / outputCapacity) * 100).toFixed(2); + totalOutputPercentage += parseFloat(outputConnection.waterlevelPercentage); + outputCount++; + } else { + outputConnection.waterlevelPercentage = null; + } } }); - // Add the average output water level percentage to the tank's connections object tank.connections.outputWaterlevelPercentage = outputCount > 0 ? 
(totalOutputPercentage / outputCount).toFixed(2) : null; } - // Summing up the total water levels and capacities + // Summing up the total water levels and capacities for active tanks only if (tank.tankLocation === 'sump' && tank.typeOfWater === 'drinking') { sumSumpDrinkingWater += waterlevel; sumSumpDrinkingWaterCapacity += capacity; @@ -493,6 +625,14 @@ exports.getTanklevels = async (req, reply) => { sumOverheadBoreWater += waterlevel; sumOverheadBoreWaterCapacity += capacity; } + else if ( tank.typeOfWater === 'drinking') { + totalavailableDrinkingwater += waterlevel; + totalDrinkingcapacity += capacity; + } + else if ( tank.typeOfWater === 'bore') { + totalavailableBorewater += waterlevel; + totalBorewatercapacity += capacity; + } }); const user = await User.findOne({ customerId: customerId }); @@ -541,7 +681,7 @@ let supplier_tanks = []; // const receiver_tank = req.body.to // const receiver_tank_info = await Tank.findOne({ customerId ,tankName:receiver_tank,tankLocation:(req.body.to_type).toLowerCase()}); // const receiver_capacity = parseInt((receiver_tank_info.capacity).replace(/,/g, ''), 10) -// const desired_water_percentage = parseInt((req.body.percentage).replace(/,/g, ''), 10) +// const desired_water_percentage = parseInt((req.body.percentage).replace(/,/g, ''), 10) // const supplier_tank = req.body.from @@ -1031,88 +1171,180 @@ let supplier_tanks = []; + exports.consumption = async (request, reply) => { try { const { customerId } = request.params; const { startDate, stopDate, block } = request.body; - let { typeofwater } = request.body; - + let typeofwater = request.body.typeofwater.toLowerCase(); + // Convert typeofwater to lowercase typeofwater = typeofwater.toLowerCase(); const start = moment(startDate, "DD-MMM-YYYY - HH:mm").toDate(); const end = moment(stopDate, "DD-MMM-YYYY - HH:mm").toDate(); - // Construct the query object based on block and typeofwater inputs + // Construct the query object const tankQuery = { customerId, tankLocation: 
"overhead" }; + if (block !== "All") tankQuery.blockName = block; - if (block !== "All") { - tankQuery.blockName = block; // Filter by specific block if not "all" - } - - if (typeofwater !== "all") { - tankQuery.typeOfWater = typeofwater; // Filter by specific type of water if not "all" + // Add typeofwater filter + if (typeofwater === "bore") { + tankQuery.typeOfWater = { $in: ["bore", "Bore Water"] }; + } else if (typeofwater === "drinking") { + tankQuery.typeOfWater = { $in: ["drinking", "Drinking Water"] }; } const tanks = await Tank.find(tankQuery); const tankData = []; const tankconsumptionData = []; - // Variable to track total consumption for the selected block and typeofwater let totalConsumptionForSelectedBlockAndTypeOfWater = 0; + let totalBoreConsumptionForSelectedBlockAndTypeOfWater = 0; + let totalBoreCapacityForSelectedBlockAndTypeOfWater = 0; + let totalDrinkingConsumptionForSelectedBlockAndTypeOfWater = 0; + let totalDrinkingCapacityForSelectedBlockAndTypeOfWater = 0; for (const tank of tanks) { const waterlevel_at_midnight = parseInt(tank.waterlevel_at_midnight.replace(/,/g, ''), 10); const total_water_added_from_midnight = parseInt(tank.total_water_added_from_midnight.replace(/,/g, ''), 10); const waterlevel = parseInt(tank.waterlevel.replace(/,/g, ''), 10); const tankname = tank.tankName; - - - + const capacity = parseInt(tank.capacity.replace(/,/g, ''), 10); const tankConsumptions = await TankConsumptionOriginalSchema.find({ customerId, tankName: tank.tankName, tankLocation: tank.tankLocation, - - ...(block !== "All" && { block: tank.blockName }), // Ensure correct field names - ...(typeofwater !== "all" && { typeofwater: tank.typeOfWater }) // Ensure correct field names - }); - - const filteredConsumptions = tankConsumptions.filter((record) => { - const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); - return recordTime >= start && recordTime <= end; + ...(block !== "All" && { block: tank.blockName }), + ...(typeofwater === 
"bore" && { typeofwater: { $in: ["bore", "Bore Water"] } }), + ...(typeofwater === "drinking" && { typeofwater: { $in: ["drinking", "Drinking Water"] } }), }); + let filteredConsumptions; + if (start.getTime() === end.getTime()) { + // If start and end are the same, filter for exact match + filteredConsumptions = tankConsumptions.filter((record) => { + return moment(record.time, "DD-MMM-YYYY - HH:mm").toDate().getTime() === start.getTime(); + }); + } else { + // Normal range filter + filteredConsumptions = tankConsumptions.filter((record) => { + const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); + return recordTime >= start && recordTime <= end; + }); + } const total_consumption_from_records = filteredConsumptions.reduce((acc, record) => { return acc + parseInt(record.consumption, 10); }, 0); - tankconsumptionData.push({ - tankname, - consumptionRecordsdatewise: filteredConsumptions - }) - const consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; + let consumption; - // Add to the total consumption for the selected block and typeofwater - totalConsumptionForSelectedBlockAndTypeOfWater += consumption; + + +// const isSameTime = start.getTime() === end.getTime(); +// const isEndToday = moment(end).isSame(moment(), 'day'); + +// if (isSameTime || !isEndToday) { +// // If single timestamp OR end is not today, only use records +// consumption = total_consumption_from_records; +// } else { +// // If end is today, add real-time values +// consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; +// } + +const isSameTime = start.getTime() === end.getTime(); +const isEndToday = moment(end).isSame(moment(), 'day'); + +if (isSameTime && isEndToday) { + // Start and End are same AND it's today => only realtime tank calc + consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel; +} else if 
(isSameTime && !isEndToday) { + // Same time and NOT today => only record data + consumption = total_consumption_from_records; +} else if (!isSameTime && isEndToday) { + // Range query ending today => combine record + realtime + consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; +} else { + // Range query not ending today => only record data + consumption = total_consumption_from_records; +} + + + + +// const isSameTime = start.getTime() === end.getTime(); +// const isToday = moment(start).isSame(moment(), 'day'); + +// if (isSameTime && !isToday) { +// // Same date & time and NOT today => use only records +// consumption = total_consumption_from_records; +// } else { +// // Normal case => use full calculation +// consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; +// } + + // Add to the total consumption and capacities based on water type + if (tank.typeOfWater === "bore" || tank.typeOfWater === "Bore Water") { + totalBoreConsumptionForSelectedBlockAndTypeOfWater += consumption; + totalConsumptionForSelectedBlockAndTypeOfWater += consumption + totalBoreCapacityForSelectedBlockAndTypeOfWater += capacity; + } else if (tank.typeOfWater === "drinking" || tank.typeOfWater === "Drinking Water") { + totalDrinkingConsumptionForSelectedBlockAndTypeOfWater += consumption; + totalConsumptionForSelectedBlockAndTypeOfWater += consumption + totalDrinkingCapacityForSelectedBlockAndTypeOfWater += capacity; + } tankData.push({ tankname, - totalConsumption: consumption, block: tank.blockName, TypeofWater: tank.typeOfWater, location: tank.tankLocation, capacity: tank.capacity, - waterlevel: tank.waterlevel + waterlevel: tank.waterlevel, + }); + + tankconsumptionData.push({ + tankname, + consumptionRecordsdatewise: filteredConsumptions, }); } + // Calculate total consumption percentages + const boreConsumptionPercentage = 
totalBoreCapacityForSelectedBlockAndTypeOfWater + ? ((totalBoreConsumptionForSelectedBlockAndTypeOfWater / totalBoreCapacityForSelectedBlockAndTypeOfWater) * 100).toFixed(2) + : 0; + + const drinkingConsumptionPercentage = totalDrinkingCapacityForSelectedBlockAndTypeOfWater + ? ((totalDrinkingConsumptionForSelectedBlockAndTypeOfWater / totalDrinkingCapacityForSelectedBlockAndTypeOfWater) * 100).toFixed(2) + : 0; + + const totalConsumptionPercentage = + typeofwater === "bore" + ? boreConsumptionPercentage + : typeofwater === "drinking" + ? drinkingConsumptionPercentage + : ((totalBoreConsumptionForSelectedBlockAndTypeOfWater + totalDrinkingConsumptionForSelectedBlockAndTypeOfWater) / + (totalBoreCapacityForSelectedBlockAndTypeOfWater + totalDrinkingCapacityForSelectedBlockAndTypeOfWater) * + 100 + ).toFixed(2); + // Include the total consumption in the response - const response = { - status_code: 200, - tankData,consumptiorecordsdatewise:tankconsumptionData, - [`total consumption of ${typeofwater} and selected block`]: totalConsumptionForSelectedBlockAndTypeOfWater - }; + // Construct the response +const response = { + status_code: 200, + tankData, + consumptiorecordsdatewise: tankconsumptionData, + totalConsumptionPercentage, + totalConsumptionPercentageForBore: typeofwater !== "drinking" ? boreConsumptionPercentage : 0, + totalConsumptionPercentageForDrinking: typeofwater !== "bore" ? drinkingConsumptionPercentage : 0, + totalConsumptionForBore: typeofwater !== "drinking" ? totalBoreConsumptionForSelectedBlockAndTypeOfWater : 0, + totalConsumptionForDrinking: typeofwater !== "bore" ? 
totalDrinkingConsumptionForSelectedBlockAndTypeOfWater : 0, + [`total consumption of all and selected block`]: totalConsumptionForSelectedBlockAndTypeOfWater +}; + +// Send the response + + reply.send(response); } catch (err) { @@ -1120,6 +1352,8 @@ exports.consumption = async (request, reply) => { } }; + + exports.consumptiondatewiseofalltanks = async (request, reply) => { try { const { customerId } = request.params; @@ -1141,11 +1375,10 @@ exports.consumptiondatewiseofalltanks = async (request, reply) => { const tanks = await Tank.find(tankQuery); const tankconsumptionData = {}; let totalConsumptionForSelectedBlockAndTypeOfWater = 0; + let totalAvailableCapacity = 0; + let totalConsumed = 0; for (const tank of tanks) { - const waterlevel_at_midnight = parseInt(tank.waterlevel_at_midnight.replace(/,/g, ''), 10); - const total_water_added_from_midnight = parseInt(tank.total_water_added_from_midnight.replace(/,/g, ''), 10); - const waterlevel = parseInt(tank.waterlevel.replace(/,/g, ''), 10); const tankname = tank.tankName; const tankConsumptions = await TankConsumptionOriginalSchema.find({ @@ -1156,21 +1389,37 @@ exports.consumptiondatewiseofalltanks = async (request, reply) => { ...(typeofwater !== "all" && { typeofwater: tank.typeOfWater }) }); - const filteredConsumptions = tankConsumptions.filter((record) => { - const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); - return recordTime >= start && recordTime <= end; - }); - - const total_consumption_from_records = filteredConsumptions.reduce((acc, record) => { - return acc + parseInt(record.consumption, 10); - }, 0); - - const consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; - totalConsumptionForSelectedBlockAndTypeOfWater += consumption; + let filteredConsumptions; + if (start.getTime() === end.getTime()) { + // If start and end are the same, filter for exact match + filteredConsumptions = tankConsumptions.filter((record) 
=> { + return moment(record.time, "DD-MMM-YYYY - HH:mm").toDate().getTime() === start.getTime(); + }); + } else { + // Normal range filter + filteredConsumptions = tankConsumptions.filter((record) => { + const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); + return recordTime >= start && recordTime <= end; + }); + } + // filteredConsumptions.forEach(record => { + // totalConsumed += parseInt(record.consumption, 10); + // totalAvailableCapacity += parseInt(record.capacity, 10); + // }); for (const record of filteredConsumptions) { const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").format("DD-MMM-YYYY - HH:mm"); - + const tank_info = await Tank.findOne({ + customerId:record.customerId, + tankName: record.tankName, + tankLocation: record.tankLocation, + + }); + totalConsumptionForSelectedBlockAndTypeOfWater+=parseInt(record.consumption, 10); + totalConsumed += parseInt(record.consumption, 10); + ; + // console.log( parseInt(tank_info.capacity.replace(/,/g, ''), 10)) + totalAvailableCapacity += parseInt(tank_info.capacity.replace(/,/g, ''), 10) if (!tankconsumptionData[recordTime]) { tankconsumptionData[recordTime] = { date: recordTime, @@ -1185,6 +1434,7 @@ exports.consumptiondatewiseofalltanks = async (request, reply) => { tankconsumptionData[recordTime].consumptionRecordsdatewise.push({ tankName: record.tankName, consumption: record.consumption, + available_capacity: record.available_capacity, time: record.time }); tankconsumptionData[recordTime].count++; @@ -1192,36 +1442,14 @@ exports.consumptiondatewiseofalltanks = async (request, reply) => { } } - // Fetch all tanks in the customerId and block (or all blocks if block is set to "All") - const allTanksInBlock = await Tank.find({ - customerId, - ...(block !== "All" && { blockName: block }), - tankLocation: "overhead" - }); - - // Ensure each tank has records for each date - const dates = Object.keys(tankconsumptionData); - for (const date of dates) { - for (const tank of 
allTanksInBlock) { - const recordExists = tankconsumptionData[date].consumptionRecordsdatewise.some(record => record.tankName === tank.tankName); - if (!recordExists) { - const randomConsumption = Math.floor(Math.random() * (7000 - 3000 + 1)) + 3000; - tankconsumptionData[date].consumptionRecordsdatewise.push({ - tankName: tank.tankName, - consumption: randomConsumption.toString(), - time: date - }); - tankconsumptionData[date].count++; - } - } - } - const responseData = Object.values(tankconsumptionData); - + const totalConsumptionPercentage = totalAvailableCapacity > 0 ? ((totalConsumed / totalAvailableCapacity) * 100).toFixed(2) : "0"; + console.log(totalConsumed,"totalConsumed",totalAvailableCapacity,"totalAvailableCapacity",totalConsumptionForSelectedBlockAndTypeOfWater,"totalConsumptionForSelectedBlockAndTypeOfWater") const response = { status_code: 200, consumptiorecordsdatewise: responseData, - [`total consumption of ${typeofwater} and selected block`]: totalConsumptionForSelectedBlockAndTypeOfWater + [`total consumption of ${typeofwater} and selected block`]: totalConsumptionForSelectedBlockAndTypeOfWater, + totalConsumptionPercentage: `${totalConsumptionPercentage}%` }; reply.send(response); @@ -1239,6 +1467,8 @@ exports.consumptiondatewiseofalltanks = async (request, reply) => { + + const delay = ms => new Promise(resolve => setTimeout(resolve, ms)); //const moment = require('moment'); // Import moment.js for date/time operations @@ -1253,369 +1483,168 @@ const formatDate = (date) => { }; -// exports.motorAction = async (req, reply) => { -// try { -// const customerId = req.params.customerId; -// const action = req.body.action; -// const motorId = req.body.motor_id; -// const start_instance_id = req.body.start_instance_id -// console.log(req.body.startTime) -// // Ensure motor_id is provided -// if (!motorId) { -// throw new Error("Motor ID is required."); -// } +const admin = require('firebase-admin'); -// // Determine the motor stop status based on the 
action -// let motorStopStatus; -// if (action === "start") { -// motorStopStatus = "2"; // If action is start, set stop status to "2" -// } else if (action === "stop") { -// motorStopStatus = "1"; // If action is stop, set stop status to "1" -// } else { -// throw new Error("Invalid action provided."); -// } +// Initialize Firebase Admin SDK (make sure this is done once in your app) +// const serviceAccount = require('../waternotifications-ab81a-firebase-adminsdk-ereny-8b0bdac787.json'); +const serviceAccount = require('../waternotifications-ab81a-firebase-adminsdk-ereny-8b0bdac787.json'); -// // Update the motor stop status immediately if action is stop -// if (action === "stop") { -// // Update the motor stop status and other fields -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.stopTime": req.body.stopTime, -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); +admin.initializeApp({ + credential: admin.credential.cert(serviceAccount), +}); -// // Send immediate response to the client -// reply.code(200).send({ message: "Motor stopped successfully." 
}); -// // Perform stop operations in the background -// (async () => { -// await delay(300000); +eventEmitter.on( + "motorStart", + async (customerId, fcmTokens, tankName, blockName, startTime, motorOnType, manual_threshold_time, typeOfWater, hw_Id, phoneNumber) => { + try { + console.log("Motor Start Event Triggered for:", customerId); -// // Update the existing motor data entry with stop details -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now; -// await Tank.findOneAndUpdate( -// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, -// { $set: { total_water_added_from_midnight: totalwaterpumped } } -// ); + // Fetch user or staff based on customerId + const user = await User.findOne({ customerId }); -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime: req.body.stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// })(); - -// // Return here to ensure the rest of the code is not executed for the stop action -// return; -// } else { -// // Update the motor stop status to "2" for start action -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { "connections.inputConnections.$.motor_stop_status": 
"2" } } -// ); -// } - -// // Check threshold settings if action is start -// if (action === "start") { -// if (req.body.threshold_type === "time") { -// // If threshold type is time, update threshold time -// // await Tank.updateOne( -// // { customerId, "connections.inputConnections.motor_id": motorId }, -// // { $set: { "connections.inputConnections.$.manual_threshold_time": req.body.manual_threshold_time,startTime:req.body.startTime } } -// // ); -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); - -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); - -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, [`connections.inputConnections.${index}.startTime`]: req.body.startTime,[`connections.inputConnections.${index}.start_instance_id`]: start_instance_id } } -// ); -// } -// } - - -// // Start monitoring water level based on threshold time -// const thresholdTime = moment().add(req.body.manual_threshold_time, 'minutes').toDate(); -// const intervalId = setInterval(async () => { -// const splr_tank_info3 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// const 
splr_tank_info3_waterlevel = parseInt(splr_tank_info3.waterlevel, 10); -// //console.log(splr_tank_info3_waterlevel,"splr_tank_info3_waterlevel") -// const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity.replace(/,/g, ''), 10); -// // const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity, 10); -// // console.log(splr_tank_info3.capacity,splr_tank_info3_capacity,"splr_tank_info3_capacity") -// const splr_tank_info3_percentage = (splr_tank_info3_waterlevel / splr_tank_info3_capacity) * 100; -// // console.log(splr_tank_info3_percentage, "percentage for less than 20"); - -// if (new Date() >= thresholdTime || splr_tank_info3_percentage <= 20) { -// console.log(splr_tank_info3_percentage,) -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); - -// await delay(300000); - -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now -// await Tank.findOneAndUpdate({customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase()}, { $set: { total_water_added_from_midnight: totalwaterpumped 
} }) - -// const stopTime = formatDate(new Date()); - -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime:stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 60000); -// } else if (req.body.threshold_type === "litres") { -// console.log("entered litres") -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); - -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); -// // If threshold type is percentage, calculate percentage threshold -// const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); -// const supplier_tank_info = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// if (!receiver_tank_info) { -// throw new Error("Receiver tank not found."); -// } -// if (!supplier_tank_info) { -// throw new Error("Supplierr tank not found."); -// } -// const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); -// const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); - -// const capacity = parseInt(receiver_tank_info.capacity, 10); -// const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); -// const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); - -// console.log(desired_percentage) -// const threshold_water_level = 
waterLevel+desired_percentage; - -// const supplier_threshold = supplier_waterLevel-desired_percentage -// console.log(supplier_threshold,"supplier_threshold") - -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } -// ); -// } -// } - - - -// // Update water level threshold - - -// // Start monitoring water level based on threshold percentage -// const intervalId = setInterval(async () => { -// // Check if water level has reached the threshold percentage -// const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); -// if (current_water_level <= supplier_threshold) { -// // Stop the motor pump -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", - -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); // Stop monitoring water level -// await delay(300000); - -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// 
const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); - - -// const stopTime = formatDate(new Date()); - -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime:stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 20000); // Check water level every minute -// } -// } - -// // Respond with success message -// reply.code(200).send({ message: `Motor ${action === "start" ? "started" : "stopped"} successfully.` }); -// } catch (err) { -// // Handle errors -// throw boom.boomify(err); -// } -// }; + if (!user) { + console.log(`No user found for customerId: ${customerId}`); + return; + } -const admin = require('firebase-admin'); + // Identify who started the motor + let startedBy = "Unknown"; // Default value -// Initialize Firebase Admin SDK (make sure this is done once in your app) -// const serviceAccount = require('../waternotifications-ab81a-firebase-adminsdk-ereny-8b0bdac787.json'); -const serviceAccount = require('../waternotifications-ab81a-firebase-adminsdk-ereny-8b0bdac787.json'); + if (user.phone === phoneNumber) { + startedBy = user.username; // Customer's name + } else { + const staffMember = user.staff.staff.find(staff => staff.phone === phoneNumber); + if (staffMember) { + startedBy = staffMember.name; // Staff's name + } + } -admin.initializeApp({ - credential: admin.credential.cert(serviceAccount), -}); + if (startedBy === "Unknown") { + console.log("User not found. 
Cannot proceed with motorStart event."); + return; + } -// // Handle motor start event -// eventEmitter.on('motorStart', async (fcmTokens) => { -// await sendNotification(fcmTokens, 'Motor Started', 'The motor has been started successfully.'); -// }); + const startMethod = motorOnType === "Mobile APP" ? "Mobile APP" : "Manual"; + const motorName = `${tankName}-${blockName}-${typeOfWater}`; -// // Handle motor stop event -// eventEmitter.on('motorStop', async (fcmTokens) => { -// await sendNotification(fcmTokens, 'Motor Stopped', 'The motor has been stopped successfully.'); -// }); + const message = + `🚰 Motor Name: ${motorName}\n` + + `🚰 Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `👤 Started by: ${startedBy}\n` + // Only name is displayed + `📱 Mode: '${startMethod}'\n` + + `🕒 Pump started at: ${startTime} \n` + + `Will stop after: '${manual_threshold_time}' mins`; -// // Handle low water level event -// eventEmitter.on('lowWaterLevel', async (fcmTokens) => { -// await sendNotification(fcmTokens, 'Low Water Level', 'The water level is below 20%.'); -// }); + await sendNotification(hw_Id, customerId, fcmTokens, "ARMINTA : MOTOR STARTED", message); + } catch (error) { + console.error("Error in motorStart event:", error); + } + } +); + +eventEmitter.on( + "motorStop", + async ( + customerId, + fcmTokens, + tankName, + blockName, + stopTime, + motorOnType, + totalWaterPumped, + typeOfWater, + hw_Id, + phoneNumber + ) => { + try { + console.log("Motor Stop Event Triggered for:", customerId); -// // Handle high water level event -// eventEmitter.on('highWaterLevel', async (fcmTokens) => { -// await sendNotification(fcmTokens, 'High Water Level', 'The water level has reached above 90%.'); -// }); + // Fetch user or staff based on customerId + const user = await User.findOne({ customerId }); -// Handle motor start event with timestamp -// eventEmitter.on('motorStart', async (fcmTokens, timestamp, motorId, waterLevel) => { -// await 
sendNotification(fcmTokens, 'Motor Started', `Motor ID: ${motorId} started successfully at ${timestamp}. Current Water Level: ${waterLevel} Ltrs`); -// }); + if (!user) { + console.log(`No user found for customerId: ${customerId}`); + return; + } -eventEmitter.on('motorStart', async (fcmTokens, timestamp, motorId, waterLevel, blockName, tankName, startTime, motorOnType, stopCriteria) => { - const message = `MotorId '${motorId}' Water supply from '${blockName}' to '${tankName}' started at ${startTime} by '${motorOnType}' mode and will stop after ${stopCriteria}. Current Water Level: ${waterLevel} Ltrs.`; - await sendNotification(fcmTokens, 'Motor Started', message); -}); + // Identify who stopped the motor + let stoppedBy = "Unknown"; // Default value + if (user.phone === phoneNumber) { + stoppedBy = user.username; // Customer's name + } else if (user.staff && user.staff.staff) { + const staffMember = user.staff.staff.find((staff) => staff.phone === phoneNumber); + if (staffMember) { + stoppedBy = staffMember.name; // Staff's name + } + } -// Emit motor stop event with motorId -eventEmitter.on('motorStop', async (fcmTokens, motorId, waterLevel, blockName, tankName, stopTime, motorOnType) => { - const message = `MotorId '${motorId}' Water supply from '${blockName}' to '${tankName}' stopped at ${stopTime} by '${motorOnType}' mode. Current Water Level: ${waterLevel} Ltrs.`; - await sendNotification(fcmTokens, 'Motor Stopped', message); -}); + if (stoppedBy === "Unknown") { + console.log("User not found. Cannot proceed with motorStop event."); + return; + } + const stopMethod = motorOnType === "Mobile APP" ? 
"Mobile APP" : "Manual"; + const motorName = `${tankName}-${blockName}-${typeOfWater}`; -// Event listener to handle notification -eventEmitter.on('sendLowWaterNotification', async (fcmToken, tankInfo) => { - const message = formatWaterLevelMessage(tankInfo, 'low'); - sendNotification(fcmToken, message); -}); + const message = + `🚰 Motor Name: ${motorName}\n` + + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `👤 Stopped by: ${stoppedBy}\n` + // Only name is displayed + `📱 Mode: '${stopMethod}'\n` + + `🕒 Pump stopped at: ${stopTime}\n` + + `💧 Total water pumped: ${totalWaterPumped} liters\n`; -eventEmitter.on('sendVeryLowWaterNotification', async (fcmToken, tankInfo) => { - const message = formatWaterLevelMessage(tankInfo, 'very low'); - sendNotification(fcmToken, message); -}); + await sendNotification(hw_Id, customerId, fcmTokens, "ARMINTA : MOTOR STOPPED", message); + } catch (error) { + console.error("Error in motorStop event:", error); + } + } +); -eventEmitter.on('sendCriticalLowWaterNotification', async (fcmToken, tankInfo) => { - const message = formatWaterLevelMessage(tankInfo, 'critically low'); - sendNotification(fcmToken, message); -}); -const formatWaterLevelMessage = (tankInfo, levelType) => { - const tankName = tankInfo.tankName; - const tankLocation = tankInfo.tankLocation; - const waterLevel = parseInt(tankInfo.waterlevel, 10); - const capacity = parseInt(tankInfo.capacity, 10); - const volumeInLitres = (capacity * waterLevel) / 100; // assuming the capacity is in litres - - let levelDescription = ''; - if (levelType === 'low') { - levelDescription = `${waterLevel}% (${volumeInLitres.toFixed(2)} L)`; - } else if (levelType === 'very low') { - levelDescription = `${waterLevel}% (${volumeInLitres.toFixed(2)} L)`; - } else if (levelType === 'critically low') { - levelDescription = `${waterLevel}% (${volumeInLitres.toFixed(2)} L)`; +eventEmitter.on( + 'motorStartAutomatic', + async (fcmTokens, tankName, blockName, startTime, 
motorOnType, manual_threshold_time, typeOfWater,threshold) => { + try { + // Retrieve the user information + const users = await User.find({ fcmIds: { $in: fcmTokens } }); + console.log("users", users); + const userNames = users.map(user => user.username).join(', '); + console.log("userNames", userNames); + + const startMethod = motorOnType === "Automatic" ? "Automatic" : "Manual"; + + // Generate motor name dynamically based on tank name, block name, and type of water + const motorName = `${tankName}-${blockName}-${typeOfWater}`; + + // Get current date and time for the motor start time + const currentDateTime = new Date(); + const formattedDate = currentDateTime.toLocaleDateString(); // Customize this format as needed + const formattedTime = currentDateTime.toLocaleTimeString(); // Customize this format as needed + + // Prepare the message + const message = + `🚰 Motor Name: ${motorName}\n` + + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `👤 Started by: '${startMethod}' \n` + + `📱 Mode: "Atomatic System" \n` + + `🕒 Pump started at: ${startTime} \n` + + `Will stop after: '${threshold}' `; + + // Send the notification + await sendNotification(fcmTokens, 'Arminta Water Management', message); + } catch (error) { + console.error('Error in motorStart event:', error); + } } +); - return `Water level in '${tankName}', located at '${tankLocation}', type of water: ${tankInfo.waterType} is ${levelType} at ${levelDescription}. Action: start motor now.`; -}; -// Emit low water level event with motorId -// eventEmitter.on('lowWaterLevel', async (fcmTokens, timestamp, motorId, waterLevel) => { -// await sendNotification(fcmTokens, 'Low Water Level', `Motor ID: ${motorId}, water level dropped below 20% at ${timestamp}. 
Current Water Level: ${waterLevel} Ltrs`); -// }); eventEmitter.on('sendHighWaterNotification', async (fcmTokens, tankInfo, startTime, stopTime) => { const message = `Attention: Water level in '${tankInfo.tankName}' located at '${tankInfo.tankLocation}' is high at ${tankInfo.waterLevel}% (${tankInfo.volumeInLitres} L). Please stop the motor. Motor running from ${startTime} to ${stopTime}.`; @@ -1639,660 +1668,839 @@ eventEmitter.on('sendCriticalHighWaterNotification', async (fcmTokens, tankInfo, // }); -// Function to emit events with timestamps -const emitWithTimestamp = (eventName, fcmTokens, motorId, waterLevel) => { - const timestamp = moment().format('HH:mm:ss YYYY-MM-DD '); - eventEmitter.emit(eventName, fcmTokens, timestamp, motorId, waterLevel); -}; - - -const sendNotification = async (fcmTokens, title, body) => { - if (!Array.isArray(fcmTokens) || fcmTokens.length === 0) { - console.error('No FCM tokens provided.'); - return; +eventEmitter.on('sendThresholdTimeNotification', async (customerId, fcmTokens, thresholdTime, hw_Id, tankName, blockName) => { + try { + const message = + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `Motor Stopped as it completed ${thresholdTime} minutes.`; + await sendNotification(hw_Id, customerId, fcmTokens, 'Motor Alert', message); + console.log("Threshold time notification sent successfully."); + } catch (error) { + console.error("Error sending threshold time notification:", error); } +}); - for (const token of fcmTokens) { - const message = { - token: token, - notification: { - title: title, - body: body, - }, - data: { - target: 'tank_levels', - }, - }; +// eventEmitter.on( +// 'sendMotorStartNotification', +// async (fcmTokens, motorId, waterLevel, blockName, tankName, motorOnType, stopCriteria, manual_threshold_time) => { +// try { +// // Get the latest timestamp +// const startTime = new Date().toLocaleString('en-IN', { timeZone: 'Asia/Kolkata' }); + +// // Retrieve the user information +// 
const users = await User.find({ fcmIds: { $in: fcmTokens } }); +// const userNames = users.map(user => user.username).join(', '); +// const startMethod = motorOnType.toUpperCase() === "Forced Manual"; + +// // Prepare the message +// const message = +// `🚰 Tank Name: '${tankName}'\n` + +// `🕒 Pump started at: '${startTime}'\n` + +// `👤 Initiated by: ${userNames}\n` + +// `🔄 Pump started by: '${startMethod}'`; + +// // Send the notification +// await sendNotification(fcmTokens, 'Motor Started 🚀', message); +// console.log('Motor start notification sent successfully!'); +// } catch (error) { +// console.error('Error in sendMotorStartNotification event:', error); +// } +// } +// ); - try { - const response = await admin.messaging().send(message); // Send each message individually - console.log('Notification sent successfully:', response); - } catch (error) { - console.error(`Failed to send notification to token ${token}:`, error); - } - } -}; +// eventEmitter.on( +// "sendMotorStartNotification", +// async (hw_Id, fcmTokens, waterLevel, blockName, tankName, motorOnType, stopCriteria, typeOfWater, manualThresholdTime) => { +// try { +// const formattedTime = new Date().toLocaleTimeString("en-IN", { timeZone: "Asia/Kolkata" }); +// if (motorOnType !== "forced_manual") { +// console.log(`Skipping notification: Motor was started in ${motorOnType} mode.`); +// return; +// } +// const stopConditionMessage = stopCriteria === "manual" +// ? 
`⚠️ Pump will stop **manually**.` +// : `🚨 Pump will stop when the water level reaches **${manualThresholdTime}%**.`; -// const sendPushNotification = async (registrationToken, title, body) => { -// const message = { -// notification: { -// title: title, -// body: body, -// }, -// data: { -// title: title, -// body: body, -// }, -// }; +// const message = `🚰 **Motor Started** 🚀\n` + +// `🔹 **Motor Name:** ${tankName}-${blockName}-${typeOfWater}\n` + +// `🏢 **Block Name:** ${blockName}\n` + +// `💧 **Water Level:** ${waterLevel}%\n` + +// `📱 **Mode:** Physically Started\n` + +// `🕒 **Pump started at:** ${formattedTime}\n` + +// stopConditionMessage; -// const options = { -// priority: "high", -// timeToLive: 60 * 60 * 24, -// }; +// await sendNotification(hw_Id, fcmTokens, "Motor Started 🚀", message); +// console.log(`✅ Motor start notification sent for Motor ID: ${hw_Id}`); -// try { -// const response = await admin.messaging().sendToDevice(registrationToken, message, options); -// console.log('FCM response:', response); // Log the FCM response -// return response; // Return the FCM response object -// } catch (error) { -// console.error('FCM error:', error); -// throw error; // Throw the error to handle it further up the call stack +// } catch (error) { +// console.error(`❌ Error in sendMotorStartNotification for Motor ID: ${hw_Id}`, error); +// } // } -// }; +// ); -exports. 
publishMotorStopStatus = async (motor_id, motor_stop_status) => { - const payload = { - topic: 'operation', - object: { - 'motor-id': motor_id, - control: motor_stop_status - } - }; - console.log("enetred publish") - console.log(payload) - client.publish('water/operation', JSON.stringify(payload)); -}; -const stat_stop_intervals = {}; -// exports.motorAction = async (req, reply) => { -// try { -// const customerId = req.params.customerId; -// const action = req.body.action; -// const motorId = req.body.motor_id; -// const start_instance_id = req.body.start_instance_id -// console.log(req.body.startTime) -// // Ensure motor_id is provided -// if (!motorId) { -// throw new Error("Motor ID is required."); -// } -// const users = await User.find({ customerId: customerId }); -// const fcmToken = users.map(user => user.fcmId).filter(fcmId => fcmId); -// console.log(fcmToken) +// eventEmitter.on( +// 'sendMotorStartNotification', +// async (fcmTokens, motorId, waterLevel, blockName, tankName, motorOnType, stopCriteria, manual_threshold_time, typeOfWater) => { +// try { +// // Get the latest timestamp +// const currentDateTime = new Date(); +// const startTime = currentDateTime.toLocaleString('en-IN', { timeZone: 'Asia/Kolkata' }); +// const formattedDate = currentDateTime.toLocaleDateString('en-IN', { timeZone: 'Asia/Kolkata' }); +// const formattedTime = currentDateTime.toLocaleTimeString('en-IN', { timeZone: 'Asia/Kolkata' }); -// // Determine the motor stop status based on the action -// let motorStopStatus; -// if (action === "start") { -// motorStopStatus = "2"; // If action is start, set stop status to "2" -// // eventEmitter.emit('motorStart', fcmToken); // Emit motor start event -// emitWithTimestamp('motorStart', fcmToken); // Emit motor start event with timestamp -// console.log( eventEmitter.emit('motorStart', fcmToken)) -// } else if (action === "stop") { -// motorStopStatus = "1"; // If action is stop, set stop status to "1" -// // 
eventEmitter.emit('motorStop', fcmToken); // Emit motor stop event -// emitWithTimestamp('motorStop', fcmToken); // Emit motor stop event with timestamp -// } else { -// throw new Error("Invalid action provided."); +// // Retrieve the user information +// const users = await User.find({ fcmIds: { $in: fcmTokens } }); +// const userNames = users.map(user => user.username).join(', '); + +// // Determine the pump initiation method +// const startMethod = motorOnType === "Mobile APP" ? "Mobile APP" : "Manual"; + +// // Dynamically generate the motor name +// const motorName = `${tankName}-${blockName}-${typeOfWater}`; + +// // Prepare the notification message +// const message = +// `🚰 Motor Name: '${motorName}'\n` + +// `🚰 Tank Name: '${tankName}'\n` + +// `🏢 Block Name: '${blockName}'\n` + +// `💧 Water Level: '${waterLevel}%'\n` + +// `👤 Initiated by: ${userNames}\n` + +// `📱 Pump started by: '${startMethod}'\n` + +// `🕒 Start Time: '${startTime}'\n` + +// `⏳ Will stop after: '${manual_threshold_time}' mins\n` + +// `📅 Date: '${formattedDate}'\n` + +// `⏰ Time: '${formattedTime}'`; + +// // Send the notification +// await sendNotification(fcmTokens, 'Motor Started 🚀', message); +// console.log('Motor start notification sent successfully!'); +// } catch (error) { +// console.error('Error in sendMotorStartNotification event:', error); // } +// } +// ); -// // Update the motor stop status immediately if action is stop -// if (action === "stop") { -// // Update the motor stop status and other fields -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.stopTime": req.body.stopTime, -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); - -// 
reply.code(200).send({ message: "Motor stopped successfully." }); - -// // Perform stop operations in the background -// (async () => { -// await delay(300000); -// // Update the existing motor data entry with stop details -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now; -// await Tank.findOneAndUpdate( -// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, -// { $set: { total_water_added_from_midnight: totalwaterpumped } } -// ); +// eventEmitter.on( +// "sendMotorStopNotification", +// async (hw_Id, fcmTokens, waterLevel, blockName, tankName, motorOnType, typeOfWater) => { +// try { +// const formattedTime = new Date().toLocaleTimeString("en-IN", { timeZone: "Asia/Kolkata" }); -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime: req.body.stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); +// const message = `⏹️ **Motor Stopped** 🛑\n` + +// `🔹 **Motor Name:** ${tankName}-${blockName}-${typeOfWater}\n` + +// `🏢 **Block Name:** ${blockName}\n` + +// `💧 **Water Level:** ${waterLevel}%\n` + +// `📱 **Mode:** Forced Manual\n` + +// `🕒 **Pump stopped at:** ${formattedTime}`; -// } -// })(); +// await sendNotification(hw_Id, fcmTokens, "Motor Stopped 🛑", message); 
+// console.log(`✅ Motor stop notification sent for Motor ID: ${hw_Id}`); -// // Return here to ensure the rest of the code is not executed for the stop action -// return; -// } else { -// // Update the motor stop status to "2" for start action -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } -// ); +// } catch (error) { +// console.error(`❌ Error in sendMotorStopNotification for Motor ID: ${hw_Id}`, error); // } +// } +// ); -// // Check threshold settings if action is start -// if (action === "start") { -// if (req.body.threshold_type === "time") { -// // If threshold type is time, update threshold time -// // await Tank.updateOne( -// // { customerId, "connections.inputConnections.motor_id": motorId }, -// // { $set: { "connections.inputConnections.$.manual_threshold_time": req.body.manual_threshold_time,startTime:req.body.startTime } } -// // ); -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); +// 🚀 Motor Start Notification -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); +eventEmitter.on("sendMotorStartNotification", async (hw_Id, customerId, fcmTokens, waterLevel, blockName, tankName, motorOnType, manualThresholdTime) => { + try { + console.log(`✅ Received sendMotorStartNotification event for ${customerId}`); + + const formattedTime = new Date().toLocaleTimeString("en-IN", { timeZone: "Asia/Kolkata" }); -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": 
motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, [`connections.inputConnections.${index}.startTime`]: req.body.startTime,[`connections.inputConnections.${index}.start_instance_id`]: start_instance_id } } -// ); -// } -// } - + const normalizedMotorOnType = motorOnType.toLowerCase(); + if (normalizedMotorOnType !== "forced_manual") { + console.log(`⚠️ Skipping notification: Motor started in **${motorOnType}** mode.`); + return; + } -// // Start monitoring water level based on threshold time -// const thresholdTime = moment().add(req.body.manual_threshold_time, 'minutes').toDate(); -// const intervalId = setInterval(async () => { -// const splr_tank_info3 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// const splr_tank_info3_waterlevel = parseInt(splr_tank_info3.waterlevel, 10); -// //console.log(splr_tank_info3_waterlevel,"splr_tank_info3_waterlevel") -// const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity.replace(/,/g, ''), 10); -// // const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity, 10); -// // console.log(splr_tank_info3.capacity,splr_tank_info3_capacity,"splr_tank_info3_capacity") -// const splr_tank_info3_percentage = (splr_tank_info3_waterlevel / splr_tank_info3_capacity) * 100; -// // console.log(splr_tank_info3_percentage, "percentage for less than 20"); + if (!Array.isArray(fcmTokens) || fcmTokens.length === 0) { + console.warn(`⚠️ No valid FCM tokens found for Customer ID: ${customerId}`); + return; + } -// if (new Date() >= thresholdTime || splr_tank_info3_percentage <= 20) { -// console.log(splr_tank_info3_percentage,) -// await Tank.updateOne( -// { 
customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); + // const stopConditionMessage = stopCriteria === "level" + // ? `🚨 Pump will stop when the water level reaches **${manualThresholdTime}%**.` + // : `⚠️ Pump will stop **manually**.`; -// await delay(300000); + const message = `🚰 Motor Started 🚀\n` + + `👤 Customer ID: ${customerId}\n` + + `🔹 Motor Name: ${tankName} - ${blockName}\n` + + `💧 Water Level: ${waterLevel}\n` + + `📱 Mode: Manually Started\n` + + `🕒 Pump started at: ${formattedTime}\n`; -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now -// await Tank.findOneAndUpdate({customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase()}, { $set: { total_water_added_from_midnight: totalwaterpumped } }) + await sendNotification(hw_Id, customerId, fcmTokens, "Motor Started 🚀", message); + console.log(`✅ Motor start notification sent for Customer ID: ${customerId}`); + } catch (error) { + console.error(`❌ Error in sendMotorStartNotification for Customer ID: ${customerId}`, error); + } +}); -// const stopTime = formatDate(new 
Date()); +eventEmitter.on("sendMotorStopNotification", async (hw_Id, customerId, fcmTokens, waterLevel, blockName, tankName, motorOnType) => { + try { + console.log(`✅ Received sendMotorStopNotification event for ${customerId}`); -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime:stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 60000); -// } else if (req.body.threshold_type === "litres") { -// console.log("entered litres") -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + const formattedTime = new Date().toLocaleTimeString("en-IN", { timeZone: "Asia/Kolkata" }); -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); -// // If threshold type is percentage, calculate percentage threshold -// const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); -// const supplier_tank_info = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// if (!receiver_tank_info) { -// throw new Error("Receiver tank not found."); -// } -// if (!supplier_tank_info) { -// throw new Error("Supplierr tank not found."); -// } -// const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); -// const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); + const normalizedMotorOnType = 
motorOnType.toLowerCase(); + if (normalizedMotorOnType !== "forced_manual") { + console.log(`⚠️ Skipping notification: Motor stopped in **${motorOnType}** mode.`); + return; + } -// const capacity = parseInt(receiver_tank_info.capacity, 10); -// const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); -// const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); + if (!Array.isArray(fcmTokens) || fcmTokens.length === 0) { + console.warn(`⚠️ No valid FCM tokens found for Customer ID: ${customerId}`); + return; + } -// console.log(desired_percentage) -// const threshold_water_level = waterLevel+desired_percentage; + const message = `🛑 Motor Stopped ❌\n` + + `👤 Customer ID: ${customerId}\n` + + `🔹 Motor Name: ${tankName} - ${blockName}\n` + + `💧 Water Level: ${waterLevel}\n` + + `📱 Mode: Manually Stopped\n` + + `🕒 Pump stopped at: ${formattedTime}`; -// const supplier_threshold = supplier_waterLevel-desired_percentage -// console.log(supplier_threshold,"supplier_threshold") + await sendNotification(hw_Id, customerId, fcmTokens, "Motor Stopped ❌", message); + console.log(`✅ Motor stop notification sent for Customer ID: ${customerId}`); + } catch (error) { + console.error(`❌ Error in sendMotorStopNotification for Customer ID: ${customerId}`, error); + } +}); -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } -// ); -// } -// } - +eventEmitter.on('sendLowWaterNotification', async (customerId, fcmTokens, hw_Id, tankName, blockName, lowWaterLevel, currentWaterLevel, 
currentWaterPercentage) => { + try { + const message = + `⚠️ Warning: Low water level detected!\n` + + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `📉 Low Water Threshold: '${lowWaterLevel} liters'\n` + + `📌 Current Water Level: '${currentWaterLevel} liters'\n` + + `📅 Date & Time: '${new Date().toLocaleString()}'`; + + await sendNotification(hw_Id, customerId, fcmTokens, 'Low Water Alert', message); + console.log("✅ Low water notification sent successfully."); + } catch (error) { + console.error("❌ Error sending low water notification:", error); + } +}); +eventEmitter.on('sendCriticalLowWaterNotification', async (customerId, fcmTokens, hw_Id, tankName, blockName, criticalLowWaterLevel, currentWaterLevel, currentWaterPercentage) => { + try { + const message = + `🚨 Critical Alert: Water level is **critically low!**\n` + + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `🔴 Critical Low Threshold: '${criticalLowWaterLevel} liters'\n` + + `📌 Current Water Level: '${currentWaterLevel} liters'\n` + + `📅 Date & Time: '${new Date().toLocaleString()}'`; + + await sendNotification(hw_Id, customerId, fcmTokens, 'Critical Water Alert', message); + console.log("✅ Critical low water notification sent successfully."); + } catch (error) { + console.error("❌ Error sending critical low water notification:", error); + } +}); -// // Update water level threshold - -// // Start monitoring water level based on threshold percentage -// const intervalId = setInterval(async () => { -// // Check if water level has reached the threshold percentage -// const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); -// if (current_water_level <= supplier_threshold) { -// // Stop the motor pump -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { 
-// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", - -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); // Stop monitoring water level -// await delay(300000); -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +// eventEmitter.on('sendLowWaterNotification', (fcmTokens, message) => { +// const notificationMessage = `Warning: Water level is low in the tank. +// Tank Name: ${tankName}, +// Location: ${receiverTank.location}, +// Type of Water: ${receiverTank.typeOfWater}, +// Current Water Level: ${currentWaterLevel} liters (${currentWaterPercentage.toFixed(2)}%), +// Date & Time: ${new Date().toLocaleString()}`; + +// // Send notifications using the provided FCM tokens +// sendNotification(fcmTokens, notificationMessage); +// }); +// eventEmitter.on('sendCriticalLowWaterNotification', (fcmTokens, message) => { +// const notificationMessage = `Critical Alert: Water level is critically low in the tank. 
+// Tank Name: ${tankName}, +// Location: ${receiverTank.location}, +// Type of Water: ${receiverTank.typeOfWater}, +// Current Water Level: ${currentWaterLevel} liters (${currentWaterPercentage.toFixed(2)}%), +// Date & Time: ${new Date().toLocaleString()}`; + +// // Send notifications using the provided FCM tokens +// sendNotification(fcmTokens, notificationMessage); +// }); -// const stopTime = formatDate(new Date()); -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime:stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 20000); // Check water level every minute -// } -// } -// // Respond with success message -// reply.code(200).send({ message: `Motor ${action === "start" ? "started" : "stopped"} successfully.` }); -// } catch (err) { -// // Handle errors -// throw boom.boomify(err); +// eventEmitter.on('sendMotorStartNotification', async (fcmTokens, message) => { +// try { +// await sendNotification(fcmTokens, "Motor Started", message); +// console.log("Manual method time notification sent successfully."); +// } catch (error) { +// console.error("Error sending thresmanual method time notification:", error); // } -// }; -const motorIntervals = {}; -exports.motorAction = async (req, reply) => { - try { - const customerId = req.params.customerId; - const action = req.body.action; - const motorId = req.body.motor_id; - const start_instance_id = req.body.start_instance_id; - - // Define thresholds for water levels - const lowWaterThreshold = 20; // Low water level percentage threshold - //const highWaterThreshold = 90; // High water level percentage threshold - const highWaterThreshold = 70; // High water level percentage threshold - const veryHighWaterThreshold = 80; // Very High water level percentage threshold - const criticalHighWaterThreshold = 85; - // Ensure 
motor_id is provided - if (!motorId) { - throw new Error("Motor ID is required."); - } - - // Get user FCM tokens - const users = await User.find({ customerId }); - const fcmToken = users.map(user => user.fcmId).filter(fcmId => fcmId); +// }); - const receiverTank = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); - console.log(receiverTank) - const currentWaterLevel = parseInt(receiverTank.waterlevel, 10); - const waterLevelThresholds = { low: 30, veryLow: 20, criticallyLow: 10 }; - - // // Check if the water level is below any of the thresholds - // if (currentWaterLevel < waterLevelThresholds.criticallyLow) { - // if (!receiverTank.notificationSentCritical) { - // eventEmitter.emit('sendCriticalLowWaterNotification', fcmToken, receiverTank); - // await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentCritical: true } }); - // } - // } else if (currentWaterLevel < waterLevelThresholds.veryLow) { - // if (!receiverTank.notificationSentVeryLow) { - // eventEmitter.emit('sendVeryLowWaterNotification', fcmToken, receiverTank); - // await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentVeryLow: true } }); - // } - // } else if (currentWaterLevel < waterLevelThresholds.low) { - // if (!receiverTank.notificationSentLow) { - // eventEmitter.emit('sendLowWaterNotification', fcmToken, receiverTank); - // await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentLow: true } }); - // } - // } -// Check for critical high water level -// if (currentWaterLevel >= criticalHighWaterThreshold) { -// if (!receiverTank.notificationSentCriticalHigh) { -// eventEmitter.emit('sendCriticalHighWaterNotification', fcmToken, receiverTank); -// await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentCriticalHigh: true } }); -// } -// } -// // Check for very high water level -// 
else if (currentWaterLevel >= veryHighWaterThreshold) { -// if (!receiverTank.notificationSentVeryHigh) { -// eventEmitter.emit('sendVeryHighWaterNotification', fcmToken, receiverTank); -// await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentVeryHigh: true } }); -// } -// } -// // Check for high water level -// else if (currentWaterLevel >= highWaterThreshold) { -// if (!receiverTank.notificationSentHigh) { -// eventEmitter.emit('sendHighWaterNotification', fcmToken, receiverTank); -// await Tank.updateOne({ customerId, tankName: receiverTank.tankName }, { $set: { notificationSentHigh: true } }); +// eventEmitter.on('sendMotorStopNotification', async (fcmTokens, message) => { +// try { +// await sendNotification(fcmTokens, "Motor Stopped", message); +// console.log("Manual method time notification sent successfully."); +// } catch (error) { +// console.error("Error sending thresmanual method time notification:", error); // } -// } - // Determine the motor stop status based on the action - let motorStopStatus; - const blockName = req.body.from || "Unknown Block"; // Provide a fallback if `from` is missing - const tankName = req.body.to || "Unknown Tank"; // Provide a fallback if `to` is missing - const stopTime = req.body.stopTime || new Date().toISOString(); - const motorOnType = req.body.motor_on_type || "application"; - if (action === "start") { +// }); - motorStopStatus = "2"; - const startTime = req.body.startTime; - const stopCriteria = - motorOnType === "time" - ? 
`${req.body.manual_threshold_time} minutes` - : `${req.body.manual_threshold_litres} litres`; - - eventEmitter.emit( - "motorStart", - fcmToken, - new Date().toISOString(), - motorId, - currentWaterLevel, - blockName, // Block Name - tankName, // Tank Name - startTime, - motorOnType, - stopCriteria - ); - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } - ); - reply.code(200).send({ message: "Motor started successfully." }); - } else if (action === "stop") { - motorStopStatus = "1"; // If action is stop, set stop status to "1" - eventEmitter.emit( - "motorStop", - fcmToken, - motorId, - currentWaterLevel, - blockName, - tankName, - stopTime, - motorOnType - ); - } else { - throw new Error("Invalid action provided."); - } +// Function to emit events with timestamps +const emitWithTimestamp = (eventName, fcmTokens, motorId, waterLevel) => { + const timestamp = moment().format('HH:mm:ss YYYY-MM-DD '); + eventEmitter.emit(eventName, fcmTokens, timestamp, motorId, waterLevel); +}; - // If action is stop, immediately update motor status and perform stop operations - if (action === "stop") { - console.log("enterted stop") - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { - $set: { - "connections.inputConnections.$.motor_stop_status": "1", - "connections.inputConnections.$.motor_on_type": "manual", - "connections.inputConnections.$.stopTime": req.body.stopTime, - "connections.inputConnections.$.threshold_type": null, - "connections.inputConnections.$.manual_threshold_time": null, - "connections.inputConnections.$.manual_threshold_percentage": null - } - } - ); - if (motorIntervals[motorId]) { - console.log(motorIntervals[motorId],"deleted") - clearInterval(motorIntervals[motorId]); // Clear the interval - delete motorIntervals[motorId]; // Remove the interval from the object - } - this.publishMotorStopStatus(motorId, 
motorStopStatus); - - // Send immediate response to the client - reply.code(200).send({ message: "Motor stopped successfully." }); - // Perform stop operations in the background - (async () => { - await delay(300000); - - const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); - if (motorData) { - const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); - const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); - const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); - const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); - const totalwaterpumped = quantityDelivered + water_pumped_till_now; - - await Tank.findOneAndUpdate( - { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, - { $set: { total_water_added_from_midnight: totalwaterpumped } } - ); - - await MotorData.updateOne( - { customerId, motor_id: motorId, start_instance_id: start_instance_id }, - { - $set: { - stopTime: req.body.stopTime, - receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), - quantity_delivered: quantityDelivered.toString() - } - } - ); - } - })(); +// const sendNotification = async (fcmTokens, title, body) => { +// if (!Array.isArray(fcmTokens) || fcmTokens.length === 0) { +// console.error('No FCM tokens provided.'); +// return; +// } - return; // Return early to avoid executing the start logic - } else { - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } - ); - } +// for (const token of fcmTokens) { +// const message = { +// token: token, +// notification: { +// title: title, +// body: body, +// }, +// data: { +// target: 'tank_levels', +// }, +// }; - // Check threshold settings 
if action is start - if (action === "start") { - if (req.body.threshold_type === "time") { - // Create a new MotorData entry - const receiverTank = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); - const newMotorData = new MotorData({ - customerId, - motor_id: motorId, - start_instance_id: start_instance_id, - supplierTank: req.body.from, - receiverTank: req.body.to, - supplier_type: req.body.from_type, - receiver_type: req.body.to_type, - startTime: req.body.startTime, - receiverInitialwaterlevel: parseInt(receiverTank.waterlevel, 10) - }); - await newMotorData.save(); +// try { +// const response = await admin.messaging().send(message); // Send each message individually +// console.log('Notification sent successfully:', response); +// } catch (error) { +// console.error(`Failed to send notification to token ${token}:`, error); +// } +// } +// }; - // Update the tank connections with start time and threshold time - for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { - this.publishMotorStopStatus(motorId, motorStopStatus); - for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { - const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); - if (index !== -1) { - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { - $set: { - [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, - [`connections.inputConnections.${index}.threshold_type`]: "time", - [`connections.inputConnections.${index}.startTime`]: req.body.startTime, - [`connections.inputConnections.${index}.start_instance_id`]: start_instance_id - } - } - ); - } - } +// const sendNotification = async (fcmIds, title, body) => { +// try { +// if (!fcmIds || fcmIds.length === 0) { +// throw new Error('No FCM tokens provided.'); +// } - const thresholdTime 
= new Date(new Date().getTime() + req.body.manual_threshold_time * 60000); - motorIntervals[motorId] = setInterval(async () => { - const supplierTank = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); - const currentWaterLevel = parseInt(supplierTank.waterlevel, 10); - const currentWaterPercentage = (currentWaterLevel / parseInt(supplierTank.capacity.replace(/,/g, ''), 10)) * 100; +// const flatTokens = fcmIds.flat(); +// if (flatTokens.length === 0) { +// throw new Error('Flattened FCM token list is empty.'); +// } - if (new Date() >= thresholdTime || currentWaterPercentage <= lowWaterThreshold) { - console.log(new Date(),"new date") - console.log(thresholdTime,"thresholdTime") - console.log("motor stopping because it entered this condition") - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { - $set: { - "connections.inputConnections.$.motor_stop_status": "1", - - "connections.inputConnections.$.threshold_type": null, - "connections.inputConnections.$.manual_threshold_time": null, - "connections.inputConnections.$.manual_threshold_percentage": null - } - } - ); - emitWithTimestamp('lowWaterLevel', fcmToken); - console.log(motorIntervals[motorId],"deleted automatically") // Emit low water level notification - clearInterval(motorIntervals[motorId]); // Clear interval - delete motorIntervals[motorId]; - - this.publishMotorStopStatus(motorId, "1"); - await delay(300000); - const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); - if (motorData) { - const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); - const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); - const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); - const water_pumped_till_now = 
parseInt(receiverTank.total_water_added_from_midnight, 10); - const totalwaterpumped = quantityDelivered + water_pumped_till_now; +// // Iterate over each token and send individually +// const promises = flatTokens.map(async (token) => { +// try { +// const response = await admin.messaging().send({ +// notification: { title, body }, +// token, +// data: { +// 'target': 'tank_levels', +// }, +// }); +// console.log(`Notification sent successfully to token: ${token}`, response); +// } catch (error) { +// console.error(`Failed to send notification to token: ${token}`, error); +// // Check for specific error indicating an invalid token +// if (error.code === 'messaging/registration-token-not-registered') { +// // Remove the invalid token from the database +// await User.updateOne( +// { fcmIds: token }, // Ensure you're targeting the right user with the invalid token +// { $pull: { fcmIds: token } } // Remove the invalid token +// ); +// console.log(`Removed invalid token: ${token}`); +// } +// } +// }); - await Tank.findOneAndUpdate( - { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, - { $set: { total_water_added_from_midnight: totalwaterpumped } } - ); +// await Promise.all(promises); +// } catch (error) { +// console.error('Error sending notifications:', error); +// } +// }; - await MotorData.updateOne( - { customerId, motor_id: motorId, start_instance_id: start_instance_id }, - { - $set: { - stopTime: req.body.stopTime, - receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), - quantity_delivered: quantityDelivered.toString() - } - } - ); - } - } +// const sendNotification = async (customerId, fcmIds, title, body) => { +// try { +// if (!customerId) { +// throw new Error("Customer ID is required."); +// } - // Check for high water level and send notification - if (currentWaterPercentage >= highWaterThreshold) { - emitWithTimestamp('highWaterLevel', fcmToken); // Emit high water level notification - } +// 
if (!Array.isArray(fcmIds) || fcmIds.length === 0) { +// throw new Error("No FCM tokens provided or invalid format."); +// } - }, 30000); // Check every minute - } - } - } - // Respond with success message - reply.code(200).send({ message: `Motor ${action === "start" ? "started" : "stopped"} successfully.` }); +// // Safely flatten the tokens +// const flatTokens = fcmIds.flat ? fcmIds.flat() : fcmIds; // Use flat() if available +// if (flatTokens.length === 0) { +// throw new Error("Flattened FCM token list is empty."); +// } - } catch (err) { - throw boom.boomify(err); - } -}; +// // Fetch user notification settings +// const users = await User.find({ customerId }).select("fcmIds notificationPreference lastNotificationSent"); +// // Iterate over users to send notifications based on their preferences +// const promises = users.map(async (user) => { +// const { fcmIds: userFcmIds, notificationPreference, lastNotificationSent } = user; -// exports.motorAction = async (req, reply) => { -// try { +// // Check if userFcmIds is an array +// if (!Array.isArray(userFcmIds)) { +// console.log(`Invalid fcmIds for customer ID: ${customerId}`); +// return; +// } + +// // Filter tokens that belong to the user +// const validTokens = flatTokens.filter(token => userFcmIds.includes(token)); + +// if (validTokens.length === 0) { +// console.log(`No matching FCM tokens for customer ID: ${customerId}`); +// return; +// } + +// // // Handle notification preference +// // if (notificationPreference === "never") { +// // console.log(`Notifications disabled for customer ID: ${customerId}`); +// // return; +// // } + +// const now = new Date(); +// const lastSent = new Date(lastNotificationSent || 0); + +// // If preference is not "always", check the timing +// // if (notificationPreference !== "always") { +// // let minInterval = 0; + +// // switch (notificationPreference) { +// // case "6_hours": +// // minInterval = 6 * 60 * 60 * 1000; // 6 hours +// // break; +// // case "8_hours": 
+// // minInterval = 8 * 60 * 60 * 1000; // 8 hours +// // break; +// // case "1_month": +// // minInterval = 30 * 24 * 60 * 60 * 1000; // 1 month +// // break; +// // } + +// // // Skip sending if the time restriction hasn't passed +// // if (now - lastSent < minInterval) { +// // console.log(`Skipping notification for customer ID: ${customerId} due to time restriction.`); +// // return; +// // } +// // } + +// // Send notifications +// const notificationPromises = flatTokens.map(async (token) => { +// try { +// const response = await admin.messaging().send({ +// notification: { title, body }, +// token, +// data: { target: "/tank_levels" }, +// }); + +// console.log(`Notification sent successfully to token: ${token}`); +// console.log("FCM Response:", response); // Log the full response +// console.log(`Title: ${title}, Body: ${body}`); // Log title and body before sending + + +// } catch (error) { +// console.error(`Failed to send notification to token: ${token}`, error); + +// // Handle token errors +// if (error.code === "messaging/registration-token-not-registered") { +// await User.updateOne( +// { customerId }, +// { $pull: { fcmIds: token } } // Remove invalid token +// ); +// console.log(`Removed invalid token: ${token}`); +// } +// } +// }); + +// await Promise.all(notificationPromises); + +// // Update lastNotificationSent timestamp if not "always" +// if (notificationPreference !== "always") { +// await User.updateOne({ customerId }, { lastNotificationSent: now }); +// } +// }); + +// await Promise.all(promises); + +// } catch (error) { +// console.error("Error sending notifications:", error); +// } +// }; + +// important +// const sendNotification = async (customerId, fcmIds, title, body) => { +// try { +// if (!customerId) { +// throw new Error("Customer ID is required."); +// } + +// if (!Array.isArray(fcmIds) || fcmIds.length === 0) { +// console.log("No valid FCM tokens provided."); +// return; +// } + +// // Flatten nested arrays +// const 
flatFcmIds = fcmIds.flat(); + +// // Fetch user from database +// const user = await User.findOne({ customerId }).select("fcmIds"); +// if (!user || !Array.isArray(user.fcmIds)) { +// console.log(`No valid FCM tokens found for customer ID: ${customerId}`); +// return; +// } + +// console.log("User's stored FCM tokens:", user.fcmIds); +// console.log("FCM tokens passed to function:", flatFcmIds); + +// // Proper token filtering +// const validTokens = user.fcmIds.filter(token => flatFcmIds.some(t => t.trim() === token.trim())); + +// console.log("Valid tokens for notification:", validTokens); + +// if (validTokens.length === 0) { +// console.log(`No matching FCM tokens for customer ID: ${customerId}`); +// return; +// } + +// const promises = validTokens.map(async (token) => { +// try { +// console.log(`Sending notification to token: ${token}`); + +// const response = await admin.messaging().send({ +// notification: { title, body }, +// token, +// data: { target: "/tank_levels" }, +// }); + +// console.log(`Notification sent successfully:`, response); +// console.log(`title:`, title); +// console.log(`body:`, body); + + +// } catch (error) { +// console.error(`Failed to send notification to token: ${token}`, error); + +// if (error?.errorInfo?.code === "messaging/registration-token-not-registered") { +// await User.updateOne({ customerId }, { $pull: { fcmIds: token } }); +// console.log(`Removed invalid token: ${token}`); +// } +// } +// }); + +// await Promise.all(promises); +// } catch (error) { +// console.error("Error sending notifications:", error); +// } +// }; + + + + + +const sendNotification = async (hw_Id, customerId, fcmIds, title, body) => { + try { + if (!customerId) throw new Error("Customer ID is required."); + if (!Array.isArray(fcmIds) || fcmIds.length === 0) { + throw new Error("No FCM tokens provided or invalid format."); + } + + const flatTokens = fcmIds.flat ? 
fcmIds.flat() : fcmIds; + if (flatTokens.length === 0) throw new Error("Flattened FCM token list is empty."); + + console.log(`📨 Preparing to send notification to Customer ID: ${customerId}`); + + // Fetch users with FCM tokens & preferences + const users = await User.find({ customerId }).select("fcmIds notificationPreference lastNotificationSent"); + + const promises = users.map(async (user) => { + const { fcmIds: userFcmIds, notificationPreference, lastNotificationSent } = user; + if (!Array.isArray(userFcmIds)) return console.log(`⚠️ Invalid fcmIds for customer ID: ${customerId}`); + + const validTokens = flatTokens.filter(token => userFcmIds.includes(token)); + if (validTokens.length === 0) return console.log(`⚠️ No matching FCM tokens for customer ID: ${customerId}`); + + // Handle notification preferences + if (notificationPreference === "never") return console.log(`🔕 Notifications disabled for Customer ID: ${customerId}`); + + const now = new Date(); + const lastSent = new Date(lastNotificationSent || 0); + let minInterval = 0; + + switch (notificationPreference) { + case "6_hours": + minInterval = 6 * 60 * 60 * 1000; + break; + case "8_hours": + minInterval = 8 * 60 * 60 * 1000; + break; + case "1_month": + minInterval = 30 * 24 * 60 * 60 * 1000; + break; + } + + if (notificationPreference !== "always" && now - lastSent < minInterval) { + return console.log(`⏳ Skipping notification for Customer ID: ${customerId} due to preference (${notificationPreference}).`); + } + + console.log(`🚀 Sending notification to Customer ID: ${customerId}, Tokens: ${validTokens.length}`); + + const notificationPromises = validTokens.map(async (token) => { + try { + const response = await admin.messaging().send({ + token, + notification: { title, body }, + data: { + hw_Id: String(hw_Id), + target: "/tank_levels" + }, + }); + + console.log(`✅ Notification sent successfully to token: ${token}`); + console.log("📬 FCM Response:", response); + console.log(`📡 Sending notification to 
Customer ID: ${customerId}`); + //console.log(`🔍 FCM Tokens:`, fcmTokens); + + + } catch (error) { + // console.error(`❌ Failed to send notification to token: ${token}`, error); + + if (error.code === "messaging/registration-token-not-registered") { + await User.updateOne({ customerId }, { $pull: { fcmIds: token } }); + console.log(`🗑️ Removed invalid token: ${token}`); + } + } + }); + + await Promise.all(notificationPromises); + + if (notificationPreference !== "always") { + await User.updateOne({ customerId }, { $set: { lastNotificationSent: now } }); + } + }); + + await Promise.all(promises); + + } catch (error) { + console.error("❌ Error sending notifications:", error); + } +}; + + + + + + +// Function to send notifications +// const sendDailyConsumptionNotification = async () => { +// try { +// const now = new Date(); +// const currentTime = moment(now).format("HH:mm"); // e.g., "09:30" +// const currentDate = moment(now).format("DD-MMM-YYYY"); // e.g., "28-Feb-2025" + +// console.log(`🕒 Checking users for scheduled notifications at ${currentTime}`); + +// // Fetch unique users who have enabled notifications for the current time +// const users = await User.find({ +// allowNotifications: true, +// notificationTime: currentTime, +// }).select("customerId fcmIds lastNotificationSent").lean(); + +// // Ensure unique customers only +// const uniqueUsers = users.filter((user, index, self) => +// index === self.findIndex((u) => u.customerId === user.customerId) +// ); + +// if (uniqueUsers.length === 0) { +// console.log("⏳ No users have notifications scheduled for this time."); +// return; +// } + +// for (const user of uniqueUsers) { +// const { customerId, fcmIds, lastNotificationSent } = user; + +// // Ensure user has valid FCM tokens +// if (!Array.isArray(fcmIds) || fcmIds.length === 0) { +// console.log(`⚠️ No valid FCM tokens for customer ID: ${customerId}`); +// continue; +// } + +// // Remove duplicate and trim tokens +// const uniqueTokens = [...new 
Set(fcmIds.map(token => token.trim()))]; + +// // Check if notification should be sent based on lastNotificationSent +// const lastSent = new Date(lastNotificationSent || 0); +// if (now - lastSent < 24 * 60 * 60 * 1000) { +// console.log(`⏳ Skipping notification for ${customerId}, already sent in the last 24 hours.`); +// continue; +// } + +// // Fetch last 24-hour consumption data +// const startTime = moment(now).subtract(1, "days").format("DD-MMM-YYYY - HH:mm"); +// const endTime = moment(now).format("DD-MMM-YYYY - HH:mm"); + +// console.log(`📅 Fetching consumption for ${customerId} from ${startTime} to ${endTime}`); + +// const consumptions = await TankConsumptionOriginalSchema.find({ +// customerId, +// time: { $gte: startTime, $lt: endTime }, +// }); + +// if (consumptions.length === 0) { +// console.log(`❌ No consumption data found for ${customerId}`); +// continue; +// } + +// // Standardized mapping for water types +// const typeMapping = { +// "bore water": "Bore Water", +// "bore": "Bore Water", +// "drinking": "Drinking Water", +// "drink": "Drinking Water", +// "DRINK": "Drinking Water" +// }; + +// // Calculate consumption per water type +// let totalConsumption = 0; +// let consumptionByType = {}; + +// for (const record of consumptions) { +// let { typeofwater, consumption } = record; +// typeofwater = (typeofwater || "").trim().toLowerCase(); // Normalize case +// const standardType = typeMapping[typeofwater] || typeofwater; // Use mapped name or original + +// const consumptionValue = parseInt(consumption, 10) || 0; +// if (!consumptionByType[standardType]) { +// consumptionByType[standardType] = 0; +// } + +// consumptionByType[standardType] += consumptionValue; +// totalConsumption += consumptionValue; +// } + +// // Prepare notification message +// let notificationBody = `🚰 Water Consumption Report for ${currentDate}:\n`; + +// for (const type in consumptionByType) { +// const percentage = totalConsumption ? 
((consumptionByType[type] / totalConsumption) * 100).toFixed(2) : 0; +// notificationBody += `\n💧 Type: ${type}\n` + +// `Total Consumption: ${consumptionByType[type]} liters (${percentage}%)\n`; +// } + +// console.log(`📩 Preparing notification for ${customerId}:`, notificationBody); + +// // Update lastNotificationSent before sending +// await User.updateOne( +// { customerId }, +// { $set: { lastNotificationSent: new Date() } } +// ); + +// // Send notification to FCM tokens +// const notificationPromises = uniqueTokens.map(async (token) => { +// try { +// await admin.messaging().send({ +// notification: { title: "Daily Water Consumption Report", body: notificationBody }, +// token, +// data: { target: "/tank_levels" }, +// }); + +// console.log(`✅ Notification sent to token: ${token}`); +// } catch (error) { +// console.error(`❌ Failed to send notification to token: ${token}`, error); + +// if (error.code === "messaging/registration-token-not-registered") { +// await User.updateOne({ customerId }, { $pull: { fcmIds: token } }); +// console.log(`🚫 Removed invalid token: ${token}`); +// } +// } +// }); + +// await Promise.all(notificationPromises); +// } +// } catch (error) { +// console.error("❌ Error sending daily consumption notifications:", error); +// } +// }; + +// cron.schedule("* * * * *", async () => { +// console.log("🔄 Running daily consumption notification check..."); +// await sendDailyConsumptionNotification(); +// }, { +// timezone: "Asia/Kolkata", +// }); + + + +// const sendPushNotification = async (registrationToken, title, body) => { +// const message = { +// notification: { +// title: title, +// body: body, +// }, +// data: { +// title: title, +// body: body, +// }, +// }; + +// const options = { +// priority: "high", +// timeToLive: 60 * 60 * 24, +// }; + +// try { +// const response = await admin.messaging().sendToDevice(registrationToken, message, options); +// console.log('FCM response:', response); // Log the FCM response +// return 
response; // Return the FCM response object +// } catch (error) { +// console.error('FCM error:', error); +// throw error; // Throw the error to handle it further up the call stack +// } +// }; + + + + +exports.publishMotorStopStatus = async (motor_id, motor_stop_status) => { + // console.log("entered publish",motor_id,motor_stop_status) + const deviceTopic = `water/operation/${motor_id}`; // Target specific IoT + //console.log(deviceTopic,"deviceTopic") + const payload = { + topic: 'operation', + object: { + 'motor-id': motor_id, + control: motor_stop_status + } + }; + + //console.log(`📡 Publishing to ${deviceTopic}`); + console.log(payload); + + // Publish to the specific device's control topic + client.publish(deviceTopic, JSON.stringify(payload)); +}; + +const stat_stop_intervals = {}; +// exports.motorAction = async (req, reply) => { +// try { // const customerId = req.params.customerId; // const action = req.body.action; // const motorId = req.body.motor_id; @@ -2303,12 +2511,21 @@ exports.motorAction = async (req, reply) => { // throw new Error("Motor ID is required."); // } +// const users = await User.find({ customerId: customerId }); +// const fcmToken = users.map(user => user.fcmId).filter(fcmId => fcmId); +// console.log(fcmToken) + // // Determine the motor stop status based on the action // let motorStopStatus; // if (action === "start") { // motorStopStatus = "2"; // If action is start, set stop status to "2" +// // eventEmitter.emit('motorStart', fcmToken); // Emit motor start event +// emitWithTimestamp('motorStart', fcmToken); // Emit motor start event with timestamp +// console.log( eventEmitter.emit('motorStart', fcmToken)) // } else if (action === "stop") { // motorStopStatus = "1"; // If action is stop, set stop status to "1" +// // eventEmitter.emit('motorStop', fcmToken); // Emit motor stop event +// emitWithTimestamp('motorStop', fcmToken); // Emit motor stop event with timestamp // } else { // throw new Error("Invalid action provided."); 
// } @@ -2329,7 +2546,6 @@ exports.motorAction = async (req, reply) => { // } // ); -// // Send immediate response to the client // reply.code(200).send({ message: "Motor stopped successfully." }); // // Perform stop operations in the background @@ -2359,6 +2575,7 @@ exports.motorAction = async (req, reply) => { // } // } // ); + // } // })(); @@ -2567,2228 +2784,5636 @@ exports.motorAction = async (req, reply) => { // } // }; +const notificationSentStatus = { + motorStart: false, + motorStop: false, + lowWater: false, + veryLowWater: false, + criticallyLowWater: false, + highWater: false, + veryHighWater: false, + criticallyHighWater: false, +}; +let waterLevelCheckInterval; // To hold the interval ID -// exports.motorAction = async (req, reply) => { -// try { -// const customerId = req.params.customerId; -// const action = req.body.action; -// const motorId = req.body.motor_id; -// const start_instance_id = req.body.start_instance_id; -// //const fcmIds = req.body.fcmIds; // Assume this is provided in the request to notify users -// // Ensure motor_id is provided -// if (!motorId) { -// throw new Error("Motor ID is required."); -// } +exports.getPumpsAndUsers = async (req, reply) => { + try { + const { customerId } = req.params; -// // Determine the motor stop status based on the action -// let motorStopStatus; -// if (action === "start") { -// motorStopStatus = "2"; // If action is start, set stop status to "2" -// } else if (action === "stop") { -// motorStopStatus = "1"; // If action is stop, set stop status to "1" -// } else { -// throw new Error("Invalid action provided."); -// } -// const users = await User.find({ customerId: customerId }); -// const fcmIds = users.map(user => user.fcmId).filter(fcmId => fcmId); + // Fetch motor_id from inputConnections of all tanks for the customer + const tanks = await Tank.find({ customerId }, { "connections.inputConnections.motor_id": 1 }); -// // Handle motor stop action -// if (action === "stop") { -// // Update the 
motor stop status and other fields -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.stopTime": req.body.stopTime, -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); + const motorIds = tanks.flatMap((tank) => + tank.connections?.inputConnections?.map((conn) => conn.motor_id).filter(Boolean) || [] + ); -// // Send immediate response to the client -// reply.code(200).send({ message: "Motor stopped successfully." }); + // Fetch username and staff names from User collection + const user = await User.findOne({ customerId }, { username: 1, "staff.staff.name": 1 }); -// // Send notification for motor stop -// for (const fcmId of fcmIds) { -// try { -// const response = await sendPushNotification(fcmId, 'Motor Stopped', `Motor has stopped at ${req.body.stopTime}.`); -// console.log('Notification sent successfully:', response); + const staffNames = user?.staff?.staff?.map((s) => s.name) || []; + const username = user?.username || ""; -// if (response.results[0].error === 'NotRegistered') { -// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); -// console.log(`Removed unregistered FCM ID: ${fcmId}`); -// } -// } catch (error) { -// console.error('Error sending notification:', error); -// } -// } + // Include username at the beginning of staffNames and add "manual" at the end + const updatedStaffNames = ["All",username, ...staffNames, "manual","user"]; + + const updatedmotorIds = ["All",...motorIds]; -// // Perform stop operations in the background -// (async () => { -// await delay(300000); -// // Update the existing motor data entry with stop details -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, 
start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now; -// await Tank.findOneAndUpdate( -// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, -// { $set: { total_water_added_from_midnight: totalwaterpumped } } -// ); + // Prepare response + const result = { + motorIds: updatedmotorIds, + staffNames: updatedStaffNames, + }; -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime: req.body.stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// })(); + return reply.send({ success: true, data: result }); + } catch (error) { + console.error("Error fetching data:", error); + return reply.status(500).send({ success: false, message: "Internal Server Error" }); + } +}; -// // Return here to ensure the rest of the code is not executed for the stop action -// return; -// } else { -// // Update the motor stop status to "2" for start action -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } -// ); +exports.motoractiontest = async (req, reply) => { + try { + const { customerId } = req.params; + const { motor_id, action } = req.body; -// // Send notification for motor start -// for (const fcmId of fcmIds) { -// try { -// const response 
= await sendPushNotification(fcmId, 'Motor Started', `Motor has started at ${req.body.startTime}.`); -// console.log('Notification sent successfully:', response); + // Fetch Tank data using customerId + const customer = await Tank.findOne({ customerId }); -// if (response.results[0].error === 'NotRegistered') { -// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); -// console.log(`Removed unregistered FCM ID: ${fcmId}`); -// } -// } catch (error) { -// console.error('Error sending notification:', error); -// } -// } -// } + // console.log("Customer Data:", customer); -// // Check threshold settings if action is start -// if (action === "start") { -// if (req.body.threshold_type === "time") { -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + if (!customer) { + return reply.status(404).send({ success: false, message: "Customer not found" }); + } -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel: parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); + let motorFound = false; -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, [`connections.inputConnections.${index}.startTime`]: req.body.startTime, [`connections.inputConnections.${index}.start_instance_id`]: 
start_instance_id } } -// ); -// } -// } + // Traverse through inputConnections instead of tanks.inputconnections + for (const inputConnection of customer.connections.inputConnections || []) { + console.log("Checking Motor ID:", inputConnection.motor_id); -// // Start monitoring water level based on threshold time -// const thresholdTime = moment().add(req.body.manual_threshold_time, 'minutes').toDate(); -// const intervalId = setInterval(async () => { -// const splr_tank_info3 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// const splr_tank_info3_waterlevel = parseInt(splr_tank_info3.waterlevel, 10); -// const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity.replace(/,/g, ''), 10); -// const splr_tank_info3_percentage = (splr_tank_info3_waterlevel / splr_tank_info3_capacity) * 100; + if (String(inputConnection.motor_id) === String(motor_id)) { // Convert both to string + motorFound = true; -// if (new Date() >= thresholdTime || splr_tank_info3_percentage <= 20) { -// // Send notification for low supplier tank percentage -// for (const fcmId of fcmIds) { -// try { -// const response = await sendPushNotification(fcmId, 'Low Water Level Alert', `Supplier tank water level is below 20% (${splr_tank_info3_percentage.toFixed(2)}%).`); -// console.log('Notification sent successfully:', response); + if (action === "start") { + await this.publishMotorStopStatus(motor_id, "2"); + } else if (action === "stop") { + await this.publishMotorStopStatus(motor_id, "1"); + } else { + return reply.status(400).send({ success: false, message: "Invalid action" }); + } -// if (response.results[0].error === 'NotRegistered') { -// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); -// console.log(`Removed unregistered FCM ID: ${fcmId}`); -// } -// } catch (error) { -// console.error('Error sending notification:', error); -// } -// } + return reply.send({ success: true, message: `Motor ${action} 
command sent.` }); + } + } -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); + if (!motorFound) { + return reply.status(404).send({ success: false, message: "Motor ID not found" }); + } -// await delay(300000); + } catch (error) { + console.error("Error fetching data:", error); + return reply.status(500).send({ success: false, message: "Internal Server Error" }); + } +}; -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); -// const totalwaterpumped = quantityDelivered + water_pumped_till_now; -// await Tank.findOneAndUpdate( -// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, -// { $set: { total_water_added_from_midnight: totalwaterpumped } } -// ); +exports.motoractiontestbeforeqc = async (req, reply) => { + try { + -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime: req.body.stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 60000); // Check 
every minute -// } else if (req.body.threshold_type === "litres") { -// console.log("entered litres") -// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + const { motor_id, action } = req.body; + + -// const newMotorData = new MotorData({ -// customerId: customerId, -// motor_id: motorId, -// start_instance_id: start_instance_id, -// supplierTank: req.body.from, -// receiverTank: req.body.to, -// supplier_type: req.body.from_type, -// receiver_type: req.body.to_type, -// startTime: req.body.startTime, -// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) -// }); -// await newMotorData.save(); -// // If threshold type is percentage, calculate percentage threshold -// const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); -// const supplier_tank_info = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// if (!receiver_tank_info) { -// throw new Error("Receiver tank not found."); -// } -// if (!supplier_tank_info) { -// throw new Error("Supplierr tank not found."); -// } -// const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); -// const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); + -// const capacity = parseInt(receiver_tank_info.capacity, 10); -// const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); -// const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); + if (action === "start") { + await this.publishMotorStopStatus(motor_id, "2"); + } else if (action === "stop") { + await this.publishMotorStopStatus(motor_id, "1"); + } else { + return reply.status(400).send({ success: false, message: "Invalid action" }); + } -// console.log(desired_percentage) -// const threshold_water_level = waterLevel+desired_percentage; + return reply.send({ 
success: true, message: `Motor ${action} command sent.` }); + + -// const supplier_threshold = supplier_waterLevel-desired_percentage -// console.log(supplier_threshold,"supplier_threshold") + + } catch (error) { + console.error("Error fetching data:", error); + return reply.status(500).send({ success: false, message: "Internal Server Error" }); + } +}; -// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { -// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); -// if (index !== -1) { -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } -// ); -// } -// } - +const motorIntervals = {}; +async function calculateTotalPumpedWater(customerId, motorId, start_instance_id) { + const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); + if (motorData) { + const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); + const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); + const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); + const waterPumpedTillNow = parseInt(receiverTank.total_water_added_from_midnight, 10); + return quantityDelivered + waterPumpedTillNow; // Total water pumped + } + return 0; // Return 0 if no data found +} - -// // Start monitoring water level based on threshold percentage -// const intervalId = setInterval(async () => { -// // Check if water level has reached the threshold percentage -// const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); 
-// const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); -// if (current_water_level <= supplier_threshold) { -// // Stop the motor pump -// await Tank.updateOne( -// { customerId, "connections.inputConnections.motor_id": motorId }, -// { -// $set: { -// "connections.inputConnections.$.motor_stop_status": "1", - -// "connections.inputConnections.$.threshold_type": null, -// "connections.inputConnections.$.manual_threshold_time": null, -// "connections.inputConnections.$.manual_threshold_percentage": null -// } -// } -// ); -// clearInterval(intervalId); // Stop monitoring water level -// await delay(300000); -// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); -// if (motorData) { -// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); -// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); -// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +exports.motorAction = async (req, reply) => { + try { + const { customerId } = req.params; + const { action, motor_id: motorId, start_instance_id, phone, threshold_type, manual_threshold_time, manual_threshold_litres } = req.body; + + if (!motorId) throw new Error("Motor ID is required."); + const users = await User.findOne({ customerId }); + if (!users) return reply.status(404).send({ error: "User not found" }); + + let loggedInUser = users.phone === phone ? + { role: "Customer", name: users.username, phone: users.phone } : + users.staff?.staff?.find(staff => staff.phone === phone) ? + { role: "Staff", name: users.staff.staff.find(staff => staff.phone === phone).name, phone } : null; + + if (!loggedInUser) return reply.status(404).send({ error: "User not found" }); + console.log(loggedInUser) + + const fcmToken = users.fcmIds ? 
users.fcmIds.filter(id => id) : []; + const receiverTank = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + if (!receiverTank) throw new Error("Receiver tank not found."); + + const typeOfWater = receiverTank.typeOfWater; + let motorStopStatus = action === "start" ? "2" : "1"; + const blockName = req.body.from || "Unknown Block"; + const tankName = req.body.to || "Unknown Tank"; + const stopTime = req.body.stopTime + const inputConnection_1 = receiverTank.connections?.inputConnections?.find( + conn => conn.motor_id === motorId + ); + // Step 2: Check motor_stop_status + if (action === "start" && inputConnection_1.motor_stop_status === "2") { + // ✅ Proceed with motor start logic + return reply.status(400).send({ error: "Motor is already running or blocked from starting." }); -// const stopTime = formatDate(new Date()); + // ... your logic to handle starting the motor -// await MotorData.updateOne( -// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, -// { -// $set: { -// stopTime:stopTime, -// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), -// quantity_delivered: quantityDelivered.toString() -// } -// } -// ); -// } -// } -// }, 20000); -// } -// } - -// reply.code(200).send({ message: `Motor ${action === "start" ? 
"started" : "stopped"} successfully.` }); -// } catch (err) { -// // Handle errors -// throw boom.boomify(err); -// } -// }; - -const motorActionAuto = async (req, reply) => { - try { - const customerId = req.params.customerId; - const action = req.body.action; - const motorId = req.body.motor_id; - const motor_on_type = req.body.motor_on_type - - if (!motorId) { - throw new Error("Motor ID is required."); - } - - let motorStopStatus; + } + + if (action === "start" && inputConnection_1.motor_stop_status !== "2") { + + if (motorIntervals[motorId]) { + console.log(`🛑 Clearing old interval for motorId: ${motorId}`); + clearInterval(motorIntervals[motorId]); + delete motorIntervals[motorId]; + + // Confirm deletion + if (!motorIntervals[motorId]) { + console.log(`✅ Old interval successfully deleted.`); + } else { + console.error(`❌ Failed to delete old interval.`); + } + } + const startTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + - if (action === "start") { + await Tank.updateOne( { customerId, "connections.inputConnections.motor_id": motorId }, - { - $set: { + { $set: { "connections.inputConnections.$.motor_stop_status": "2", - "connections.inputConnections.$.startTime": req.body.startTime, - "connections.inputConnections.$.motor_on_type": "auto", - } - } + "connections.inputConnections.$.threshold_type": threshold_type, + "connections.inputConnections.$.motor_on_type": "manual" + }} ); - - ; - } - - if (action === "stop") { - await Tank.updateOne( - { customerId, "connections.inputConnections.motor_id": motorId }, - { - $set: { - "connections.inputConnections.$.motor_stop_status": "1", - "connections.inputConnections.$.stopTime": req.body.stopTime, - "connections.inputConnections.$.motor_on_type": null, + + eventEmitter.emit("motorStart", customerId, fcmToken, tankName, blockName, startTime, "Mobile APP", manual_threshold_time, typeOfWater, motorId, loggedInUser.phone); + //this.publishMotorStopStatus(motorId, motorStopStatus); + 
reply.code(200).send({ message: "Motor started successfully." }); + + if (req.body.threshold_type === "time") { + // Create a new MotorData entry + const receiverTank = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + const newMotorData = new MotorData({ + customerId, + motor_id: motorId, + start_instance_id: start_instance_id, + supplierTank: req.body.from, + receiverTank: req.body.to, + supplier_type: req.body.from_type, + receiver_type: req.body.to_type, + startTime: req.body.startTime, + receiverInitialwaterlevel: parseInt(receiverTank.waterlevel, 10), + started_by:loggedInUser.name + }); + await newMotorData.save(); + console.log("entered time",motorId,motorStopStatus) + // Update the tank connections with start time and threshold time + this.publishMotorStopStatus(motorId, motorStopStatus); + const startTime1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { + const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); + + if (index !== -1) { + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { + $set: { + [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, + [`connections.inputConnections.${index}.threshold_type`]: "time", + [`connections.inputConnections.${index}.startTime`]: startTime1, + [`connections.inputConnections.${index}.start_instance_id`]: start_instance_id + } + } + ); } } - ); - } - - - - reply.code(200).send({ message: `Motor ${action === "start" ? 
"started" : "stopped"} successfully.` }); - } catch (err) { - console.error("Error in motorActionAuto:", err); - reply.code(500).send({ error: err.message }); - } -}; - - -const checkAutoMode = async () => { - try { - const tanks = await Tank.find(); + const thresholdTime = new Date(new Date().getTime() + req.body.manual_threshold_time * 60000); + console.log("New Threshold Time:", thresholdTime); + console.log("Current Time:", new Date()); + + + motorIntervals[motorId] = setInterval(async () => { + console.log(motorId,"interval created") + console.log(customerId,req.body.from,req.body.from_type.toLowerCase()) + const supplierTank = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); + console.log("up",supplierTank) + console.log(supplierTank.waterlevel,"parseInt(supplierTank.waterlevel, 10)") + const lowWaterThreshold = 20; - for (const tank of tanks) { - for (const inputConnection of tank.connections.inputConnections) { - if (inputConnection.auto_mode === "active") { - console.log("This is automation for tank: " + tank.tankName); - const waterLevel = parseFloat(tank.waterlevel.replace(/,/g, '')); - const capacity = parseFloat(tank.capacity.replace(/,/g, '')); - const autoMinPercentage = parseFloat(tank.auto_min_percentage); - const autoMaxPercentage = parseFloat(tank.auto_max_percentage); - console.log(waterLevel,capacity,autoMinPercentage,autoMaxPercentage) + const currentWaterLevel = parseInt(supplierTank.waterlevel, 10); + const currentWaterPercentage = (currentWaterLevel / parseInt(supplierTank.capacity.replace(/,/g, ''), 10)) * 100; + const notificationTracker = new Map(); + if (new Date() >= thresholdTime || currentWaterPercentage <= lowWaterThreshold) { + console.log(new Date(),"new date") + console.log(thresholdTime,"thresholdTime") + console.log("motor stopping because it entered this condition") + // Emit the threshold time notification + try { + console.log("motor stopping because it entered this 
condition") + + const tank = await Tank.findOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { "connections.inputConnections.$": 1 } // Fetch only relevant motor connection + ); + + if (tank && tank.connections.inputConnections[0].motor_stop_status === "1") { + console.log("⚠️ Motor already stopped. Skipping notification."); + } else { + console.log("🚀 Sending threshold time notification..."); + + eventEmitter.emit( + "sendThresholdTimeNotification", + customerId, + fcmToken, + manual_threshold_time, + motorId, + tankName, + blockName + ); + } + + + reply.code(200).send({ message: "Motor stopped successfully." }); + } catch (error) { + console.error("Error in handleMotorStop:", error); + reply.code(500).send({ error: "Internal Server Error" }); + } - if (isNaN(waterLevel) || isNaN(capacity) || capacity === 0) { - console.error(`Invalid water level or capacity for tank: ${tank.tankName}`); - continue; // Skip this tank if the values are not valid - } - const currentPercentage = (waterLevel / capacity) * 100; - console.log("This is automation percentage: " + currentPercentage); - const now = moment().format('DD-MMM-YYYY - HH:mm'); - console.log(now) - if (currentPercentage <= autoMinPercentage) { - await motorActionAuto({ - params: { customerId: tank.customerId }, - body: { - action: "start", - motor_id: inputConnection.motor_id, - motor_on_type: "auto", - startTime: now + const currentTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + const currentTime1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { + $set: { + "connections.inputConnections.$.motor_stop_status": "1", + "connections.inputConnections.$.start_instance_id": null, + "connections.inputConnections.$.threshold_type": null, + "connections.inputConnections.$.manual_threshold_time": null, + "connections.inputConnections.$.manual_threshold_percentage": null, 
+ "connections.inputConnections.$.stopTime": currentTime1, + } } - }, { - code: (statusCode) => ({ send: (response) => console.log(response) }) - }); - } else if (currentPercentage >= autoMaxPercentage && inputConnection.motor_on_type === "auto") { - await motorActionAuto({ - params: { customerId: tank.customerId }, - body: { - action: "stop", - motor_id: inputConnection.motor_id, - motor_on_type: "auto", - stopTime: now + ); + + if (motorIntervals[motorId]) { + console.log(`🛑 Clearing interval for motorId: ${motorId}`); + clearInterval(motorIntervals[motorId]); + delete motorIntervals[motorId]; + + // Confirm deletion + if (!motorIntervals[motorId]) { + console.log(`✅ Interval for motorId: ${motorId} successfully deleted.`); + } else { + console.error(`❌ Failed to delete interval for motorId: ${motorId}`); } - }, { - code: (statusCode) => ({ send: (response) => console.log(response) }) - }); - } - } - } - } - } catch (err) { - console.error("Error checking auto mode:", err); - } -}; - -// Set the interval to check every 15 seconds (15000 milliseconds) -setInterval(checkAutoMode, 15000); + } + + + this.publishMotorStopStatus(motorId, "1"); + console.log(start_instance_id,"start_instance_id",customerId,"customerId",motorId,"motorId") + + const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); + console.log(motorData,"motorData") + if (motorData) { + console.log("got into if") + const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); + const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); + const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); + const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); + const totalwaterpumped = quantityDelivered + water_pumped_till_now; + const start = moment(motorData.startTime, 
'DD-MMM-YYYY - HH:mm'); + const stop = moment(currentTime, 'DD-MMM-YYYY - HH:mm'); + const duration = moment.duration(stop.diff(start)); + const runtime = Math.floor(duration.asMinutes()); // runtime in minutes + await Tank.findOneAndUpdate( + { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, + { $set: { total_water_added_from_midnight: totalwaterpumped } } + ); + await MotorData.updateOne( + { customerId, motor_id: motorId, start_instance_id: start_instance_id }, + { + $set: { + stopTime: currentTime, + receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), + quantity_delivered: quantityDelivered.toString(), + runtime: runtime.toString(), + stopped_by:loggedInUser.name + } + } + ); + } + } + + // Check for high water level and send notification + // if (currentWaterPercentage >= highWaterThreshold) { + // // eventEmitter.emit('sendHighWaterNotification', fcmToken, receiverTank); + // await checkWaterLevelsAndNotify(customerId, tankName, supplierTank.tankLocation, fcmToken); + // } -// exports.calculateCapacity = async (req, reply) => { -// try { -// const shape = req.body.shape -// if(shape==="rectangle"){ -// const { length, width, height } = req.body + }, 30000); // Check every minute + + } + else if (req.body.threshold_type === "litres") { + console.log("entered litres") + const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + const supplier_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); -// // Ensure all parameters are valid numbers -// if (isNaN(length) || isNaN(width) || isNaN(height)) { -// reply.code(400).send('Invalid input parameters') -// return -// } + const newMotorData = new MotorData({ + customerId: customerId, + motor_id: motorId, + start_instance_id: start_instance_id, + supplierTank: req.body.from, + receiverTank: req.body.to, + 
supplier_type: req.body.from_type, + receiver_type: req.body.to_type, + startTime: req.body.startTime, + receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10), + supplierInitialwaterlevel:parseInt(supplier_tank_info7.waterlevel, 10), + started_by:loggedInUser.name + }); + await newMotorData.save(); + // If threshold type is percentage, calculate percentage threshold + const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + const supplier_tank_info = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); + if (!receiver_tank_info) { + throw new Error("Receiver tank not found."); + } + if (!supplier_tank_info) { + throw new Error("Supplierr tank not found."); + } + const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); + const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); -// // Calculate the capacity of the water tank in liters -// const capacity = length * width * height * 1000 + const capacity = parseInt(receiver_tank_info.capacity, 10); + const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); + const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); -// reply.send({ status_code: 200, capacity: capacity}); + console.log(desired_percentage) + const threshold_water_level = waterLevel+desired_percentage; + const supplier_threshold = supplier_waterLevel-desired_percentage + console.log(supplier_threshold,"supplier_threshold") + for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { + this.publishMotorStopStatus(motorId, motorStopStatus); + for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { + const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); + if (index !== -1) { + await Tank.updateOne( + { customerId, 
"connections.inputConnections.motor_id": motorId }, + { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } + ); + } + } + -// return { message: 'success' }; -// } -// if(shape==="cylinder"){ -// console.log("hii1") -// const { length,diameter } = req.body + // Update water level threshold + -// // Ensure all parameters are valid numbers -// if (isNaN(length) || isNaN(diameter)) { -// reply.code(400).send('Invalid input parameters') -// return -// } - -// // Calculate the capacity of the water tank in liters -// const radius = diameter / 2 -// const volume = Math.PI * Math.pow(radius, 2) * length -// const capacity = volume * 1000 - -// reply.send({ status_code: 200, capacity: capacity}); + // Start monitoring water level based on threshold percentage + motorIntervals[motorId] = setInterval(async () => { + // Check if water level has reached the threshold percentage + const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); + const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); + if (current_water_level <= supplier_threshold) { + // Stop the motor pump + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { + $set: { + "connections.inputConnections.$.motor_stop_status": "1", + "connections.inputConnections.$.start_instance_id": null, + "connections.inputConnections.$.threshold_type": null, + "connections.inputConnections.$.manual_threshold_time": null, + "connections.inputConnections.$.manual_threshold_percentage": null + } + } + ); + clearInterval(motorIntervals[motorId]); // Clear interval + delete motorIntervals[motorId]; + + this.publishMotorStopStatus(motorId, "1"); + await delay(300000); + const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: 
start_instance_id }); + if (motorData) { + const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); + const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); + const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -// return { message: 'success' }; -// } - -// // if(shape==="oval"){ -// // console.log("hii3") -// // const { length, width, height } = req.body + const stopTime = formatDate(new Date()); -// // // Ensure all parameters are valid numbers -// // if (isNaN(length) || isNaN(width) || isNaN(height)) { -// // reply.code(400).send('Invalid input parameters') -// // return -// // } - -// // // Calculate the capacity of the water tank in liters -// // const radius = height / 2 -// // const a = width - height -// // const area = Math.PI * radius * radius + 2 * radius * a -// // const volume = area * length -// // const capacity = volume * 1000 + await MotorData.updateOne( + { customerId, motor_id: motorId, start_instance_id: start_instance_id }, + { + $set: { + stopTime:stopTime, + receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), + quantity_delivered: quantityDelivered.toString(), + stopped_by:loggedInUser.name + } + } + ); + } + } + }, 20000); + } + + } + + } else if (action === "stop") { + // Dynamically find start_instance_id from tank + const tankWithMotor = await Tank.findOne({ + customerId, + "connections.inputConnections.motor_id": motorId + }); -// // // Format the result with two decimal places and comma-separated thousands -// // const formattedCapacity = capacity.toFixed(2).replace(/\d(?=(\d{3})+\.)/g, '$&,') - -// // reply.send({ status_code: 200, capacity: formattedCapacity}); + let dynamicInstanceId = null; + let user_name = loggedInUser.name + console.log(user_name,"user_name in stop1") + if (tankWithMotor) { + const connection = tankWithMotor.connections.inputConnections.find(conn => 
conn.motor_id === motorId); + if (connection && connection.start_instance_id) { + dynamicInstanceId = connection.start_instance_id; + } + } + await stopMotor(motorId, customerId, dynamicInstanceId,user_name); -// // return { message: 'success' }; + + try { + const motorData = await MotorData.findOne({ + customerId, + motor_id: motorId, + start_instance_id: dynamicInstanceId + }); + + let totalWaterPumped = 0; // Default value in case data is missing + if (motorData && motorData.quantity_delivered) { + totalWaterPumped = motorData.quantity_delivered; + } + + console.log("quantity_delivered:", totalWaterPumped); + + + //const totalWaterPumped = await motorData.quantity_delivered + // console.log("quantity_delivered",totalWaterPumped) + eventEmitter.emit("motorStop", customerId, fcmToken, tankName, blockName, stopTime, "Mobile APP", totalWaterPumped, typeOfWater, motorId, + loggedInUser.phone,); + + reply.code(200).send({ message: "Motor stopped successfully." }); + } catch (error) { + console.error("Error in handleMotorStop:", error); + reply.code(500).send({ error: "Internal Server Error" }); + } + this.publishMotorStopStatus(motorId, motorStopStatus); + reply.code(200).send({ message: "Motor stopped successfully." 
}); + } + } catch (err) { + throw boom.boomify(err); + } +}; -// // } +async function stopMotor(motorId, customerId, start_instance_id,user_name) { + console.log(user_name,"user_name in stop2") + const currentTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + const currentTime1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { $set: { + "connections.inputConnections.$.motor_stop_status": "1", + "connections.inputConnections.$.stopTime": currentTime1, + "connections.inputConnections.$.start_instance_id": null, + "connections.inputConnections.$.threshold_type": null, + "connections.inputConnections.$.manual_threshold_time": null, + "connections.inputConnections.$.manual_threshold_percentage": null + }} + ); + + if (motorIntervals[motorId]) { + clearInterval(motorIntervals[motorId]); + delete motorIntervals[motorId]; + } -// // if(shape==="horizontalellipse"){ + // eventEmitter.emit("motorStop", customerId, [], "", "", currentTime, "Mobile APP", 0, "", motorId, ""); + + const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id }); + if (motorData) { + const start = moment(motorData.startTime, 'DD-MMM-YYYY - HH:mm'); + const stop = moment(currentTime, 'DD-MMM-YYYY - HH:mm'); + const duration = moment.duration(stop.diff(start)); + const runtime = Math.floor(duration.asMinutes()); // runtime in minutes + + const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); + const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel.replace(/,/g, ''), 10); + const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel.replace(/,/g, ''), 10); + await MotorData.updateOne( + { customerId, motor_id: motorId, start_instance_id }, + { $set: { stopTime: currentTime, receiverfinalwaterlevel: 
receiverFinalWaterLevel.toString(), quantity_delivered: quantityDelivered.toString(), runtime: runtime,stopped_by:user_name } } + ); + } +} -// // const { length, width, height } = req.body -// // // Ensure all parameters are valid numbers -// // if (isNaN(length) || isNaN(width) || isNaN(height)) { -// // reply.code(400).send('Invalid input parameters') -// // return -// // } -// // // Calculate the capacity of the water tank in liters -// // const radius1 = length / 2 -// // const radius2 = width / 2 -// // const volume = Math.PI * radius1 * radius2 * height -// // const capacity = volume * 1000 -// // reply.send({ status_code: 200, capacity: capacity}); -// // return { message: 'success' }; +const monitorWaterLevels = async () => { + try { + // console.log("⏳ Monitoring water levels..."); -// // } -// if(shape==="userdefined"){ -// const capacity = req.body + const tanks = await Tank.find(); // Fetch all tanks + //console.log("Fetched Tanks:", tanks.length); -// reply.send({ status_code: 200, capacity: capacity}); + for (const tank of tanks) { + + const { + _id, + customerId, // Move this to the top + motor_id, + tankName, + blockName, + waterlevel: currentWaterLevel, + capacity, + auto_min_percentage, + reserved_percentage, + notificationSentLow, + notificationSentCritical, + } = tank; + + const users = await User.find({ customerId }); + const fcmTokens = users + .map(user => user.fcmIds) + .filter(fcmIds => Array.isArray(fcmIds) && fcmIds.length > 0) // Ensure it's an array + .flat(); + + if (!fcmTokens || fcmTokens.length === 0) { + //console.error("❌ No valid FCM tokens found for customerId:", customerId); + continue; // Skip this tank + } + const LOW_PERCENTAGE = 20; // 20% threshold + const CRITICAL_PERCENTAGE = 10; // 10% threshold + + // **Calculate thresholds based on tank capacity** + const lowWaterLevel = (LOW_PERCENTAGE / 100) * capacity; + const criticalLowWaterLevel = (CRITICAL_PERCENTAGE / 100) * capacity; + const currentWaterPercentage = 
(currentWaterLevel / capacity) * 100; + // const lowWaterLevel = 9483; // Low threshold in liters + // const criticalLowWaterLevel = 9483; + if (currentWaterLevel <= criticalLowWaterLevel) { + if (!notificationSentCritical) { + console.log("🚨 Sending Critical Low Water Notification..."); + eventEmitter.emit("sendCriticalLowWaterNotification", customerId, fcmTokens, motor_id, tankName, blockName, criticalLowWaterLevel, currentWaterLevel, currentWaterPercentage); + await Tank.updateOne({ _id }, { notificationSentCritical: true, notificationSentLow: true }); + } + } + else if (currentWaterLevel <= lowWaterLevel) { + if (!notificationSentLow) { + console.log("⚠️ Sending Low Water Notification..."); + eventEmitter.emit("sendLowWaterNotification", customerId, fcmTokens, motor_id, tankName, blockName, lowWaterLevel, currentWaterLevel, currentWaterPercentage); + await Tank.updateOne({ _id }, { notificationSentLow: true }); + } + } + else if (currentWaterLevel > lowWaterLevel && (notificationSentLow || notificationSentCritical)) { + console.log("🔄 Water level restored. 
Resetting flags."); + await Tank.updateOne({ _id }, { notificationSentCritical: false, notificationSentLow: false }); + } + + } + } catch (error) { + console.error("❌ Error monitoring water levels:", error); + } +}; +setInterval(monitorWaterLevels, 1000); -// return { message: 'success' }; -// } +// async function stopMotor(motorId, customerId, start_instance_id) { +// const currentTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { +// "connections.inputConnections.$.motor_stop_status": "1", +// "connections.inputConnections.$.stopTime": currentTime, +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// }} +// ); -// } +// if (motorIntervals[motorId]) { +// clearInterval(motorIntervals[motorId]); +// delete motorIntervals[motorId]; +// } -// catch (err) { -// throw boom.boomify(err); +// eventEmitter.emit("motorStop", customerId, [], "", "", currentTime, "Mobile APP", 0, "", motorId, ""); + +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id }); +// if (motorData) { +// const startTime = moment(motorData.startTime, 'DD-MMM-YYYY - HH:mm'); +// const runtime = moment.duration(moment(currentTime, 'DD-MMM-YYYY - HH:mm').diff(startTime)).asSeconds(); + +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel.replace(/,/g, ''), 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel.replace(/,/g, ''), 10); + +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id }, +// { $set: { stopTime: currentTime, 
receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), quantity_delivered: quantityDelivered.toString(), runtime: runtime } } +// ); // } -// }; +// } -// exports.calculateCapacity = async (req, reply) => { -// try { -// const shape = req.body.shape; -// if (shape === "rectangle") { -// const { length, width, height } = req.body; -// // Convert input units from feet to meters -// const length_m = length * 0.3048; -// const width_m = width * 0.3048; -// const height_m = height * 0.3048; -// console.log(length_m,width_m,height_m) -// // Ensure all parameters are valid numbers -// if (isNaN(length_m) || isNaN(width_m) || isNaN(height_m)) { -// reply.code(400).send("Invalid input parameters"); -// return; -// } -// // Calculate the capacity of the water tank in liters -// const capacity = length_m * width_m * height_m * 1000; -// reply.send({ status_code: 200, capacity: capacity }); -// return { message: "success" }; -// } -// if (shape === "cylinder") { -// console.log("hii1"); -// const { length, diameter } = req.body; -// // Convert input units from feet to meters -// const length_m = length * 0.3048; -// const diameter_m = diameter * 0.3048; -// // Ensure all parameters are valid numbers -// if (isNaN(length_m) || isNaN(diameter_m)) { -// reply.code(400).send("Invalid input parameters"); -// return; -// } -// // Calculate the capacity of the water tank in liters -// const radius = diameter_m / 2; -// const volume = Math.PI * Math.pow(radius, 2) * length_m; -// const capacity = volume * 1000; -// reply.send({ status_code: 200, capacity: capacity }); -// return { message: "success" }; +// exports.motorAction = async (req, reply) => { +// try { +// const customerId = req.params.customerId; +// const action = req.body.action; +// const motorId = req.body.motor_id; +// const start_instance_id = req.body.start_instance_id +// console.log(req.body.startTime) +// // Ensure motor_id is provided +// if (!motorId) { +// throw new Error("Motor ID is required."); // } -// 
// Add similar conversions for other shapes if necessary - -// if (shape === "userdefined") { -// const capacity = req.body; - -// reply.send({ status_code: 200, capacity: capacity }); - -// return { message: "success" }; +// // Determine the motor stop status based on the action +// let motorStopStatus; +// if (action === "start") { +// motorStopStatus = "2"; // If action is start, set stop status to "2" +// } else if (action === "stop") { +// motorStopStatus = "1"; // If action is stop, set stop status to "1" +// } else { +// throw new Error("Invalid action provided."); // } -// } catch (err) { -// throw boom.boomify(err); -// } -// }; - -exports.calculateCapacity = async (req, reply) => { - try { - const shape = req.body.shape; - if (shape === "rectangle") { - const { length, width, height } = req.body; - - // Convert input units from feet to meters - const length_m = length * 0.3048; - const width_m = width * 0.3048; - const height_m = height * 0.3048; - - // Ensure all parameters are valid numbers - if (isNaN(length_m) || isNaN(width_m) || isNaN(height_m)) { - reply.code(400).send("Invalid input parameters"); - return; - } - // Calculate the capacity of the water tank in liters - const capacity = length_m * width_m * height_m * 1000; +// // Update the motor stop status immediately if action is stop +// if (action === "stop") { +// // Update the motor stop status and other fields +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { +// $set: { +// "connections.inputConnections.$.motor_stop_status": "1", +// "connections.inputConnections.$.stopTime": req.body.stopTime, +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); - // Calculate the water capacity for a 1 centimeter height - const waterCapacityPerCm = length_m * width_m * 0.01 * 1000; - 
reply.send({ status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); +// // Send immediate response to the client +// reply.code(200).send({ message: "Motor stopped successfully." }); - return { message: "success" }; - } - if (shape === "cylinder") { - const { length, diameter } = req.body; +// // Perform stop operations in the background +// (async () => { +// await delay(300000); - // Convert input units from feet to meters - const length_m = length * 0.3048; - const diameter_m = diameter * 0.3048; - - // Ensure all parameters are valid numbers - if (isNaN(length_m) || isNaN(diameter_m)) { - reply.code(400).send("Invalid input parameters"); - return; - } +// // Update the existing motor data entry with stop details +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if (motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); +// const totalwaterpumped = quantityDelivered + water_pumped_till_now; +// await Tank.findOneAndUpdate( +// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, +// { $set: { total_water_added_from_midnight: totalwaterpumped } } +// ); - // Calculate the capacity of the water tank in liters - const radius = diameter_m / 2; - const volume = Math.PI * Math.pow(radius, 2) * length_m; - const capacity = volume * 1000; +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime: req.body.stopTime, +// receiverfinalwaterlevel: 
receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// })(); - // Calculate the water capacity for a 1 centimeter height - const waterCapacityPerCm = Math.PI * Math.pow(radius, 2) * 0.01 * 1000; +// // Return here to ensure the rest of the code is not executed for the stop action +// return; +// } else { +// // Update the motor stop status to "2" for start action +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } +// ); +// } - reply.send({ status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); +// // Check threshold settings if action is start +// if (action === "start") { +// if (req.body.threshold_type === "time") { +// // If threshold type is time, update threshold time +// // await Tank.updateOne( +// // { customerId, "connections.inputConnections.motor_id": motorId }, +// // { $set: { "connections.inputConnections.$.manual_threshold_time": req.body.manual_threshold_time,startTime:req.body.startTime } } +// // ); +// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); - return { message: "success" }; - } +// const newMotorData = new MotorData({ +// customerId: customerId, +// motor_id: motorId, +// start_instance_id: start_instance_id, +// supplierTank: req.body.from, +// receiverTank: req.body.to, +// supplier_type: req.body.from_type, +// receiver_type: req.body.to_type, +// startTime: req.body.startTime, +// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) +// }); +// await newMotorData.save(); - // Add similar conversions for other shapes if necessary +// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { +// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === 
motorId); +// if (index !== -1) { +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, [`connections.inputConnections.${index}.startTime`]: req.body.startTime,[`connections.inputConnections.${index}.start_instance_id`]: start_instance_id } } +// ); +// } +// } + - if (shape === "userdefined") { - const capacity = req.body.capacity; // Assuming capacity is provided directly +// // Start monitoring water level based on threshold time +// const thresholdTime = moment().add(req.body.manual_threshold_time, 'minutes').toDate(); +// const intervalId = setInterval(async () => { +// const splr_tank_info3 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// const splr_tank_info3_waterlevel = parseInt(splr_tank_info3.waterlevel, 10); +// //console.log(splr_tank_info3_waterlevel,"splr_tank_info3_waterlevel") +// const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity.replace(/,/g, ''), 10); +// // const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity, 10); +// // console.log(splr_tank_info3.capacity,splr_tank_info3_capacity,"splr_tank_info3_capacity") +// const splr_tank_info3_percentage = (splr_tank_info3_waterlevel / splr_tank_info3_capacity) * 100; +// // console.log(splr_tank_info3_percentage, "percentage for less than 20"); - // Calculate the water capacity for a 1 centimeter height - const waterCapacityPerCm = capacity / req.body.height; // Assuming height of the shape is provided +// if (new Date() >= thresholdTime || splr_tank_info3_percentage <= 20) { +// console.log(splr_tank_info3_percentage,) +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { +// $set: { +// "connections.inputConnections.$.motor_stop_status": "1", +// "connections.inputConnections.$.threshold_type": null, +// 
"connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); +// clearInterval(intervalId); - reply.send({ status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); +// await delay(300000); - return { message: "success" }; - } - } catch (err) { - throw boom.boomify(err); - } -}; +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if (motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); +// const totalwaterpumped = quantityDelivered + water_pumped_till_now +// await Tank.findOneAndUpdate({customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase()}, { $set: { total_water_added_from_midnight: totalwaterpumped } }) -// exports.IotDevice = async (req, reply) => { -// try { -// const { hardwareId, mode, tanks } = req.body; +// const stopTime = formatDate(new Date()); -// // create a new tank document with the current date and time -// const currentDate = new Date(); -// const date = currentDate.toISOString(); // save the date as an ISO string -// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime:stopTime, +// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// } +// }, 60000); +// } else if 
(req.body.threshold_type === "litres") { +// console.log("entered litres") +// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); -// // Create an array of tank documents -// const tankDocuments = tanks.map(tank => ({ -// tankhardwareId: tank.tankhardwareId, -// tankHeight: tank.tankHeight, -// maxLevel: tank.maxLevel, -// minLevel: tank.minLevel, -// date: date, -// time: time -// })); +// const newMotorData = new MotorData({ +// customerId: customerId, +// motor_id: motorId, +// start_instance_id: start_instance_id, +// supplierTank: req.body.from, +// receiverTank: req.body.to, +// supplier_type: req.body.from_type, +// receiver_type: req.body.to_type, +// startTime: req.body.startTime, +// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) +// }); +// await newMotorData.save(); +// // If threshold type is percentage, calculate percentage threshold +// const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); +// const supplier_tank_info = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// if (!receiver_tank_info) { +// throw new Error("Receiver tank not found."); +// } +// if (!supplier_tank_info) { +// throw new Error("Supplierr tank not found."); +// } +// const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); +// const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); +// const capacity = parseInt(receiver_tank_info.capacity, 10); +// const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); +// const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); -// // create a new IotData document with the provided data -// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); +// console.log(desired_percentage) +// const 
threshold_water_level = waterLevel+desired_percentage; -// // save the document to MongoDB -// await ottank.save(); +// const supplier_threshold = supplier_waterLevel-desired_percentage +// console.log(supplier_threshold,"supplier_threshold") -// // delete previous records except the three latest ones -// const previousRecords = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }) -// .skip(3); // skip the three latest documents +// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { +// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); +// if (index !== -1) { +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } +// ); +// } +// } + -// for (const record of previousRecords) { -// await record.remove(); -// } -// // get the latest three documents sorted in descending order of date and time -// const latestOttanks = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }) -// .limit(3); +// // Update water level threshold + -// // send the latest documents -// reply.code(200).send({ latestOttanks }); -// } catch (err) { -// // send an error response -// reply.code(500).send({ error: err.message }); -// } -// }; +// // Start monitoring water level based on threshold percentage +// const intervalId = setInterval(async () => { +// // Check if water level has reached the threshold percentage +// const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); +// if (current_water_level <= supplier_threshold) { +// // Stop the motor pump +// await Tank.updateOne( +// { 
customerId, "connections.inputConnections.motor_id": motorId }, +// { +// $set: { +// "connections.inputConnections.$.motor_stop_status": "1", + +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); +// clearInterval(intervalId); // Stop monitoring water level +// await delay(300000); +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if (motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); -exports.IotDevice = async (req, reply) => { - try { - const { hardwareId, mode, tanks } = req.body; +// const stopTime = formatDate(new Date()); - // create a new tank document with the current date and time +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime:stopTime, +// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// } +// }, 20000); // Check water level every minute +// } +// } + +// // Respond with success message +// reply.code(200).send({ message: `Motor ${action === "start" ? 
"started" : "stopped"} successfully.` }); +// } catch (err) { +// // Handle errors +// throw boom.boomify(err); +// } +// }; + + + + +// exports.motorAction = async (req, reply) => { +// try { +// const customerId = req.params.customerId; +// const action = req.body.action; +// const motorId = req.body.motor_id; +// const start_instance_id = req.body.start_instance_id; +// //const fcmIds = req.body.fcmIds; // Assume this is provided in the request to notify users + +// // Ensure motor_id is provided +// if (!motorId) { +// throw new Error("Motor ID is required."); +// } + +// // Determine the motor stop status based on the action +// let motorStopStatus; +// if (action === "start") { +// motorStopStatus = "2"; // If action is start, set stop status to "2" +// } else if (action === "stop") { +// motorStopStatus = "1"; // If action is stop, set stop status to "1" +// } else { +// throw new Error("Invalid action provided."); +// } +// const users = await User.find({ customerId: customerId }); +// const fcmIds = users.map(user => user.fcmId).filter(fcmId => fcmId); + +// // Handle motor stop action +// if (action === "stop") { +// // Update the motor stop status and other fields +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { +// $set: { +// "connections.inputConnections.$.motor_stop_status": "1", +// "connections.inputConnections.$.stopTime": req.body.stopTime, +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); + +// // Send immediate response to the client +// reply.code(200).send({ message: "Motor stopped successfully." 
}); + +// // Send notification for motor stop +// for (const fcmId of fcmIds) { +// try { +// const response = await sendPushNotification(fcmId, 'Motor Stopped', `Motor has stopped at ${req.body.stopTime}.`); +// console.log('Notification sent successfully:', response); + +// if (response.results[0].error === 'NotRegistered') { +// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); +// console.log(`Removed unregistered FCM ID: ${fcmId}`); +// } +// } catch (error) { +// console.error('Error sending notification:', error); +// } +// } + +// // Perform stop operations in the background +// (async () => { +// await delay(300000); + +// // Update the existing motor data entry with stop details +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if (motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); +// const totalwaterpumped = quantityDelivered + water_pumped_till_now; +// await Tank.findOneAndUpdate( +// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, +// { $set: { total_water_added_from_midnight: totalwaterpumped } } +// ); + +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime: req.body.stopTime, +// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// })(); + +// // Return here to ensure the rest of the code is not executed for the stop action +// return; +// } else { +// 
// Update the motor stop status to "2" for start action +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { "connections.inputConnections.$.motor_stop_status": "2" } } +// ); + +// // Send notification for motor start +// for (const fcmId of fcmIds) { +// try { +// const response = await sendPushNotification(fcmId, 'Motor Started', `Motor has started at ${req.body.startTime}.`); +// console.log('Notification sent successfully:', response); + +// if (response.results[0].error === 'NotRegistered') { +// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); +// console.log(`Removed unregistered FCM ID: ${fcmId}`); +// } +// } catch (error) { +// console.error('Error sending notification:', error); +// } +// } +// } + +// // Check threshold settings if action is start +// if (action === "start") { +// if (req.body.threshold_type === "time") { +// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + +// const newMotorData = new MotorData({ +// customerId: customerId, +// motor_id: motorId, +// start_instance_id: start_instance_id, +// supplierTank: req.body.from, +// receiverTank: req.body.to, +// supplier_type: req.body.from_type, +// receiver_type: req.body.to_type, +// startTime: req.body.startTime, +// receiverInitialwaterlevel: parseInt(receiver_tank_info7.waterlevel, 10) +// }); +// await newMotorData.save(); + +// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { +// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); +// if (index !== -1) { +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { [`connections.inputConnections.${index}.manual_threshold_time`]: req.body.manual_threshold_time, [`connections.inputConnections.${index}.startTime`]: req.body.startTime, 
[`connections.inputConnections.${index}.start_instance_id`]: start_instance_id } } +// ); +// } +// } + +// // Start monitoring water level based on threshold time +// const thresholdTime = moment().add(req.body.manual_threshold_time, 'minutes').toDate(); +// const intervalId = setInterval(async () => { +// const splr_tank_info3 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// const splr_tank_info3_waterlevel = parseInt(splr_tank_info3.waterlevel, 10); +// const splr_tank_info3_capacity = parseInt(splr_tank_info3.capacity.replace(/,/g, ''), 10); +// const splr_tank_info3_percentage = (splr_tank_info3_waterlevel / splr_tank_info3_capacity) * 100; + +// if (new Date() >= thresholdTime || splr_tank_info3_percentage <= 20) { +// // Send notification for low supplier tank percentage +// for (const fcmId of fcmIds) { +// try { +// const response = await sendPushNotification(fcmId, 'Low Water Level Alert', `Supplier tank water level is below 20% (${splr_tank_info3_percentage.toFixed(2)}%).`); +// console.log('Notification sent successfully:', response); + +// if (response.results[0].error === 'NotRegistered') { +// await User.updateOne({ fcmId: fcmId }, { $unset: { fcmId: "" } }); +// console.log(`Removed unregistered FCM ID: ${fcmId}`); +// } +// } catch (error) { +// console.error('Error sending notification:', error); +// } +// } + +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { +// $set: { +// "connections.inputConnections.$.motor_stop_status": "1", +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); +// clearInterval(intervalId); + +// await delay(300000); + +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if 
(motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); +// const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); +// const totalwaterpumped = quantityDelivered + water_pumped_till_now; +// await Tank.findOneAndUpdate( +// { customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, +// { $set: { total_water_added_from_midnight: totalwaterpumped } } +// ); + +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime: req.body.stopTime, +// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// } +// }, 60000); // Check every minute +// } else if (req.body.threshold_type === "litres") { +// console.log("entered litres") +// const receiver_tank_info7 = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); + +// const newMotorData = new MotorData({ +// customerId: customerId, +// motor_id: motorId, +// start_instance_id: start_instance_id, +// supplierTank: req.body.from, +// receiverTank: req.body.to, +// supplier_type: req.body.from_type, +// receiver_type: req.body.to_type, +// startTime: req.body.startTime, +// receiverInitialwaterlevel:parseInt(receiver_tank_info7.waterlevel, 10) +// }); +// await newMotorData.save(); +// // If threshold type is percentage, calculate percentage threshold +// const receiver_tank_info = await Tank.findOne({ customerId, tankName: req.body.to, tankLocation: req.body.to_type.toLowerCase() }); +// const supplier_tank_info = await Tank.findOne({ customerId, 
tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// if (!receiver_tank_info) { +// throw new Error("Receiver tank not found."); +// } +// if (!supplier_tank_info) { +// throw new Error("Supplierr tank not found."); +// } +// const supplier_capacity = parseInt(supplier_tank_info.capacity, 10); +// const supplier_waterLevel = parseInt(supplier_tank_info.waterlevel, 10); + +// const capacity = parseInt(receiver_tank_info.capacity, 10); +// const waterLevel = parseInt(receiver_tank_info.waterlevel, 10); +// const desired_percentage = parseInt(req.body.manual_threshold_litres.replace(/,/g, ''), 10); + +// console.log(desired_percentage) +// const threshold_water_level = waterLevel+desired_percentage; + +// const supplier_threshold = supplier_waterLevel-desired_percentage +// console.log(supplier_threshold,"supplier_threshold") + +// for await (const tank of Tank.find({ "connections.inputConnections.motor_id": motorId })) { +// const index = tank.connections.inputConnections.findIndex(connection => connection.motor_id === motorId); +// if (index !== -1) { +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { $set: { [`connections.inputConnections.${index}.manual_threshold_percentage`]: supplier_threshold.toString(), [`connections.inputConnections.${index}.startTime`]: req.body.startTime } } +// ); +// } +// } + + + + + +// // Start monitoring water level based on threshold percentage +// const intervalId = setInterval(async () => { +// // Check if water level has reached the threshold percentage +// const supplier_tank_info1 = await Tank.findOne({ customerId, tankName: req.body.from, tankLocation: req.body.from_type.toLowerCase() }); +// const current_water_level = parseInt(supplier_tank_info1.waterlevel, 10); +// if (current_water_level <= supplier_threshold) { +// // Stop the motor pump +// await Tank.updateOne( +// { customerId, "connections.inputConnections.motor_id": motorId }, +// { +// 
$set: { +// "connections.inputConnections.$.motor_stop_status": "1", + +// "connections.inputConnections.$.threshold_type": null, +// "connections.inputConnections.$.manual_threshold_time": null, +// "connections.inputConnections.$.manual_threshold_percentage": null +// } +// } +// ); +// clearInterval(intervalId); // Stop monitoring water level +// await delay(300000); + +// const motorData = await MotorData.findOne({ customerId, motor_id: motorId, start_instance_id: start_instance_id }); +// if (motorData) { +// const receiverTank = await Tank.findOne({ customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); +// const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); +// const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); + + +// const stopTime = formatDate(new Date()); + +// await MotorData.updateOne( +// { customerId, motor_id: motorId, start_instance_id: start_instance_id }, +// { +// $set: { +// stopTime:stopTime, +// receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), +// quantity_delivered: quantityDelivered.toString() +// } +// } +// ); +// } +// } +// }, 20000); +// } +// } + +// reply.code(200).send({ message: `Motor ${action === "start" ? 
"started" : "stopped"} successfully.` }); +// } catch (err) { +// // Handle errors +// throw boom.boomify(err); +// } +// }; + +const motorActionAuto = async (req, reply) => { + try { + const customerId = req.params.customerId; + const action = req.body.action; + const motorId = req.body.motor_id; + const motor_on_type = req.body.motor_on_type + const startTime = req.body.startTime; + const stopTime = req.body.stopTime; + const threshold = req.body.threshold || "unknown"; + + if (!motorId) { + throw new Error("Motor ID is required."); + } + + let motorStopStatus; + const tank = await Tank.findOne({ customerId, "connections.inputConnections.motor_id": motorId }); + const user = await User.findOne({ customerId }); + const allowNotifications = user?.automaticStartAndStopNotify ?? true; // Default to true if not set + + if (!tank) { + throw new Error("Tank not found for the provided motor ID."); + } + + const { tankName, blockName, typeOfWater, fcmTokens } = tank; // Extracting necessary details + + + if (action === "start") { + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { + $set: { + "connections.inputConnections.$.motor_stop_status": "2", + "connections.inputConnections.$.startTime": req.body.startTime, + "connections.inputConnections.$.motor_on_type": "auto", + + } + } + ); + + if (allowNotifications) { + eventEmitter.emit( + "motorStartAutomatic", + fcmTokens, + tankName, + blockName, + startTime, + "Automatic", + typeOfWater, + threshold + ); + } + + } + + if (action === "stop") { + await Tank.updateOne( + { customerId, "connections.inputConnections.motor_id": motorId }, + { + $set: { + "connections.inputConnections.$.motor_stop_status": "1", + "connections.inputConnections.$.stopTime": req.body.stopTime, + "connections.inputConnections.$.motor_on_type": null, + } + } + ); + + const currentDateTime = new Date(); + const formattedDate = currentDateTime.toLocaleDateString(); + const formattedTime = 
currentDateTime.toLocaleTimeString(); + + if (allowNotifications) { + const stopMessage = + `🚰 Motor Name: ${tankName}-${blockName}-${typeOfWater}\n` + + `🛢️ Tank Name: '${tankName}'\n` + + `🏢 Block Name: '${blockName}'\n` + + `🕒 Pump stopped at: ${stopTime}\n` + + `⏳ Operation Duration: ${threshold} `; + + // Send stop notification + await sendNotification(fcmTokens, "Arminta Water Management", stopMessage); + } + } + + + + reply.code(200).send({ message: `Motor ${action === "start" ? "started" : "stopped"} successfully.` }); + } catch (err) { + console.error("Error in motorActionAuto:", err); + reply.code(500).send({ error: err.message }); + } +}; + + +const checkAutoMode = async () => { + try { + const tanks = await Tank.find(); + + for (const tank of tanks) { + for (const inputConnection of tank.connections.inputConnections) { + if (inputConnection.auto_mode === "active") { + console.log("This is automation for tank: " + tank.tankName); + const waterLevel = parseFloat(tank.waterlevel.replace(/,/g, '')); + const capacity = parseFloat(tank.capacity.replace(/,/g, '')); + const autoMinPercentage = parseFloat(tank.auto_min_percentage); + const autoMaxPercentage = parseFloat(tank.auto_max_percentage); + console.log(waterLevel,capacity,autoMinPercentage,autoMaxPercentage) + + if (isNaN(waterLevel) || isNaN(capacity) || capacity === 0) { + console.error(`Invalid water level or capacity for tank: ${tank.tankName}`); + continue; // Skip this tank if the values are not valid + } + + const currentPercentage = (waterLevel / capacity) * 100; + console.log("This is automation percentage: " + currentPercentage); + const now = moment().format('DD-MMM-YYYY - HH:mm'); + console.log(now) + if (currentPercentage <= autoMinPercentage) { + await motorActionAuto({ + params: { customerId: tank.customerId }, + body: { + action: "start", + motor_id: inputConnection.motor_id, + motor_on_type: "auto", + startTime: now + } + }, { + code: (statusCode) => ({ send: (response) => 
console.log(response) }) + }); + } else if (currentPercentage >= autoMaxPercentage && inputConnection.motor_on_type === "auto") { + await motorActionAuto({ + params: { customerId: tank.customerId }, + body: { + action: "stop", + motor_id: inputConnection.motor_id, + motor_on_type: "auto", + stopTime: now + } + }, { + code: (statusCode) => ({ send: (response) => console.log(response) }) + }); + } + } + } + } + } catch (err) { + console.error("Error checking auto mode:", err); + } +}; + +// Set the interval to check every 15 seconds (15000 milliseconds) +setInterval(checkAutoMode, 15000); + + + + + + + +// exports.calculateCapacity = async (req, reply) => { +// try { +// const shape = req.body.shape +// if(shape==="rectangle"){ +// const { length, width, height } = req.body + +// // Ensure all parameters are valid numbers +// if (isNaN(length) || isNaN(width) || isNaN(height)) { +// reply.code(400).send('Invalid input parameters') +// return +// } + +// // Calculate the capacity of the water tank in liters +// const capacity = length * width * height * 1000 + +// reply.send({ status_code: 200, capacity: capacity}); + + +// return { message: 'success' }; + +// } +// if(shape==="cylinder"){ +// console.log("hii1") +// const { length,diameter } = req.body + +// // Ensure all parameters are valid numbers +// if (isNaN(length) || isNaN(diameter)) { +// reply.code(400).send('Invalid input parameters') +// return +// } + +// // Calculate the capacity of the water tank in liters +// const radius = diameter / 2 +// const volume = Math.PI * Math.pow(radius, 2) * length +// const capacity = volume * 1000 + +// reply.send({ status_code: 200, capacity: capacity}); + + +// return { message: 'success' }; + +// } + +// // if(shape==="oval"){ +// // console.log("hii3") +// // const { length, width, height } = req.body + +// // // Ensure all parameters are valid numbers +// // if (isNaN(length) || isNaN(width) || isNaN(height)) { +// // reply.code(400).send('Invalid input parameters') 
+// // return +// // } + +// // // Calculate the capacity of the water tank in liters +// // const radius = height / 2 +// // const a = width - height +// // const area = Math.PI * radius * radius + 2 * radius * a +// // const volume = area * length +// // const capacity = volume * 1000 + +// // // Format the result with two decimal places and comma-separated thousands +// // const formattedCapacity = capacity.toFixed(2).replace(/\d(?=(\d{3})+\.)/g, '$&,') + +// // reply.send({ status_code: 200, capacity: formattedCapacity}); + + +// // return { message: 'success' }; + +// // } + + +// // if(shape==="horizontalellipse"){ + + +// // const { length, width, height } = req.body + +// // // Ensure all parameters are valid numbers +// // if (isNaN(length) || isNaN(width) || isNaN(height)) { +// // reply.code(400).send('Invalid input parameters') +// // return +// // } + +// // // Calculate the capacity of the water tank in liters +// // const radius1 = length / 2 +// // const radius2 = width / 2 +// // const volume = Math.PI * radius1 * radius2 * height +// // const capacity = volume * 1000 +// // reply.send({ status_code: 200, capacity: capacity}); + + +// // return { message: 'success' }; + +// // } +// if(shape==="userdefined"){ +// const capacity = req.body + +// reply.send({ status_code: 200, capacity: capacity}); + + +// return { message: 'success' }; + +// } + + +// } + +// catch (err) { +// throw boom.boomify(err); +// } +// }; + + +// exports.calculateCapacity = async (req, reply) => { +// try { +// const shape = req.body.shape; +// if (shape === "rectangle") { +// const { length, width, height } = req.body; + +// // Convert input units from feet to meters +// const length_m = length * 0.3048; +// const width_m = width * 0.3048; +// const height_m = height * 0.3048; +// console.log(length_m,width_m,height_m) +// // Ensure all parameters are valid numbers +// if (isNaN(length_m) || isNaN(width_m) || isNaN(height_m)) { +// reply.code(400).send("Invalid input 
parameters"); +// return; +// } + +// // Calculate the capacity of the water tank in liters +// const capacity = length_m * width_m * height_m * 1000; + +// reply.send({ status_code: 200, capacity: capacity }); + +// return { message: "success" }; +// } +// if (shape === "cylinder") { +// console.log("hii1"); +// const { length, diameter } = req.body; + +// // Convert input units from feet to meters +// const length_m = length * 0.3048; +// const diameter_m = diameter * 0.3048; + +// // Ensure all parameters are valid numbers +// if (isNaN(length_m) || isNaN(diameter_m)) { +// reply.code(400).send("Invalid input parameters"); +// return; +// } + +// // Calculate the capacity of the water tank in liters +// const radius = diameter_m / 2; +// const volume = Math.PI * Math.pow(radius, 2) * length_m; +// const capacity = volume * 1000; + +// reply.send({ status_code: 200, capacity: capacity }); + +// return { message: "success" }; +// } + +// // Add similar conversions for other shapes if necessary + +// if (shape === "userdefined") { +// const capacity = req.body; + +// reply.send({ status_code: 200, capacity: capacity }); + +// return { message: "success" }; +// } +// } catch (err) { +// throw boom.boomify(err); +// } +// }; + +exports.calculateCapacity = async (req, reply) => { + try { + const shape = req.body.shape; + if (shape === "rectangle") { + const { length, width, height } = req.body; + + // Convert input units from feet to meters + const length_m = length * 0.3048; + const width_m = width * 0.3048; + const height_m = height * 0.3048; + + // Ensure all parameters are valid numbers + if (isNaN(length_m) || isNaN(width_m) || isNaN(height_m)) { + reply.code(400).send("Invalid input parameters"); + return; + } + + // Calculate the capacity of the water tank in liters + const capacity = length_m * width_m * height_m * 1000; + + // Calculate the water capacity for a 1 centimeter height + const waterCapacityPerCm = length_m * width_m * 0.01 * 1000; + + reply.send({ 
status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); + + return { message: "success" }; + } + if (shape === "cylinder") { + const { length, diameter } = req.body; + + // Convert input units from feet to meters + const length_m = length * 0.3048; + const diameter_m = diameter * 0.3048; + + // Ensure all parameters are valid numbers + if (isNaN(length_m) || isNaN(diameter_m)) { + reply.code(400).send("Invalid input parameters"); + return; + } + + // Calculate the capacity of the water tank in liters + const radius = diameter_m / 2; + const volume = Math.PI * Math.pow(radius, 2) * length_m; + const capacity = volume * 1000; + + // Calculate the water capacity for a 1 centimeter height + const waterCapacityPerCm = Math.PI * Math.pow(radius, 2) * 0.01 * 1000; + + reply.send({ status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); + + return { message: "success" }; + } + + // Add similar conversions for other shapes if necessary + + if (shape === "userdefined") { + const capacity = req.body.capacity; // Assuming capacity is provided directly + + // Calculate the water capacity for a 1 centimeter height + const waterCapacityPerCm = capacity / req.body.height; // Assuming height of the shape is provided + + reply.send({ status_code: 200, capacity: capacity, waterCapacityPerCm: waterCapacityPerCm }); + + return { message: "success" }; + } + } catch (err) { + throw boom.boomify(err); + } +}; + +// exports.IotDevice = async (req, reply) => { +// try { +// const { hardwareId, mode, tanks } = req.body; + +// // create a new tank document with the current date and time +// const currentDate = new Date(); +// const date = currentDate.toISOString(); // save the date as an ISO string +// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + +// // Create an array of tank documents +// const tankDocuments = tanks.map(tank => ({ +// tankhardwareId: tank.tankhardwareId, +// tankHeight: 
tank.tankHeight, +// maxLevel: tank.maxLevel, +// minLevel: tank.minLevel, +// date: date, +// time: time +// })); + + +// // create a new IotData document with the provided data +// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + +// // save the document to MongoDB +// await ottank.save(); + +// // delete previous records except the three latest ones +// const previousRecords = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }) +// .skip(3); // skip the three latest documents + +// for (const record of previousRecords) { +// await record.remove(); +// } + +// // get the latest three documents sorted in descending order of date and time +// const latestOttanks = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }) +// .limit(3); + +// // send the latest documents +// reply.code(200).send({ latestOttanks }); +// } catch (err) { +// // send an error response +// reply.code(500).send({ error: err.message }); +// } +// }; + + + +exports.IotDevice = async (req, reply) => { + try { + const { hardwareId, mode, tanks } = req.body; + + // create a new tank document with the current date and time + const currentDate = new Date(); + const date = currentDate.toISOString(); // save the date as an ISO string + const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + + // Create an array of tank documents + const tankDocuments = tanks.map(tank => ({ + tankhardwareId: tank.tankhardwareId, + tankHeight: tank.tankHeight, + maxLevel: tank.maxLevel, + minLevel: tank.minLevel, + date: date, + time: time + })); + + // create a new IotData document with the provided data + const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + + // save the document to MongoDB + await ottank.save(); + + // Delete excess records (keep only the latest three records) + const recordsToKeep = 3; + const recordsToDelete = await IotData.find({ hardwareId }) + .sort({ 
date: -1, time: -1 }) + .skip(recordsToKeep); + + for (const record of recordsToDelete) { + await record.remove(); + } + + // Update waterlevel in tanksSchema for each tank + for (const tank of tanks) { + const { tankhardwareId, tankHeight } = tank; + + // Find the corresponding tank in tanksSchema + const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); + if (!existingTank) { + continue; // Skip to the next tank if not found + } + + const customerId = existingTank.customerId; + const tank_name = existingTank.tankName; + + // Update the waterlevel using the tankHeight value + const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; + const tank_height = parseInt(tank_height1.toFixed(0), 10); // Ensure it's an integer + const water_level_height = tank_height - tankHeight; + const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); + + const water_level = parseInt(water_level_height * waterCapacityPerCm, 10); + + if (water_level >= 0) { + existingTank.waterlevel = water_level; + + // Save the updated tank document + await existingTank.save(); + + // Update linked tanks + for (const outputConnection of existingTank.connections.outputConnections) { + const linkedTank = await Tank.findOne({ customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); + if (linkedTank) { + for (const inputConnection of linkedTank.connections.inputConnections) { + if (inputConnection.inputConnections === tank_name) { + inputConnection.water_level = water_level; + await linkedTank.save(); + } + } + } + } + } + + } + + // Send the latest three documents + const latestOttanks = await IotData.find({ hardwareId }) + .sort({ date: -1, time: -1 }); + + reply.code(200).send({ latestOttanks }); + } catch (err) { + // send an error response + reply.code(500).send({ error: err.message }); + } +}; + +console.log("this is for testing push") + +exports.IotDeviceforstandalonedevice 
= async (req, reply) => { + try { + console.log("entered post for iotstandalone") + const { hardwareId, Motor_status, tanks } = req.body; + + // create a new tank document with the current date and time const currentDate = new Date(); const date = currentDate.toISOString(); // save the date as an ISO string const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); - // Create an array of tank documents - const tankDocuments = tanks.map(tank => ({ - tankhardwareId: tank.tankhardwareId, - tankHeight: tank.tankHeight, - maxLevel: tank.maxLevel, - minLevel: tank.minLevel, - date: date, - time: time + // Create an array of tank documents + const tankDocuments = tanks.map(tank => ({ + tankhardwareId: tank.tankhardwareId, + tankHeight: tank.tankHeight, + date: date, + time: time + })); + + // create a new IotData document with the provided data + const ottank = new IotData({ hardwareId, Motor_status, tanks: tankDocuments, date, time }); + + // save the document to MongoDB + await ottank.save(); + + + // Delete excess records (keep only the latest three records) + const recordsToKeep = 3; + const recordsToDelete = await IotData.find({ hardwareId }) + .sort({ date: -1, time: -1 }) + .skip(recordsToKeep); + + for (const record of recordsToDelete) { + await record.remove(); + } + + // Update waterlevel in tanksSchema for each tank + for (const tank of tanks) { + const { tankhardwareId, tankHeight } = tank; + + // Find the corresponding tank in tanksSchema + const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); + if (!existingTank) { + continue; // Skip to the next tank if not found + } + + const customerId = existingTank.customerId; + const tank_name = existingTank.tankName; + + // Update the waterlevel using the tankHeight value + const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; + const tank_height = parseInt(tank_height1.toFixed(0), 10); // Ensure it's an integer + const 
water_level_height = tank_height - tankHeight; + const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); + + const water_level = parseInt(water_level_height * waterCapacityPerCm, 10); + + if (water_level >= 0) { + existingTank.waterlevel = water_level; + + // Save the updated tank document + await existingTank.save(); + + // Update linked tanks + for (const outputConnection of existingTank.connections.outputConnections) { + const linkedTank = await Tank.findOne({ customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); + if (linkedTank) { + for (const inputConnection of linkedTank.connections.inputConnections) { + if (inputConnection.inputConnections === tank_name) { + inputConnection.water_level = water_level; + await linkedTank.save(); + } + } + } + } + } + + } + const status = req.body.Motor_status; + + console.log(status,"status") + // Find the tank that contains the specified motor_id in its inputConnections + const tank = await Tank.findOne({ "connections.inputConnections.motor_id": hardwareId }); + + if (!tank) { + return reply.status(404).send({ + status_code: 404, + message: 'Motor not found for the specified motor_id' + }); + } + console.log(hardwareId,"hardwareId") + console.log(status,"status") + // Find the inputConnection with the specified motor_id + const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === hardwareId); + +if (inputConnection) { + // Update the motor_status of the inputConnection + inputConnection.motor_status = status; + console.log(inputConnection) + + + // Check if motor_stop_status is "1" and status is "2" + if (inputConnection.motor_stop_status === "1" && status === "2") { + console.log("got into forced manual") + console.log(inputConnection.motor_on_type,"before if motor on type") + // Check if motor_on_type is not already "forced_manual" + if (inputConnection.motor_on_type !== "forced_manual") { + 
inputConnection.motor_on_type = "forced_manual"; + console.log("entered forced manual of if") + inputConnection.motor_stop_status = "2"; + // Update startTime to the current time in the specified format + const currentTime = new Date(); + const formattedTime = currentTime.toLocaleString('en-GB', { + day: '2-digit', + month: 'short', + year: 'numeric', + hour: '2-digit', + minute: '2-digit', + hour12: false, + }).replace(',', ''); + inputConnection.startTime = formattedTime; + } + } + // Check if motor_stop_status is "1" and status is "2" + if (inputConnection.motor_stop_status === "2" && status === "1") { + console.log("got into forced manual stop") + console.log(inputConnection.motor_on_type,"before if motor on type stop") + // Check if motor_on_type is not already "forced_manual" + if (inputConnection.motor_on_type = "forced_manual") { + inputConnection.motor_on_type = "manual"; + console.log("entered forced manual of if of stop") + + // Update startTime to the current time in the specified format + + + inputConnection.motor_stop_status = "1"; + } + } + +} + + + // Save the updated tank + await tank.save(); + + + // Send the latest three documents + const latestOttanks = await IotData.find({ hardwareId }) + .sort({ date: -1, time: -1 }); + + reply.code(200).send({ latestOttanks }); + } catch (err) { + // send an error response + reply.code(500).send({ error: err.message }); + } +}; + + + + + + + +// exports.IotDevice3 = async (req, reply) => { +// try { +// const { hardwareId, mode, tanks } = req.body; + +// // create a new tank document with the current date and time +// const currentDate = new Date(); +// const date = currentDate.toISOString(); // save the date as an ISO string +// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + +// // Create an array of tank documents +// const tankDocuments = tanks.map(tank => ({ +// tankhardwareId: tank.tankhardwareId, +// tankHeight: tank.tankHeight, +// maxLevel: 
tank.maxLevel, +// minLevel: tank.minLevel, +// date: date, +// time: time +// })); + +// // create a new IotData document with the provided data +// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + +// // save the document to MongoDB +// await ottank.save(); + +// // Delete excess records (keep only the latest three records) +// const recordsToKeep = 3; +// const recordsToDelete = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }) +// .skip(recordsToKeep); + +// for (const record of recordsToDelete) { +// await record.remove(); +// } + +// // Update waterlevel in tanksSchema for each tank +// for (const tank of tanks) { +// const { tankhardwareId, tankHeight } = tank; + +// // Find the corresponding tank in tanksSchema +// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); + + +// if (existingTank) { +// // Update the waterlevel using the tankHeight value + +// const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; +// console.log(tank_height1, 25); + +// // The value of tank_height1 is a number, not a string, so you cannot use replace on it. +// // If you want to format it with commas, you can create a function to add commas to a number. +// function numberWithCommas(x) { +// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +// } + +// // Now you can use the function to format the tank_height1 value with commas. 
+// const formatted_tank_height1 = numberWithCommas(tank_height1); +// console.log(formatted_tank_height1, 25); + +// const tank_height = parseInt(formatted_tank_height1.replace(/,/g, ''), 10); +// console.log(tank_height); +// // console.log(tank_height,1) +// const water_level_height = tank_height - tankHeight +// console.log(water_level_height,2) + +// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10) +// console.log(waterCapacityPerCm,3) +// const water_level = water_level_height * waterCapacityPerCm; +// console.log(water_level, 4); + +// // Function to add commas to a number +// function numberWithCommas(x) { +// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +// } + +// const formatted_water_level = numberWithCommas(water_level); +// console.log(formatted_water_level, 4); + +// existingTank.waterlevel = parseInt(formatted_water_level.replace(/,/g, ''), 10); +// console.log(existingTank.waterlevel); + + + +// // Save the updated tank document +// await existingTank.save(); +// for (const outputConnection of existingTank.connections.outputConnections) { +// const linkedTank = await Tank.findOne({ customerId: customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); +// if (linkedTank) { +// // linkedTank.waterlevel = existingTank.waterlevel; +// //await linkedTank.save(); + +// // Update water level of tanks linked through input connections of the linked tank +// for (const inputConnection of linkedTank.connections.inputConnections) { +// if (inputConnection.inputConnections === tank_name) { +// inputConnection.water_level = water_level.toString(); +// await linkedTank.save(); +// } +// } +// } +// } +// } +// } + +// // Send the latest three documents +// const latestOttanks = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }); + +// reply.code(200).send({ latestOttanks }); +// } catch (err) { +// // send an error response +// 
reply.code(500).send({ error: err.message }); +// } +// }; + +// exports.IotDevice1 = async (req, reply) => { +// try { +// const { hardwareId, mode, tanks } = req.body; + + +// // create a new tank document with the current date and time +// const currentDate = new Date(); +// const date = currentDate.toISOString(); // save the date as an ISO string +// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + +// // Create an array of tank documents +// const tankDocuments = tanks.map(tank => ({ +// tankhardwareId: tank.tankhardwareId, +// tankHeight: tank.tankHeight, +// maxLevel: tank.maxLevel, +// minLevel: tank.minLevel, +// date: date, +// time: time +// })); + +// // create a new IotData document with the provided data +// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + +// // save the document to MongoDB +// await ottank.save(); + +// // Delete excess records (keep only the latest three records) +// const recordsToKeep = 3; +// const recordsToDelete = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }) +// .skip(recordsToKeep); + +// for (const record of recordsToDelete) { +// await record.remove(); +// } +// console.log(tanks) +// // Update waterlevel in tanksSchema for each tank +// for (const tank of tanks) { +// console.log(tank) +// const { tankhardwareId, tankHeight } = tank; +// console.log(hardwareId,tankhardwareId) +// // Find the corresponding tank in tanksSchema +// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); +// if (!existingTank) { +// console.log(`No tank found for tankhardwareId '${tankhardwareId}'. 
Skipping.`); +// continue; // Skip to the next iteration +// } +// console.log(existingTank,"existing tank") +// if (existingTank) { +// // Update the waterlevel using the tankHeight value + +// const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; +// console.log(tank_height1, 25); + +// // The value of tank_height1 is a number, not a string, so you cannot use replace on it. +// // If you want to format it with commas, you can create a function to add commas to a number. +// function numberWithCommas(x) { +// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +// } + +// // Now you can use the function to format the tank_height1 value with commas. +// const formatted_tank_height1 = numberWithCommas(tank_height1); +// console.log(formatted_tank_height1, 25); + +// const tank_height = parseInt(formatted_tank_height1.replace(/,/g, ''), 10); +// console.log(tank_height); +// // console.log(tank_height,1) +// const water_level_height = tank_height - tankHeight +// console.log(water_level_height,2) + +// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10) +// console.log(waterCapacityPerCm,3) +// const water_level = water_level_height * waterCapacityPerCm; +// console.log(water_level, 4); + +// // Function to add commas to a number +// function numberWithCommas(x) { +// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +// } + +// const formatted_water_level = numberWithCommas(water_level); +// console.log(formatted_water_level, 4); + +// existingTank.waterlevel = parseInt(formatted_water_level.replace(/,/g, ''), 10); +// console.log(existingTank.waterlevel); + + + +// // Save the updated tank document +// await existingTank.save(); +// for (const outputConnection of existingTank.connections.outputConnections) { +// const linkedTank = await Tank.findOne({ customerId: customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); +// if 
(linkedTank) { +// // linkedTank.waterlevel = existingTank.waterlevel; +// //await linkedTank.save(); + +// // Update water level of tanks linked through input connections of the linked tank +// for (const inputConnection of linkedTank.connections.inputConnections) { +// if (inputConnection.inputConnections === tank_name) { +// inputConnection.water_level = water_level.toString(); +// await linkedTank.save(); +// } +// } +// } +// } +// } +// } + +// // Send the latest three documents +// const latestOttanks = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }); + +// reply.code(200).send({ latestOttanks }); +// } catch (err) { +// // send an error response +// reply.code(500).send({ error: err.message }); +// } +// }; + + +// exports.IotDevice1 = async (req, reply) => { +// try { +// const { hardwareId, mode, tanks } = req.body; + +// // create a new tank document with the current date and time +// const currentDate = new Date(); +// const date = currentDate.toISOString(); // save the date as an ISO string +// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + +// // Create an array of tank documents +// const tankDocuments = tanks.map(tank => ({ +// tankhardwareId: tank.tankhardwareId, +// tankHeight: tank.tankHeight, +// maxLevel: tank.maxLevel, +// minLevel: tank.minLevel, +// date: date, +// time: time +// })); + +// // create a new IotData document with the provided data +// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + +// // save the document to MongoDB +// await ottank.save(); + +// // Delete excess records (keep only the latest three records) +// const recordsToKeep = 3; +// const recordsToDelete = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }) +// .skip(recordsToKeep); + +// for (const record of recordsToDelete) { +// await record.remove(); +// } + +// // Update waterlevel in tanksSchema for each tank +// for (const tank of 
tanks) { +// const { tankhardwareId, tankHeight } = tank; + +// if (tankHeight === null || tankHeight === undefined) { +// continue; // Skip this iteration and move to the next tank +// } +// // Find the corresponding tank in tanksSchema +// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); + +// if (existingTank) { +// // Update the waterlevel using the tankHeight value +// const tank_height = parseInt(existingTank.height.replace(/,/g, ''), 10) * 30.48; +// const water_level_height = tank_height - tankHeight; +// const customerId = existingTank.customerId; +// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); +// let water_level = water_level_height * waterCapacityPerCm; +// water_level = Math.round(water_level); // Round to nearest whole number +// existingTank.waterlevel = water_level.toString(); // Convert to string as per schema definition +// const tank_name = existingTank.tankName; +// // Save the updated tank document +// await existingTank.save(); + +// // Update water level of tanks linked through output connections +// for (const outputConnection of existingTank.connections.outputConnections) { +// const linkedTank = await Tank.findOne({ customerId: customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); +// if (linkedTank) { +// // linkedTank.waterlevel = existingTank.waterlevel; +// //await linkedTank.save(); + +// // Update water level of tanks linked through input connections of the linked tank +// for (const inputConnection of linkedTank.connections.inputConnections) { +// if (inputConnection.inputConnections === tank_name) { +// inputConnection.water_level = water_level.toString(); +// await linkedTank.save(); +// } +// } +// } +// } +// } +// } + +// // Send the latest three documents +// const latestOttanks = await IotData.find({ hardwareId }) +// .sort({ date: -1, time: -1 }); + +// reply.code(200).send({ latestOttanks }); +// } 
catch (err) { +// // send an error response +// reply.code(500).send({ error: err.message }); +// } +// }; + + + + +// exports.getIotD = async(req, reply) => { +// try { +// await IotData.find({hardwareId: req.query.hardwareId}) +// .exec() +// .then((docs) => { +// reply.send({ status_code: 200, data: docs, count: docs.length }); +// }) +// .catch((err) => { +// console.log(err); +// reply.send({ error: err }); +// }); +// } catch (err) { +// throw boom.boomify(err); +// } +// } + + +exports.getIotD = async (req, reply) => { + try { + const latestRecords = await IotData.find({ hardwareId: req.query.hardwareId }) + .sort({ date: -1, time: -1 }) // Sort by date and time in descending order + .limit(3) // Limit the result to 3 records + .exec(); + + reply.send({ status_code: 200, data: latestRecords, count: latestRecords.length }); + } catch (err) { + console.error(err); + throw boom.boomify(err); + } +}; + + +exports.getLatestData = async (req, reply) => { + try { + const hardwareId = req.params.hardwareId; + + // get the latest two tank documents for the current hardwareId sorted in descending order of date and time + const latestTanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 }).limit(2); + + // if the number of documents for the current hardwareId is less than two, return an error response + if (latestTanks.length < 2) { + return reply.code(404).send({ error: 'Not enough data' }); + } + + // calculate the time difference between the latest and previous documents + const latestDate = new Date(latestTanks[0].date); + const previousDate = new Date(latestTanks[1].date); + const latestTime = latestTanks[0].time.split('.')[0]; // remove milliseconds + const previousTime = latestTanks[1].time.split('.')[0]; // remove milliseconds + latestDate.setHours(parseInt(latestTime.substring(0, 2)), parseInt(latestTime.substring(3, 5)), parseInt(latestTime.substring(6, 8))); + previousDate.setHours(parseInt(previousTime.substring(0, 2)), 
parseInt(previousTime.substring(3, 5)), parseInt(previousTime.substring(6, 8))); + const timeDiff = (latestDate.getTime() - previousDate.getTime()) / 1000; // convert from milliseconds to seconds + console.log(latestDate,previousDate,latestTime,previousTime,timeDiff) + reply.code(200).send({ timeDiff }); + + } catch (err) { + // send an error response + reply.code(500).send({ error: err.message }); + } +}; + +exports.changesurveystatus = async (req, reply) => { + try { + const customerId = req.params.customerId; + + + const result = await User.findOneAndUpdate( + { customerId: customerId }, + { $set: { survey_status: req.body.survey_status } }, + { new: true } + ); + + + + reply.code(200).send({ result }); + + } catch (err) { + // send an error response + reply.code(500).send({ error: err.message }); + } +}; + + + +exports.checkStatusofIot = async (req, reply) => { + try { + // get a list of unique hardware IDs in the collection + const hardwareIds = await IotData.distinct('hardwareId'); + + // create an empty object to store the time differences for each hardware ID + const timeDiffs = {}; + + // loop over each hardware ID and calculate the time difference between the latest two records + for (const hardwareId of hardwareIds) { + // get the latest two records for the current hardware ID + const latestTanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 }).limit(2); + + // if the number of records for the current hardware ID is less than two, skip to the next ID + if (latestTanks.length < 2) { + continue; + } + + // calculate the time difference between the latest and previous records for the current hardware ID + const latestDate = new Date(latestTanks[0].date); + const previousDate = new Date(latestTanks[1].date); + const latestTime = latestTanks[0].time.split('.')[0]; // remove milliseconds + const previousTime = latestTanks[1].time.split('.')[0]; // remove milliseconds + latestDate.setHours(parseInt(latestTime.substring(0, 2)), 
parseInt(latestTime.substring(3, 5)), parseInt(latestTime.substring(6, 8))); + previousDate.setHours(parseInt(previousTime.substring(0, 2)), parseInt(previousTime.substring(3, 5)), parseInt(previousTime.substring(6, 8))); + const timeDiff = (latestDate.getTime() - previousDate.getTime()) / 1000; // convert from milliseconds to seconds + + // store the time difference for the current hardware ID + timeDiffs[hardwareId] = timeDiff; + } + + // send the time differences for all hardware IDs + reply.code(200).send({ timeDiffs }); + + } catch (err) { + // send an error response + reply.code(500).send({ error: err.message }); + } +}; + +exports.totalwaterLevelSum = async (request, reply) => { + const { tankLocation, typeOfWater } = request.query; + + const waterlevelSum = await Tank.aggregate([ + { + $match: { tankLocation, typeOfWater } + }, + { + $group: { + _id: null, + totalWaterlevel: { $sum: { $toInt: '$waterlevel' } } + } + } + ]); + + const result = waterlevelSum[0]?totalWaterlevel : 0; + + reply.send({ waterlevelSum: result }); +} + + +exports.startUpdateLoop = async (request, reply) => { + const updateInterval = 5000; + + setInterval(async () => { + try { + const iotTank = await IotData.findOne({ hardwareId: request.body.hardwareId }); + if (!iotTank) { + console.log(`IOTtank not found for hardwareId ${request.body.hardwareId}`); + return; + } + + const currentWaterlevel = Number(iotTank.tankHeight) * 200; + const tank = await Tank.findOne({ hardwareId: iotTank.hardwareId }); + + let combinedWaterlevel; + if (tank) { + combinedWaterlevel = currentWaterlevel + Number(tank.waterlevel); + } else { + combinedWaterlevel = currentWaterlevel; + } + + await Tank.updateOne({ hardwareId: iotTank.hardwareId }, { $set: { waterlevel: combinedWaterlevel } }); + + console.log(`Waterlevel updated successfully for hardwareId ${iotTank.hardwareId}`); + console.log(`Previous waterlevel: ${tank ? 
tank.waterlevel : 0}`); + console.log(`Current waterlevel: ${currentWaterlevel}`); + console.log(`Combined waterlevel: ${combinedWaterlevel}`); + } catch (err) { + console.error(err); + } + }, updateInterval); +}; + + + + +const updatewaterlevelsatmidnight = async () => { + console.log('Cron job triggered at:', moment().tz('Asia/Kolkata').format()); + + try { + const tanks = await Tank.find({}); + for (const tank of tanks) { + tank.waterlevel_at_midnight = tank.waterlevel; + tank.total_water_added_from_midnight = "0"; + await tank.save(); + console.log(`Updated tank ${tank._id} waterlevel_at_midnight to ${tank.waterlevel}`); + } + console.log('Waterlevel noted in waterlevel_at_midnight'); + } catch (error) { + console.error('Error occurred:', error); + } +}; + +// Schedule the task to run every day at 13:49 IST (1:49 PM IST) +cron.schedule('0 0 * * *', updatewaterlevelsatmidnight, { + timezone: "Asia/Kolkata" +}); + + +let consumptionTask; + +// Function to clear the specific scheduled task +const clearConsumptionSchedule = () => { + if (consumptionTask) { + consumptionTask.stop(); // Stop the existing task if it exists + consumptionTask = null; // Clear the reference + } +}; + +// Function to update total consumption till midnight +const updatetotalConsumptiontillmidnight = async () => { + console.log('Cron job triggered at:', moment().tz('Asia/Kolkata').format()); + + try { + const tanks = await Tank.find({}); + for (const tank of tanks) { + const waterlevel_at_midnight = parseInt((tank.waterlevel_at_midnight).replace(/,/g, ''), 10); + const total_water_added_from_midnight = parseInt((tank.total_water_added_from_midnight).replace(/,/g, ''), 10); + const waterlevel = parseInt((tank.waterlevel).replace(/,/g, ''), 10); + const capacity = parseInt((tank.capacity).replace(/,/g, ''), 10); + console.log(waterlevel_at_midnight,total_water_added_from_midnight,waterlevel) + const totalconsumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel; + 
// const available_capacity = total_water_added_from_midnight + waterlevel; + const consumed_percentage = ((totalconsumption / capacity) * 100).toFixed(2); + console.log(totalconsumption,tank.tankName) + + // Format the date in the desired format + const formattedDate = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + + // Check if the record already exists + const existingRecord = await TankConsumptionOriginalSchema.findOne({ + customerId: tank.customerId, + tankName: tank.tankName, + tankLocation: tank.tankLocation, + time: formattedDate + }); + + if (!existingRecord) { + // Create and save the new document if it doesn't exist + const newTankConsumption = new TankConsumptionOriginalSchema({ + customerId: tank.customerId, + tankName: tank.tankName, + tankLocation: tank.tankLocation, + available_capacity: (tank.capacity).toString(), + consumption: totalconsumption.toString(), + consumed_percentage: consumed_percentage.toString(), + time: formattedDate, // Save the formatted date + block:tank.blockName, + typeofwater:tank.typeOfWater + }); + + await newTankConsumption.save(); + console.log(`Created new record for tank ${tank.tankName} at ${formattedDate}`); + } else { + console.log(`Record already exists for tank ${tank.tankName} at ${formattedDate}`); + } + } + console.log('Waterlevel noted in waterlevel_at_midnight'); + } catch (error) { + console.error('Error occurred:', error); + } +}; + +// Clear the existing schedule for this task before creating a new one +clearConsumptionSchedule(); + +// Schedule the task to run every day at 12:49 PM IST and store the reference +consumptionTask = cron.schedule('50 23 * * *', updatetotalConsumptiontillmidnight, { + timezone: "Asia/Kolkata" +}); + +console.log('Scheduled task to update total consumption till midnight.'); + + + + + + + + +exports.deletemotordatarecordsbefore7days = async (req, reply) => { + try { + // Schedule the task to run every day at 10 seconds past the minute + cron.schedule('0 0 * * *', 
async () => { + try { + // Run the deletion task once a day + setInterval(async () => { + await deleteOldRecords(); + }, 24 * 60 * 60 * 1000); // 24 hours in milliseconds + + } catch (error) { + console.error('Error occurred:', error); + } + }); + + + } catch (err) { + throw boom.boomify(err); + } +}; + + +exports.motorstatus = async (req, reply) => { + + try { + const motor_id = req.params.motor_id; + console.log(motor_id) + + const motorInfo = await Tank.findOne({ motor_id: motor_id }); + + console.log(motorInfo) + + + //return update; + + reply.send({ status_code: 200,status:motorInfo.motor_status}); + + + + } + catch (err) { + throw boom.boomify(err); + } +}; + + + +exports.readMotorStatus = async (req, reply) => { + try { + const motor_id = req.query.motor_id; + console.log("entered read api for iot") + + // Perform any necessary logic based on action (1: Start, 2: Stop) + + // For example, you can update a database or trigger an action + + const tanks = await Tank.find({}); + + let motor_stop_status = null; + + for (let tank of tanks) { + const inputConnections = tank.connections.inputConnections; + const motorConnection = inputConnections.find(conn => conn.motor_id === motor_id); + if (motorConnection) { + // Check if motor_on_type is "forced_manual" and motor_stop_status is "1" + if (motorConnection.motor_on_type === "forced_manual" && motorConnection.motor_stop_status === "1") { + motor_stop_status = "2"; // Send motor_stop_status as "2" + } else { + motor_stop_status = motorConnection.motor_stop_status; // Otherwise, assign its original value + } + + break; + } + + } + + if (!motor_stop_status) { + return reply.status(404).send({ + status_code: 404, + message: 'Motor not found for the specified motor_id' + }); + } + + reply.send({ + status_code: 200, + motor_stop_status: motor_stop_status + }); + } catch (err) { + throw boom.boomify(err); + } +}; + + +exports.readMotorStatusFromIot = async (req, reply) => { + try { + const motor_id = req.query.motor_id; + 
console.log(motor_id) + + // Find the tank that contains the specified motor_id in its inputConnections + const tank = await Tank.findOne({ "connections.inputConnections.motor_id": motor_id }); + + if (!tank) { + return reply.status(404).send({ + status_code: 404, + message: 'Motor not found for the specified motor_id' + }); + } + + // Find the inputConnection with the specified motor_id + const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === motor_id); + + // Extract motor_status and motor_stop_status from the inputConnection + const motor_status = inputConnection.motor_status; + const motor_stop_status = inputConnection.motor_stop_status; + + // Send the response with motor_status and motor_stop_status + reply.send({ + status_code: 200, + motor_status: motor_status, + motor_stop_status: motor_stop_status + }); + } catch (err) { + throw boom.boomify(err); + } +}; + + + +exports.writeMotorStatus = async (req, reply) => { + try { + const motor_id = req.body.motor_id; + const status = req.body.status; + + // Find the tank that contains the specified motor_id in its inputConnections + const tank = await Tank.findOne({ "connections.inputConnections.motor_id": motor_id }); + + if (!tank) { + return reply.status(404).send({ + status_code: 404, + message: 'Motor not found for the specified motor_id' + }); + } + + // Find the inputConnection with the specified motor_id + const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === motor_id); + + // Update the motor_status of the inputConnection + inputConnection.motor_status = status; + + + // Save the updated tank + await tank.save(); + + // Send the response with the updated motor_status + reply.send({ + status_code: 200, + motor_status: status + }); + } catch (err) { + throw boom.boomify(err); + } +}; + + + + +exports.changeMotorStatus = async (req, reply) => { + try { + const motor_id = req.body.motor_id; + const action = req.body.action; + + // Perform any 
necessary logic to handle motor status update from the device + + // For example, update a database with the new status, current, and temp values + + await Tank.updateOne( + { "connections.inputConnections.motor_id": motor_id }, + { + $set: { + "connections.inputConnections.$.motor_stop_status":action , + + } + } + ); + + // Send immediat + + // Fetch the motor_status for the given motor_id + + + // Send the response with motor_stop_status and motor_status + reply.send({ + status_code: 200, + motor_stop_status: action, + // motor_status: updatedMotor.motor_status // Assuming motor_status is a field in your Tank model + }); + + + } catch (err) { + throw boom.boomify(err); + } +}; + + + + +exports.motortemperature = async (req, reply) => { + + try { + const motor_id = req.params.motor_id; + console.log(motor_id) + + const motorInfo = await Tank.findOne({ motor_id: motor_id }); + + console.log(motorInfo) + + + //return update; + + reply.send({ status_code: 200,temperature:motorInfo.motor_temperfature}); + + + + } + catch (err) { + throw boom.boomify(err); + } +}; + +exports.update_auto_mode = async (req, reply) => { + try { + const customerId = req.params.customerId; + const { motor_id, auto_mode } = req.body; + + // Update inputConnections' auto_mode + await Tank.updateOne( + { customerId: customerId, "connections.inputConnections.motor_id": motor_id }, + { $set: { "connections.inputConnections.$.auto_mode": auto_mode } } + ); + + + + reply.send({ status_code: 200, message: "Auto mode and percentages updated successfully." }); + } catch (error) { + throw boom.boomify(error); + } +}; + + + + + +exports.update_auto_percentage = async (req, reply) => { + try { + const customerId = req.params.customerId; + let { tankName, tankLocation, auto_min_percentage, auto_max_percentage, auto_mode_type } = req.body; + + // Handle the optional parameters + tankName = tankName ? tankName : null; + tankLocation = tankLocation ? 
tankLocation.toLowerCase() : null; + + const filter = { customerId }; + + // If tankName is not 'all', add it to the filter + if (tankName && tankName !== "all") { + filter.tankName = tankName; + } + + // Only add tankLocation to the filter if tankName is not 'all' + if (tankLocation && tankName !== "all") { + filter.tankLocation = tankLocation; + } + + console.log("Update filter:", JSON.stringify(filter, null, 2)); + + // Check if tanks exist + const matchingTanks = await Tank.find(filter); + console.log("Matching tanks:", matchingTanks); + + if (matchingTanks.length === 0) { + return reply.send({ status_code: 400, message: "No matching records found." }); + } + + // Define the update fields + const updateData = { + auto_min_percentage: String(auto_min_percentage || "20"), + auto_max_percentage: String(auto_max_percentage || "80"), + auto_mode_type: auto_mode_type || "default", + }; + + let result; + if (tankName && tankName !== "all") { + // Update only one tank if tankName is specified and not "all" + result = await Tank.updateOne(filter, { $set: updateData }); + } else { + // Update all tanks of the particular customer if tankName is "all" + result = await Tank.updateMany(filter, { $set: updateData }); + + // If auto_mode_type is default and tankName is "all", save or update the data in CustomerAutoPercentages + if (auto_mode_type === "default") { + const currentDate = new Date().toLocaleString("en-GB", { timeZone: "UTC" }); // Get current date in UTC + const formattedDate = currentDate.split(",").join(" -"); // Format it like '17-Dec-2024 - 15:56' + + // Use findOneAndUpdate to either update the existing record or create a new one if it doesn't exist + const updateOrCreate = await CustomerAutoPercentages.findOneAndUpdate( + { customerId }, // Search for the record with the customerId + { + $set: { + auto_min_percentage: String(auto_min_percentage || "20"), + auto_max_percentage: String(auto_max_percentage || "80"), + date: formattedDate, + }, + }, + { upsert: 
true, new: true } // If no record found, create a new one; return the updated record + ); + + console.log("CustomerAutoPercentages updated/created:", updateOrCreate); + } + } + + console.log("Update result:", result); + + reply.send({ status_code: 200, message: "Auto mode and percentages updated successfully." }); + + } catch (error) { + console.error(error); + reply.send({ status_code: 500, message: "Internal server error." }); + } +}; + + + + +// Controller function to get CustomerAutoPercentages by customerId +exports.getCustomerAutoPercentages = async (req, reply) => { + try { + const { customerId } = req.params; // Extract customerId from the params + + // Find the record in CustomerAutoPercentages based on customerId + const customerData = await CustomerAutoPercentages.findOne({ customerId }); + + if (!customerData) { + return reply.send({ + status_code: 404, + message: "No data found for the provided customerId." + }); + } + + reply.send({ + status_code: 200, + message: "Customer data retrieved successfully.", + data: customerData + }); + + } catch (error) { + console.error(error); + reply.send({ + status_code: 500, + message: "Internal server error." + }); + } +}; + + + + +// exports.update_auto_percentage = async (req, reply) => { +// try { +// const customerId = req.params.customerId; +// let { tankName, tankLocation, auto_min_percentage, auto_max_percentage, auto_mode_type } = req.body; + +// // Handle the optional parameters +// tankName = tankName ? tankName : null; +// tankLocation = tankLocation ? 
tankLocation.toLowerCase() : null; + +// const filter = { customerId }; + +// // If tankName is not 'all', add it to the filter +// if (tankName && tankName !== "all") { +// filter.tankName = tankName; +// } + +// // Only add tankLocation to the filter if tankName is not 'all' +// if (tankLocation && tankName !== "all") { +// filter.tankLocation = tankLocation; +// } + +// console.log("Update filter:", JSON.stringify(filter, null, 2)); + +// // Check if tanks exist +// const matchingTanks = await Tank.find(filter); +// console.log("Matching tanks:", matchingTanks); + +// if (matchingTanks.length === 0) { +// return reply.send({ status_code: 400, message: "No matching records found." }); +// } + +// // Define the update fields +// const updateData = { +// auto_min_percentage: String(auto_min_percentage || "20"), +// auto_max_percentage: String(auto_max_percentage || "80"), +// auto_mode_type: auto_mode_type || "default", +// }; + +// let result; +// if (tankName && tankName !== "all") { +// // Update only one tank if tankName is specified and not "all" +// result = await Tank.updateOne(filter, { $set: updateData }); +// } else { +// // Update all tanks of the particular customer if tankName is "all" +// result = await Tank.updateMany(filter, { $set: updateData }); +// } + +// console.log("Update result:", result); + + +// reply.send({ status_code: 200, message: "Auto mode and percentages updated successfully." }); + +// } catch (error) { +// console.error(error); +// reply.send({ status_code: 500, message: "Internal server error." 
}); +// } +// }; + + + + + + +//storing water level for every 15 minutes + +const getFormattedISTTime = () => { + return moment().tz('Asia/Kolkata').format('DD-MM-YYYY hh:mm:ss A'); +}; + +const storeWaterLevels = async () => { + try { + const tanks = await Tank.find({}); + const currentTime = getFormattedISTTime(); + + const waterLevelRecords = tanks.map(tank => ({ + customerId: tank.customerId, + tankName: tank.tankName, + tankLocation: tank.tankLocation, + waterlevel: tank.waterlevel, + time: currentTime })); + + await TankWaterLevel.insertMany(waterLevelRecords); + console.log('Water levels stored successfully'); + } catch (error) { + console.error('Error storing water levels:', error); + } +}; - // create a new IotData document with the provided data - const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); +setInterval(storeWaterLevels, 15 * 60 * 1000); - // save the document to MongoDB - await ottank.save(); - // Delete excess records (keep only the latest three records) - const recordsToKeep = 3; - const recordsToDelete = await IotData.find({ hardwareId }) - .sort({ date: -1, time: -1 }) - .skip(recordsToKeep); - for (const record of recordsToDelete) { - await record.remove(); + +console.log('Cron job scheduled to update water levels at midnight'); + + +exports.getBlockData = async (req, reply) => { + try { + const customerId = req.params.customerId; + + // Get all tank documents for the current customerId + const tanks = await Tank.find({ customerId }); + + // Extract the blockName from each tank + const blockNames = tanks.map(tank => tank.blockName); + + // Remove duplicates by converting the array to a Set and then back to an array + const uniqueBlockNames = [...new Set(blockNames)]; + + // Add "all" and "nduku sneha antha kopam" to the block names + uniqueBlockNames.unshift("All"); + + // Send the unique blockNames in the response + reply.code(200).send({ blockNames: uniqueBlockNames }); + + } catch (err) { + // Log the error 
for debugging purposes + console.error(err); + + // Send an error response + reply.code(500).send({ error: err.message }); + } +}; + + + +// const mqtt = require('mqtt'); +// // const moment = require('moment-timezone'); +// // const IotData = require('./models/IotData'); // Replace with your actual model +// // const Tank = require('./models/Tank'); // Replace with your actual model + +// // A map to keep track of MQTT clients by hw_Id +// const mqttClients = new Map(); + +// // Function to create a new MQTT client for a specific hw_Id +// function createMqttClient(hw_Id) { +// const client = mqtt.connect('mqtt://35.207.198.4:1883'); // Connect to the MQTT broker + +// client.on('connect', () => { +// console.log(`Client for hw_Id ${hw_Id} connected to MQTT broker`); +// client.subscribe('water/iot-data', (err) => { +// if (err) { +// console.error(`Error subscribing to topic for hw_Id ${hw_Id}:`, err); +// } else { +// console.log(`Client for hw_Id ${hw_Id} subscribed to water/iot-data topic`); +// } +// }); +// }); + +// client.on('message', async (topic, message) => { +// if (topic === 'water/iot-data') { +// try { +// const data = JSON.parse(message.toString()); +// const { hw_Id: receivedHwId, Motor_status, tanks } = data.objects; + +// // Ensure we process data only for the current client +// if (receivedHwId !== hw_Id) return; + +// // Get the current date and time +// const currentDate = new Date(); +// const date = currentDate.toISOString(); // ISO string for date +// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); // Time in 'HH:MM:SS' + +// // Create tank documents for the received tanks +// const tankDocuments = tanks.map(tank => ({ +// tankhardwareId: tank.Id, +// tankHeight: tank.level, +// date, +// time +// })); + +// // Save IoT data +// const iotTankData = new IotData({ +// hardwareId: receivedHwId, +// Motor_status, +// tanks: tankDocuments, +// date, +// time +// }); +// await 
iotTankData.save(); + +// // Delete excess records (keep only the latest three) +// const recordsToKeep = 3; +// const recordsToDelete = await IotData.find({ hardwareId: receivedHwId }) +// .sort({ date: -1, time: -1 }) +// .skip(recordsToKeep); + +// for (const record of recordsToDelete) { +// await record.remove(); +// } + +// // Process tanks +// for (const tank of tanks) { +// const { Id: tankhardwareId, level: tankHeight } = tank; +// const existingTank = await Tank.findOne({ hardwareId: receivedHwId, tankhardwareId }); +// if (!existingTank) continue; + +// const customerId = existingTank.customerId; +// const tank_name = existingTank.tankName; + +// const tankHeightInCm = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; +// const tank_height = parseInt(tankHeightInCm.toFixed(0), 10); +// const waterLevelHeight = tank_height - tankHeight; +// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); + +// const waterLevel = parseInt(waterLevelHeight * waterCapacityPerCm, 10); + +// if (tankHeight > 0 && waterLevel >= 0) { +// existingTank.waterlevel = waterLevel; +// await existingTank.save(); + +// for (const outputConnection of existingTank.connections.outputConnections) { +// const linkedTank = await Tank.findOne({ +// customerId, +// tankName: outputConnection.outputConnections, +// tankLocation: outputConnection.output_type +// }); +// if (linkedTank) { +// for (const inputConnection of linkedTank.connections.inputConnections) { +// if (inputConnection.inputConnections === tank_name) { +// inputConnection.water_level = waterLevel; +// await linkedTank.save(); +// } +// } +// } +// } +// } +// } + +// // Update motor status +// const status = Motor_status; +// const motorTank = await Tank.findOne({ "connections.inputConnections.motor_id": receivedHwId }); + +// if (motorTank) { +// const inputConnection = motorTank.connections.inputConnections.find(conn => conn.motor_id === receivedHwId); +// if 
(inputConnection) { +// inputConnection.motor_status = status; +// if (inputConnection.motor_stop_status === "1" && status === 2 && inputConnection.motor_on_type !== "forced_manual") { +// const currentTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); +// inputConnection.motor_stop_status = "2"; +// inputConnection.motor_on_type = "forced_manual"; +// inputConnection.startTime = currentTime; +// } + +// if (inputConnection.motor_stop_status === "2" && status === 1) { +// inputConnection.motor_stop_status = "1"; +// } + +// await motorTank.save(); +// } +// } + +// console.log(`Data processed successfully for hw_Id: ${receivedHwId}`); +// } catch (err) { +// console.error(`Error processing message for hw_Id ${hw_Id}:`, err.message); +// } +// } +// }); + +// return client; +// } + +// // Handle incoming MQTT messages for water/iot-data topic +// const mainClient = mqtt.connect('mqtt://35.207.198.4:1883'); +// mainClient.on('connect', () => { +// console.log('Main client connected to MQTT broker'); +// mainClient.subscribe('water/iot-data', (err) => { +// if (err) { +// console.error('Error subscribing to water/iot-data topic:', err); +// } +// }); +// }); + +// mainClient.on('message', (topic, message) => { +// if (topic === 'water/iot-data') { +// try { +// const data = JSON.parse(message.toString()); +// const { hw_Id } = data.objects; + +// if (!mqttClients.has(hw_Id)) { +// const client = createMqttClient(hw_Id); +// mqttClients.set(hw_Id, client); +// } +// } catch (err) { +// console.error('Error handling message in main client:', err.message); +// } +// } +// }); + + + + + + + +const mqtt = require('mqtt'); +require('dotenv').config(); + +// **Persistent MQTT Connection** +const client = mqtt.connect('mqtt://35.207.198.4:1883', { + clientId: `mqtt_server_${Math.random().toString(16).substr(2, 8)}`, + clean: false, // Ensures MQTT retains subscriptions + reconnectPeriod: 2000, // Reconnect every 2 seconds +}); + +const subscribedTopics = new 
Set(); +const activeDevices = new Set(); // Keep track of active devices + +client.on('connect', () => { + console.log('✅ Connected to MQTT broker'); + + // **Ensure re-subscriptions after reconnect** + subscribedTopics.forEach(topic => { + client.subscribe(topic, { qos: 1 }, (err) => { + if (err) { + console.error(`❌ Error resubscribing to ${topic}:`, err); + } else { + console.log(`🔄 Resubscribed to ${topic}`); + } + }); + }); + + // **Subscribe to new device announcements** + client.subscribe('water/iot-data/announce', { qos: 1 }, (err) => { + if (err) { + console.error('❌ Error subscribing to announcement topic:', err); + } else { + console.log('📡 Subscribed to water/iot-data/announce'); } + }); +}); + +client.on('message', async (topic, message) => { + console.log(`📩 Message received on topic ${topic}: ${message.toString()}`); - // Update waterlevel in tanksSchema for each tank - for (const tank of tanks) { - const { tankhardwareId, tankHeight } = tank; + try { + const data = JSON.parse(message.toString()); - // Find the corresponding tank in tanksSchema - const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); - if (!existingTank) { - continue; // Skip to the next tank if not found + // **Handle device announcements** + if (topic === 'water/iot-data/announce') { + if (!data.objects || !data.objects.hw_Id) { + console.error("❌ Invalid announcement format. 
Missing hw_Id."); + return; } - const customerId = existingTank.customerId; - const tank_name = existingTank.tankName; + const hw_Id = data.objects.hw_Id; + const deviceTopic = `water/iot-data/${hw_Id}`; - // Update the waterlevel using the tankHeight value - const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; - const tank_height = parseInt(tank_height1.toFixed(0), 10); // Ensure it's an integer - const water_level_height = tank_height - tankHeight; - const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); - - const water_level = parseInt(water_level_height * waterCapacityPerCm, 10); + if (!subscribedTopics.has(deviceTopic)) { + client.subscribe(deviceTopic, { qos: 1 }, (err) => { + if (err) { + console.error(`❌ Error subscribing to ${deviceTopic}:`, err); + } else { + console.log(`✅ Subscribed to ${deviceTopic}`); + subscribedTopics.add(deviceTopic); + activeDevices.add(hw_Id); + console.log('📡 Active Devices:', Array.from(activeDevices)); - if (water_level >= 0) { - existingTank.waterlevel = water_level; - - // Save the updated tank document - await existingTank.save(); - - // Update linked tanks - for (const outputConnection of existingTank.connections.outputConnections) { - const linkedTank = await Tank.findOne({ customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); - if (linkedTank) { - for (const inputConnection of linkedTank.connections.inputConnections) { - if (inputConnection.inputConnections === tank_name) { - inputConnection.water_level = water_level; - await linkedTank.save(); - } - } + // ✅ **Now also process data** + processIotData(hw_Id, data); } - } + }); + } else { + console.log(`🔄 Already subscribed to ${deviceTopic}, processing data.`); + processIotData(hw_Id, data); } - + return; } - // Send the latest three documents - const latestOttanks = await IotData.find({ hardwareId }) - .sort({ date: -1, time: -1 }); - - 
reply.code(200).send({ latestOttanks }); + // **Process IoT Data for device topics** + if (topic.startsWith('water/iot-data/')) { + setImmediate(() => { + console.log(`🚀 Entering processIotData() for topic: ${topic}`); + const hw_Id = topic.split('/')[2]; + processIotData(hw_Id, data); + }); + } } catch (err) { - // send an error response - reply.code(500).send({ error: err.message }); + console.error('❌ Error processing message:', err.message); } -}; +}); -console.log("this is for testing push") -exports.IotDeviceforstandalonedevice = async (req, reply) => { +client.on('error', (err) => console.error('❌ MQTT Error:', err)); +client.on('close', () => console.log('⚠️ MQTT Connection Closed.')); +client.on('offline', () => console.log('⚠️ MQTT Broker Offline.')); + +async function processIotData(hw_Id, data) { try { - console.log("entered post for iotstandalone") - const { hardwareId, Motor_status, tanks } = req.body; + console.log(`📡 Processing IoT Data for hw_Id: ${hw_Id}`, JSON.stringify(data, null, 2)); - // create a new tank document with the current date and time + if (!data.objects || !data.objects.tanks) { + console.error(`❌ Missing 'tanks' in data for hw_Id: ${hw_Id}`); + return; + } + + const { Motor_status, tanks } = data.objects; const currentDate = new Date(); - const date = currentDate.toISOString(); // save the date as an ISO string + const date = currentDate.toISOString(); // ISO string for date const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); - // Create an array of tank documents const tankDocuments = tanks.map(tank => ({ - tankhardwareId: tank.tankhardwareId, - tankHeight: tank.tankHeight, - date: date, - time: time + tankhardwareId: tank.Id, + tankHeight: tank.level, + date, + time })); - // create a new IotData document with the provided data - const ottank = new IotData({ hardwareId, Motor_status, tanks: tankDocuments, date, time }); + const iotTankData = new IotData({ + hardwareId: hw_Id, + 
Motor_status, + tanks: tankDocuments, + date, + time + }); + await iotTankData.save(); - // save the document to MongoDB - await ottank.save(); + // Delete excess records (keep only the latest three records) + // const recordsToKeep = 3; + // const recordsToDelete = await IotData.find({ hardwareId: hw_Id }) + // .sort({ date: -1, time: -1 }) + // .skip(recordsToKeep); + // for (const record of recordsToDelete) { + // await record.remove(); + // } - // Delete excess records (keep only the latest three records) - const recordsToKeep = 3; - const recordsToDelete = await IotData.find({ hardwareId }) - .sort({ date: -1, time: -1 }) - .skip(recordsToKeep); + // Process each tank - for (const record of recordsToDelete) { - await record.remove(); - } - // Update waterlevel in tanksSchema for each tank - for (const tank of tanks) { - const { tankhardwareId, tankHeight } = tank; - // Find the corresponding tank in tanksSchema - const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); - if (!existingTank) { - continue; // Skip to the next tank if not found - } + + for (const tank of tanks) { + const { Id: tankhardwareId, level: tankHeight } = tank; + const existingTank = await Tank.findOne({ hardwareId: hw_Id, tankhardwareId }); + // console.log(hw_Id,"hw_Id") + // console.log(tankhardwareId,"tankhardwareId") + // console.log(existingTank,"existingTank") + if (!existingTank) continue; const customerId = existingTank.customerId; const tank_name = existingTank.tankName; - // Update the waterlevel using the tankHeight value - const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; - const tank_height = parseInt(tank_height1.toFixed(0), 10); // Ensure it's an integer - const water_level_height = tank_height - tankHeight; + const tankHeightInCm = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; + const tank_height = parseInt(tankHeightInCm.toFixed(0), 10); + const waterLevelHeight = tank_height - tankHeight; const 
waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); + const waterLevel = parseInt(waterLevelHeight * waterCapacityPerCm, 10); + + console.log(`🚰 Tank [${tankhardwareId}] - Level: ${tankHeight}, Calculated Water Level: ${waterLevel}`); + const now = moment().tz('Asia/Kolkata'); + + if (tankHeight <= 0) { + // Case 1: First time signal is lost + if ( + existingTank.slave_status !== "signal_lost1" && + existingTank.slave_status !== "not_working1" + ) { + existingTank.slave_status = "signal_lost1"; + existingTank.slave_disconnected_time = now.format('DD-MMM-YYYY - HH:mm:ss'); + await existingTank.save(); - const water_level = parseInt(water_level_height * waterCapacityPerCm, 10); - - if (water_level >= 0) { - existingTank.waterlevel = water_level; + console.log(`⚠️ Signal lost for tank [${tankhardwareId}] at ${existingTank.slave_disconnected_time}`); + return; // Important: do not continue in this cycle - // Save the updated tank document - await existingTank.save(); + } else if (existingTank.slave_status === "signal_lost1") { + if (!existingTank.slave_disconnected_time) { + console.log(`❗ Missing slave_disconnected_time despite signal_lost1 for tank [${tankhardwareId}]`); + return; + } - // Update linked tanks + const lostTime = moment(existingTank.slave_disconnected_time, 'DD-MMM-YYYY - HH:mm:ss'); + const minutesElapsed = now.diff(lostTime, 'minutes'); + + if (minutesElapsed >= 15) { + existingTank.slave_status = "not_working1"; + existingTank.slave_disconnected_time = now.format('DD-MMM-YYYY - HH:mm:ss'); + await existingTank.save(); + + console.log(`❌ Slave marked as not_working for tank [${tankhardwareId}] at ${existingTank.slave_disconnected_time}`); + + // Update connected output tanks + for (const outputConnection of existingTank.connections.outputConnections) { + const linkedTank = await Tank.findOne({ + customerId, + tankName: outputConnection.outputConnections, + tankLocation: outputConnection.output_type + }); + + if 
(linkedTank) { + for (const inputConnection of linkedTank.connections.inputConnections) { + if (inputConnection.inputConnections === tank_name) { + inputConnection.slave_status = "not_working1"; + await linkedTank.save(); + console.log(`🔗 Linked tank [${linkedTank.tankName}] inputConnection updated to not_working`); + } + } + } + } + + } else { + console.log(`⏳ Still within 15-minute grace: ${minutesElapsed} minutes elapsed for tank [${tankhardwareId}]`); + } + } else { + console.log(`⏩ Tank [${tankhardwareId}] already marked as ${existingTank.slave_status}, skipping update.`); + } + } + + + + + + + if (tankHeight > 0 && waterLevel >= 0) { + existingTank.waterlevel = waterLevel; + existingTank.slave_status = "working"; + existingTank.slave_disconnected_time = null; + await existingTank.save(); + for (const outputConnection of existingTank.connections.outputConnections) { const linkedTank = await Tank.findOne({ customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); if (linkedTank) { for (const inputConnection of linkedTank.connections.inputConnections) { if (inputConnection.inputConnections === tank_name) { - inputConnection.water_level = water_level; + inputConnection.water_level = waterLevel; + inputConnection. 
slave_status = "working"; await linkedTank.save(); } } } } } - } - const status = req.body.Motor_status; - - // Find the tank that contains the specified motor_id in its inputConnections - const tank = await Tank.findOne({ "connections.inputConnections.motor_id": hardwareId }); + // Update motor status + const status = Motor_status; + const motorTank = await Tank.findOne({ "connections.inputConnections.motor_id": hw_Id }); - if (!tank) { - return reply.status(404).send({ - status_code: 404, - message: 'Motor not found for the specified motor_id' - }); + if (!motorTank) { + console.log('⚠️ Motor not found for specified motor_id'); + return; } - console.log(hardwareId,"hardwareId") - console.log(status,"status") - // Find the inputConnection with the specified motor_id - const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === hardwareId); -if (inputConnection) { - // Update the motor_status of the inputConnection - inputConnection.motor_status = status; - console.log(inputConnection) + const inputConnection = motorTank.connections.inputConnections.find(conn => conn.motor_id === hw_Id); + if (inputConnection) { + inputConnection.motor_status = status; + + const now1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + const nowMoment = moment(now1, 'DD-MMM-YYYY - HH:mm:ss'); + const startMoment = moment(inputConnection.startTime, 'DD-MMM-YYYY - HH:mm:ss'); + const stopMoment = moment(inputConnection.stopTime, 'DD-MMM-YYYY - HH:mm:ss'); + if (inputConnection.motor_stop_status === "1" && status === 2 && nowMoment.diff(stopMoment, 'seconds') >= 15 && inputConnection.motor_on_type !== "forced_manual") { + const currentTime = moment().tz('Asia/Kolkata'); + const formattedTime = currentTime.format('DD-MMM-YYYY - HH:mm'); + const startTime1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + const startInstanceId = `${hw_Id}${formattedTime}`; + + inputConnection.motor_stop_status = "2"; + 
inputConnection.motor_on_type = "forced_manual"; + inputConnection.startTime = startTime1; + inputConnection.start_instance_id = startInstanceId; + const newMotorData = new MotorData({ + customerId:motorTank.customerId, + motor_id: hw_Id, + start_instance_id: startInstanceId, + supplierTank: inputConnection.inputConnections, + receiverTank: motorTank.tankName, + supplier_type: inputConnection.input_type, + receiver_type: motorTank.tankLocation, + startTime: formattedTime, + receiverInitialwaterlevel: parseInt(motorTank.waterlevel, 10), + started_by:"manual" + }); + await newMotorData.save(); + } + + if (inputConnection.motor_stop_status === "2" && status === 1 && nowMoment.diff(startMoment, 'seconds') >= 15) { + const motorData = await MotorData.findOne({ customerId:motorTank.customerId, motor_id: hw_Id, start_instance_id: inputConnection.start_instance_id }); + const startinstance = inputConnection.start_instance_id; + const currentTime = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + const stopTime1 = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm:ss'); + inputConnection.motor_stop_status = "1"; + inputConnection.motor_on_type = "manual"; + inputConnection.stopTime = stopTime1; + inputConnection.start_instance_id = null; + + const motorId = hw_Id; + if (motorIntervals[motorId]) { + clearInterval(motorIntervals[motorId]); + delete motorIntervals[motorId]; + console.log("motor interval deleted") + } + + + console.log(motorData,"motorData") + if (motorData) { + + const receiverTank = await Tank.findOne({ customerId:motorTank.customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }); + const receiverFinalWaterLevel = parseInt(receiverTank.waterlevel, 10); + const quantityDelivered = receiverFinalWaterLevel - parseInt(motorData.receiverInitialwaterlevel, 10); + const water_pumped_till_now = parseInt(receiverTank.total_water_added_from_midnight, 10); + const totalwaterpumped = quantityDelivered + 
water_pumped_till_now; + const start = moment(motorData.startTime, 'DD-MMM-YYYY - HH:mm'); + const stop = moment(currentTime, 'DD-MMM-YYYY - HH:mm'); + const duration = moment.duration(stop.diff(start)); + const runtime = Math.floor(duration.asMinutes()); // runtime in minutes + + + await Tank.findOneAndUpdate( + { customerId:motorTank.customerId, tankName: motorData.receiverTank, tankLocation: motorData.receiver_type.toLowerCase() }, + { $set: { total_water_added_from_midnight: totalwaterpumped } } + ); - // Check if motor_stop_status is "1" and status is "2" - if (inputConnection.motor_stop_status === "1" && status === "2") { - console.log("got into forced manual") - console.log(inputConnection.motor_on_type,"before if motor on type") - // Check if motor_on_type is not already "forced_manual" - if (inputConnection.motor_on_type !== "forced_manual") { - inputConnection.motor_on_type = "forced_manual"; - console.log("entered forced manual of if") - inputConnection.motor_stop_status = "2"; - // Update startTime to the current time in the specified format - const currentTime = new Date(); - const formattedTime = currentTime.toLocaleString('en-GB', { - day: '2-digit', - month: 'short', - year: 'numeric', - hour: '2-digit', - minute: '2-digit', - hour12: false, - }).replace(',', ''); - inputConnection.startTime = formattedTime; + await MotorData.updateOne( + { customerId:motorTank.customerId, motor_id: motorId, start_instance_id: startinstance }, + { + $set: { + stopTime: currentTime, + receiverfinalwaterlevel: receiverFinalWaterLevel.toString(), + quantity_delivered: quantityDelivered.toString(), + runtime: runtime.toString(), + stopped_by:"manual" + } + } + ); + } } + await motorTank.save(); + + } + + console.log(`✅ Data processed successfully for hw_Id: ${hw_Id}`); + + } catch (err) { + console.error('❌ Error processing IoT data:', err.message); } - // Check if motor_stop_status is "1" and status is "2" - if (inputConnection.motor_stop_status === "2" && status === 
"1") { - console.log("got into forced manual stop") - console.log(inputConnection.motor_on_type,"before if motor on type stop") - // Check if motor_on_type is not already "forced_manual" - if (inputConnection.motor_on_type = "forced_manual") { - inputConnection.motor_on_type = "manual"; - console.log("entered forced manual of if of stop") - - // Update startTime to the current time in the specified format - - - inputConnection.motor_stop_status = "1"; +} + + + + + + +function logSets() { + console.log("Subscribed Topics:", Array.from(subscribedTopics)); + console.log("Active Devices:", Array.from(activeDevices)); + console.log("motorIntervals:", motorIntervals); +} + +// Call logSets every 30 seconds +setInterval(logSets, 30000); + + + + + + + + + + +const sendMotorNotifications = async () => { + // console.log("🔄 Checking for motor notifications..."); + + // Find motors that need a start or stop notification + const motors = await Tank.find({ + "connections.inputConnections.motor_id": { $exists: true }, + }); + + for (const motorTank of motors) { + const inputConnection = motorTank.connections.inputConnections.find( + (conn) => conn.motor_id + ); + const status = inputConnection.motor_status + //console.log("motorTank",inputConnection) +// console.log("inputConnection.motor_on_type ",inputConnection.motor_on_type ) + if (!inputConnection) continue; + + const { customerId, blockName, tankName } = motorTank; + const fcmTokens = await getFcmTokens(customerId); // Get FCM tokens for this customer + if (!fcmTokens.length) continue; + + // 🔹 Motor Start Condition + if ( + inputConnection.motor_stop_status === "2" && inputConnection.motor_on_type === "forced_manual" && + !motorTank.motor_start_notified + ) { + console.log("✅ Sending Motor Start Notification..."); + + eventEmitter.emit( + "sendMotorStartNotification", + inputConnection.motor_id, + customerId, + fcmTokens, + inputConnection.water_level || 0, + blockName, + tankName, + "forced_manual", + 
inputConnection.manual_threshold_time + ); + + // Mark notification as sent + motorTank.motor_start_notified = true; + motorTank.motor_stop_notified = false; // Reset stop notification flag + await motorTank.save(); + } + + // 🔹 Motor Stop Condition + if ( + inputConnection.motor_stop_status === "1" && + !motorTank.motor_stop_notified && inputConnection.motor_on_type === "forced_manual" + ) { + console.log("✅ Sending Motor Stop Notification..."); + + eventEmitter.emit( + "sendMotorStopNotification", + inputConnection.motor_id, + customerId, + fcmTokens, + inputConnection.water_level || 0, + blockName, + tankName, + "forced_manual" + ); + + // Mark notification as sent + motorTank.motor_stop_notified = true; + motorTank.motor_start_notified = false; // Reset start notification flag + await motorTank.save(); + } + } + }; + + const getFcmTokens = async (customerId) => { + const user = await User.findOne({ customerId }).select("fcmIds"); + return user?.fcmIds?.filter((token) => token) || []; + }; + + //Run the notification check every second + cron.schedule("* * * * * *", async () => { + await sendMotorNotifications(); + }); + + + +// +// API function to get survey data for a particular installer +// +exports.getPendingAndCompletedsurveyOfparticularInstaller = async (request, reply) => { + try { + const { installationId } = request.params; + const { survey_status } = request.body; + + const surveyData = await User.find({ installationId, survey_status }); + + reply.send({ + status_code: 200, + surveyData, + }); + } catch (err) { + console.error('❌ Error fetching survey data:', err); + throw boom.boomify(err); + } +}; + + + + +//const mqtt = require('mqtt'); + +// Connect to test MQTT broker +const client2 = mqtt.connect('mqtt://34.100.133.20:1884', { + clientId: `mqtt_test_${Math.random().toString(16).substr(2, 8)}`, + clean: true, + reconnectPeriod: 2000, +}); +// Sets to track active devices +const subscribedTopics2 = new Set(); +const activeDevices2 = new Set(); // 
Keep track of active devices + +client2.on('connect', () => { + console.log('✅ Connected to TEST MQTT broker'); + + // Subscribe to the announcement topic + client2.subscribe('water/iot-data/announce', { qos: 1 }, (err) => { + if (err) { + console.error('❌ Failed to subscribe to announce topic:', err); + } else { + console.log('📡 Subscribed to water/iot-data/announce'); + } + }); +}); + +client2.on('message', (topic, message) => { + console.log(`📩 [${topic}] ${message.toString()}`); + + try { + const data = JSON.parse(message.toString()); + + // If announcement message received + if (topic === 'water/iot-data/announce') { + if (data.objects && data.objects.hw_Id) { + const hw_Id = data.objects.hw_Id; + const deviceTopic = `water/iot-data/${hw_Id}`; + + if (!subscribedTopics2.has(deviceTopic)) { + client2.subscribe(deviceTopic, { qos: 1 }, (err) => { + if (err) { + console.error(`❌ Failed to subscribe to ${deviceTopic}:`, err); + } else { + console.log(`✅ Subscribed to ${deviceTopic}`); + subscribedTopics2.add(deviceTopic); + } + }); + } else { + console.log(`🔄 Already subscribed to ${deviceTopic}`); } + } else { + console.error('❌ Invalid announce message, missing hw_Id'); + } } - + } catch (err) { + console.error('❌ Failed to parse message:', err); + } +}); + +client2.on('error', (err) => console.error('❌ MQTT Error:', err)); +client2.on('close', () => console.log('⚠️ MQTT2 Connection Closed.')); +client2.on('offline', () => console.log('⚠️ MQTT Broker Offline.')); + + + function logSets1() { + console.log("Subscribed Topics2:", Array.from(subscribedTopics2)); + console.log("Active Devices2:", Array.from(activeDevices2)); } +// Call logSets every 30 seconds +setInterval(logSets1, 30000); - // Save the updated tank - await tank.save(); - // Send the latest three documents - const latestOttanks = await IotData.find({ hardwareId }) - .sort({ date: -1, time: -1 }); - reply.code(200).send({ latestOttanks }); + + + + + + + + 
+exports.getPendingAndCompletedsurveyOfparticularInstaller = async (request, reply) => { + try { + const { installationId } = request.params; + const survey_status = request.body; + + + const surveydata = await User.find({ + installationId, + survey_status, + + }); + + + // Send the response, including both total consumption and filtered consumption records + reply.send({ + status_code: 200, + surveydata, + + }); } catch (err) { - // send an error response - reply.code(500).send({ error: err.message }); + throw boom.boomify(err); } }; - +exports.consumptionofparticulartank = async (request, reply) => { + try { + const { customerId } = request.params; + const { startDate, stopDate, tankName, tankLocation, block } = request.body; -// exports.IotDevice3 = async (req, reply) => { -// try { -// const { hardwareId, mode, tanks } = req.body; - -// // create a new tank document with the current date and time -// const currentDate = new Date(); -// const date = currentDate.toISOString(); // save the date as an ISO string -// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); - -// // Create an array of tank documents -// const tankDocuments = tanks.map(tank => ({ -// tankhardwareId: tank.tankhardwareId, -// tankHeight: tank.tankHeight, -// maxLevel: tank.maxLevel, -// minLevel: tank.minLevel, -// date: date, -// time: time -// })); - -// // create a new IotData document with the provided data -// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); - -// // save the document to MongoDB -// await ottank.save(); - -// // Delete excess records (keep only the latest three records) -// const recordsToKeep = 3; -// const recordsToDelete = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }) -// .skip(recordsToKeep); - -// for (const record of recordsToDelete) { -// await record.remove(); -// } - -// // Update waterlevel in tanksSchema for each tank -// for (const tank of tanks) { -// 
const { tankhardwareId, tankHeight } = tank; - -// // Find the corresponding tank in tanksSchema -// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); - - -// if (existingTank) { -// // Update the waterlevel using the tankHeight value - -// const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; -// console.log(tank_height1, 25); - -// // The value of tank_height1 is a number, not a string, so you cannot use replace on it. -// // If you want to format it with commas, you can create a function to add commas to a number. -// function numberWithCommas(x) { -// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); -// } - -// // Now you can use the function to format the tank_height1 value with commas. -// const formatted_tank_height1 = numberWithCommas(tank_height1); -// console.log(formatted_tank_height1, 25); - -// const tank_height = parseInt(formatted_tank_height1.replace(/,/g, ''), 10); -// console.log(tank_height); -// // console.log(tank_height,1) -// const water_level_height = tank_height - tankHeight -// console.log(water_level_height,2) - -// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10) -// console.log(waterCapacityPerCm,3) -// const water_level = water_level_height * waterCapacityPerCm; -// console.log(water_level, 4); - -// // Function to add commas to a number -// function numberWithCommas(x) { -// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); -// } - -// const formatted_water_level = numberWithCommas(water_level); -// console.log(formatted_water_level, 4); - -// existingTank.waterlevel = parseInt(formatted_water_level.replace(/,/g, ''), 10); -// console.log(existingTank.waterlevel); - - - -// // Save the updated tank document -// await existingTank.save(); -// for (const outputConnection of existingTank.connections.outputConnections) { -// const linkedTank = await Tank.findOne({ customerId: customerId, tankName: 
outputConnection.outputConnections, tankLocation: outputConnection.output_type }); -// if (linkedTank) { -// // linkedTank.waterlevel = existingTank.waterlevel; -// //await linkedTank.save(); + // Convert input dates into proper JavaScript Date objects for comparison + const start = moment(startDate, "DD-MMM-YYYY - HH:mm").toDate(); + const end = moment(stopDate, "DD-MMM-YYYY - HH:mm").toDate(); + end.setHours(23, 59, 59, 999); // Ensure full day is included -// // Update water level of tanks linked through input connections of the linked tank -// for (const inputConnection of linkedTank.connections.inputConnections) { -// if (inputConnection.inputConnections === tank_name) { -// inputConnection.water_level = water_level.toString(); -// await linkedTank.save(); -// } -// } -// } -// } -// } -// } - -// // Send the latest three documents -// const latestOttanks = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }); - -// reply.code(200).send({ latestOttanks }); -// } catch (err) { -// // send an error response -// reply.code(500).send({ error: err.message }); -// } -// }; + // Find the tank by customerId, tankLocation, and tankName + const tank = await Tank.findOne({ + customerId, + tankLocation: tankLocation || "overhead", // Default to "overhead" if not provided + tankName, + }); -// exports.IotDevice1 = async (req, reply) => { -// try { -// const { hardwareId, mode, tanks } = req.body; + if (!tank) { + return reply.status(404).send({ + status_code: 404, + message: "Tank not found", + }); + } + + const waterlevel_at_midnight = parseInt(tank.waterlevel_at_midnight.replace(/,/g, ""), 10); + const total_water_added_from_midnight = parseInt(tank.total_water_added_from_midnight.replace(/,/g, ""), 10); + const waterlevel = parseInt(tank.waterlevel.replace(/,/g, ""), 10); + + // Fetch all records for the tank (no date filtering yet) + const tankConsumptions = await TankConsumptionOriginalSchema.find({ + customerId, + tankName, + tankLocation: 
tankLocation, + }); + + // Filter records in JavaScript by comparing the 'time' field after converting to Date + const filteredConsumptions = tankConsumptions.filter((record) => { + const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); + return recordTime >= start && recordTime <= end; + }); + + // Sort filtered records by date (ascending) + filteredConsumptions.sort((a, b) => { + const dateA = moment(a.time, "DD-MMM-YYYY - HH:mm").toDate(); + const dateB = moment(b.time, "DD-MMM-YYYY - HH:mm").toDate(); + return dateA - dateB; // Sort in ascending order + }); + + + // Calculate total consumption from filtered records + const total_consumption_from_records = filteredConsumptions.reduce((acc, record) => { + return acc + parseInt(record.consumption, 10); + }, 0); + + // Calculate final consumption + const consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; -// // create a new tank document with the current date and time -// const currentDate = new Date(); -// const date = currentDate.toISOString(); // save the date as an ISO string -// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + // Prepare response data + const tankData = { + tankname: tank.tankName, + totalConsumption: consumption, + block: tank.blockName, + TypeofWater: tank.typeOfWater, + location: tank.tankLocation, + capacity: tank.capacity, + waterlevel: tank.waterlevel, + }; + const stopDateMoment = moment(stopDate, "DD-MMM-YYYY - HH:mm"); +const today = moment().startOf('day'); -// // Create an array of tank documents -// const tankDocuments = tanks.map(tank => ({ -// tankhardwareId: tank.tankhardwareId, -// tankHeight: tank.tankHeight, -// maxLevel: tank.maxLevel, -// minLevel: tank.minLevel, -// date: date, -// time: time -// })); +if (stopDateMoment.isSame(today, 'day')) { + const latestConsumption = (waterlevel_at_midnight + total_water_added_from_midnight) - 
waterlevel; + const nowFormatted = moment().format("DD-MMM-YYYY - HH:mm"); + + filteredConsumptions.push({ + tankName: tank.tankName, + consumption: latestConsumption.toString(), + time: nowFormatted + }); +} + + // Send the response, including both total consumption and filtered consumption records + reply.send({ + status_code: 200, + tankData, + totalConsumption: consumption, + consumptionRecords: filteredConsumptions, + }); + } catch (err) { + throw boom.boomify(err); + } +}; -// // create a new IotData document with the provided data -// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); -// // save the document to MongoDB -// await ottank.save(); +// // Set start and end dates +// const startDate = new Date("2024-08-20T00:00:00Z"); +// const endDate = new Date("2024-11-04T00:00:00Z"); -// // Delete excess records (keep only the latest three records) -// const recordsToKeep = 3; -// const recordsToDelete = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }) -// .skip(recordsToKeep); +// // Tank names array with respective blocks +// const tanks = [ +// { tankName: "REAL TANK OH", block: "A" }, +// { tankName: "DUMMY TANK OH1", block: "BLOCK C" }, +// { tankName: "DUMMY TANK OH2", block: "BLOCK D" }, +// { tankName: "DUMMY TANK OH3", block: "BLOCK C" }, +// { tankName: "DUMMY TANK OH4", block: "BLOCK C" }, +// { tankName: "DUMMY TANK OH5", block: "BLOCK C" }, +// { tankName: "DUMMY TANK OH6", block: "BLOCK C" } +// ]; -// for (const record of recordsToDelete) { -// await record.remove(); -// } -// console.log(tanks) -// // Update waterlevel in tanksSchema for each tank -// for (const tank of tanks) { -// console.log(tank) -// const { tankhardwareId, tankHeight } = tank; -// console.log(hardwareId,tankhardwareId) -// // Find the corresponding tank in tanksSchema -// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); -// if (!existingTank) { -// console.log(`No tank found for tankhardwareId 
'${tankhardwareId}'. Skipping.`); -// continue; // Skip to the next iteration -// } -// console.log(existingTank,"existing tank") -// if (existingTank) { -// // Update the waterlevel using the tankHeight value - -// const tank_height1 = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; -// console.log(tank_height1, 25); - -// // The value of tank_height1 is a number, not a string, so you cannot use replace on it. -// // If you want to format it with commas, you can create a function to add commas to a number. -// function numberWithCommas(x) { -// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); -// } - -// // Now you can use the function to format the tank_height1 value with commas. -// const formatted_tank_height1 = numberWithCommas(tank_height1); -// console.log(formatted_tank_height1, 25); - -// const tank_height = parseInt(formatted_tank_height1.replace(/,/g, ''), 10); -// console.log(tank_height); -// // console.log(tank_height,1) -// const water_level_height = tank_height - tankHeight -// console.log(water_level_height,2) - -// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10) -// console.log(waterCapacityPerCm,3) -// const water_level = water_level_height * waterCapacityPerCm; -// console.log(water_level, 4); +// const customerId = "AWSUSKY4"; +// const tankLocation = "overhead"; +// const typeofwater = "Bore Water"; -// // Function to add commas to a number -// function numberWithCommas(x) { -// return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, ","); +// // Function to format date to "DD-MMM-YYYY - HH:mm" +// function formatDateCustom(date) { +// const options = { day: '2-digit', month: 'short', year: 'numeric' }; +// return date.toLocaleDateString('en-GB', options).replace(/ /g, '-') + " - 23:55"; // } -// const formatted_water_level = numberWithCommas(water_level); -// console.log(formatted_water_level, 4); - -// existingTank.waterlevel = parseInt(formatted_water_level.replace(/,/g, ''), 
10); -// console.log(existingTank.waterlevel); +// // Main function to generate data +// async function generateData() { +// for (let date = new Date(startDate); date <= endDate; date.setDate(date.getDate() + 1)) { +// const formattedDate = formatDateCustom(date); // Format date to "DD-MMM-YYYY - 23:55" +// for (const { tankName, block } of tanks) { +// try { +// const existingRecord = await TankConsumptionOriginalSchema.findOne({ +// customerId: customerId, +// tankName: tankName, +// tankLocation: tankLocation, +// time: formattedDate +// }).exec(); +// console.log(`Checking record for ${tankName} on ${formattedDate}: ${existingRecord ? 'Exists' : 'Does not exist'}`); -// // Save the updated tank document -// await existingTank.save(); -// for (const outputConnection of existingTank.connections.outputConnections) { -// const linkedTank = await Tank.findOne({ customerId: customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); -// if (linkedTank) { -// // linkedTank.waterlevel = existingTank.waterlevel; -// //await linkedTank.save(); +// if (!existingRecord) { +// // Random consumption between 7000 and 8000 +// const randomConsumption = Math.floor(Math.random() * (8000 - 7000 + 1)) + 7000; -// // Update water level of tanks linked through input connections of the linked tank -// for (const inputConnection of linkedTank.connections.inputConnections) { -// if (inputConnection.inputConnections === tank_name) { -// inputConnection.water_level = water_level.toString(); -// await linkedTank.save(); -// } +// // Create a new document and save it +// const newRecord = new TankConsumptionOriginalSchema({ +// customerId: customerId, +// tankName: tankName, +// tankLocation: tankLocation, +// consumption: randomConsumption.toString(), +// time: formattedDate, +// block: block, +// typeofwater: typeofwater, +// __v: 0 +// }); +// await newRecord.save(); // Use .save() method to insert the record +// console.log(`Inserted record 
for ${tankName} on ${formattedDate}`); +// } +// } catch (error) { +// console.error(`Failed to check or insert record for ${tankName} on ${formattedDate}:`, error); // } -// } // } -// } // } +// console.log("Data generation complete."); +// } -// // Send the latest three documents -// const latestOttanks = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }); +// // Run the data generation function +// generateData(); -// reply.code(200).send({ latestOttanks }); -// } catch (err) { -// // send an error response -// reply.code(500).send({ error: err.message }); -// } -// }; +async function removeDuplicates () { + try { + // Step 1: Find duplicates, considering time and ignoring case for typeofwater + const duplicates = await TankConsumptionOriginalSchema.aggregate([ + { + $group: { + _id: { + customerId: "$customerId", + tankName: "$tankName", + time: "$time" + }, + count: { $sum: 1 }, + ids: { $push: "$_id" }, // Store the _id values for further processing + latestConsumption: { $max: { $toInt: "$consumption" } }, // Get the max consumption + latestTypeofwater: { $last: "$typeofwater" } // Get the last typeofwater value + } + }, + { + $match: { + count: { $gt: 1 } // Only keep groups with more than one occurrence + } + } + ]); -// exports.IotDevice1 = async (req, reply) => { -// try { -// const { hardwareId, mode, tanks } = req.body; + console.log(`Found ${duplicates.length} groups of duplicates.`); -// // create a new tank document with the current date and time -// const currentDate = new Date(); -// const date = currentDate.toISOString(); // save the date as an ISO string -// const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); + // Step 2: Prepare delete operations + for (const duplicateGroup of duplicates) { + // Filter the ids based on the maximum time to keep the latest entry + const idsToDelete = duplicateGroup.ids.filter(id => { + return id !== duplicateGroup.ids[0]; // Keep the first, delete 
the rest + }); -// // Create an array of tank documents -// const tankDocuments = tanks.map(tank => ({ -// tankhardwareId: tank.tankhardwareId, -// tankHeight: tank.tankHeight, -// maxLevel: tank.maxLevel, -// minLevel: tank.minLevel, -// date: date, -// time: time -// })); + for (const id of idsToDelete) { + try { + await TankConsumptionOriginalSchema.deleteOne({ _id: id }); + console.log(`Deleted duplicate record with ID: ${id}`); + } catch (deleteError) { + console.error(`Failed to delete record with ID ${id}:`, deleteError); + } + } + } -// // create a new IotData document with the provided data -// const ottank = new IotData({ hardwareId, mode, tanks: tankDocuments, date, time }); + console.log("Duplicate removal complete."); + } catch (error) { + console.error("Failed to remove duplicates:", error); + } +} -// // save the document to MongoDB -// await ottank.save(); +// Run the remove duplicates function +// removeDuplicates(); +console.log("this is for testing autopush,line located in tankscontroller") -// // Delete excess records (keep only the latest three records) -// const recordsToKeep = 3; -// const recordsToDelete = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }) -// .skip(recordsToKeep); -// for (const record of recordsToDelete) { -// await record.remove(); -// } -// // Update waterlevel in tanksSchema for each tank -// for (const tank of tanks) { -// const { tankhardwareId, tankHeight } = tank; +// const calculateDailyConsumptionAndNotify = async () => { +// try { +// const today = moment().startOf("day"); +// const yesterday = moment(today).subtract(1, "days"); + +// // Fetch all active users +// const activeUsers = await User.find({ }); + +// for (const user of activeUsers) { +// const { customerId, fcmIds } = user; + +// // Fetch daily consumption for the customer +// const consumptions = await TankConsumptionOriginalSchema.find({ +// customerId, +// time: { +// $gte: yesterday.format("DD-MMM-YYYY - HH:mm"), +// $lt: 
today.format("DD-MMM-YYYY - HH:mm"), +// }, +// }); -// if (tankHeight === null || tankHeight === undefined) { -// continue; // Skip this iteration and move to the next tank +// // Calculate total consumption +// const totalConsumption = consumptions.reduce((total, record) => { +// return total + parseInt(record.consumption, 10); +// }, 0); + +// // Prepare tank-wise consumption details +// const tankDetails = consumptions.map((record) => ({ +// tankName: record.tankName, +// consumption: record.consumption, +// })); + +// // Send notification +// const notificationTitle = "Daily Water Consumption Report"; +// const notificationBody = ` +// Total Consumption: ${totalConsumption} liters +// Tank Details: ${tankDetails +// .map((tank) => `${tank.tankName}: ${tank.consumption} liters`) +// .join(", ")} +// `; + +// if (fcmIds && fcmIds.length > 0) { +// await sendNotification(fcmIds, notificationTitle, notificationBody); // } -// // Find the corresponding tank in tanksSchema -// const existingTank = await Tank.findOne({ hardwareId, tankhardwareId }); - -// if (existingTank) { -// // Update the waterlevel using the tankHeight value -// const tank_height = parseInt(existingTank.height.replace(/,/g, ''), 10) * 30.48; -// const water_level_height = tank_height - tankHeight; -// const customerId = existingTank.customerId; -// const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); -// let water_level = water_level_height * waterCapacityPerCm; -// water_level = Math.round(water_level); // Round to nearest whole number -// existingTank.waterlevel = water_level.toString(); // Convert to string as per schema definition -// const tank_name = existingTank.tankName; -// // Save the updated tank document -// await existingTank.save(); +// } -// // Update water level of tanks linked through output connections -// for (const outputConnection of existingTank.connections.outputConnections) { -// const linkedTank = await Tank.findOne({ customerId: 
customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); -// if (linkedTank) { -// // linkedTank.waterlevel = existingTank.waterlevel; -// //await linkedTank.save(); +// console.log("Daily consumption notifications sent successfully."); +// } catch (err) { +// console.error("Error sending daily consumption notifications:", err); +// } +// }; -// // Update water level of tanks linked through input connections of the linked tank -// for (const inputConnection of linkedTank.connections.inputConnections) { -// if (inputConnection.inputConnections === tank_name) { -// inputConnection.water_level = water_level.toString(); -// await linkedTank.save(); -// } -// } -// } + +// cron.schedule("0 11:57 * * *", async () => { +// console.log("Starting daily consumption notification task..."); +// await calculateDailyConsumptionAndNotify(); +// }); + +// cron.schedule( +// "0 9 * * *", +// async () => { +// console.log("Starting daily consumption notification task..."); +// await calculateDailyConsumptionAndNotify(); +// }, +// { +// timezone: "Asia/Kolkata", // Specify the timezone +// } +// ); + +// const calculateDailyConsumptionAndNotify = async () => { +// try { +// const today = moment().startOf("day"); +// const yesterday = moment(today).subtract(1, "days"); + +// // Fetch all active users +// const activeUsers = await User.find({}); + +// for (const user of activeUsers) { +// const { customerId, fcmIds } = user; + +// // Fetch daily consumption for the customer +// const consumptions = await TankConsumptionOriginalSchema.find({ +// customerId, +// time: { +// $gte: yesterday.format("DD-MMM-YYYY - HH:mm"), +// $lt: today.format("DD-MMM-YYYY - HH:mm"), +// }, +// }); + +// // Calculate total consumption by type of water and the water level percentage +// const consumptionSummary = consumptions.reduce((acc, record) => { +// const typeOfWater = record.typeOfWater; // Assuming this field exists +// const consumption = 
parseInt(record.consumption, 10); +// const waterLevel = parseInt(record.waterLevel, 10); // Assuming waterLevel is in percentage + +// if (!acc[typeOfWater]) { +// acc[typeOfWater] = { +// totalConsumption: 0, +// tankDetails: [], +// totalWaterLevel: 0, +// count: 0, +// }; // } + +// acc[typeOfWater].totalConsumption += consumption; +// acc[typeOfWater].totalWaterLevel += waterLevel; +// acc[typeOfWater].count += 1; +// acc[typeOfWater].tankDetails.push({ +// tankName: record.tankName, +// consumption, +// waterLevel, +// }); + +// return acc; +// }, {}); + +// // Prepare notification body +// let notificationBody = "Daily Water Consumption Report:\n"; +// for (const type in consumptionSummary) { +// const { totalConsumption, tankDetails, totalWaterLevel, count } = consumptionSummary[type]; +// const averageWaterLevel = (totalWaterLevel / count).toFixed(2); // Calculate average water level +// console.log("averageWaterLevel",averageWaterLevel) +// console.log("totalConsumption",totalConsumption) + +// notificationBody += ` +// Type of Water: ${type} +// Total Consumption: ${totalConsumption} liters +// Average Water Level: ${averageWaterLevel}% +// `; +// console.log("noti---" ,notificationBody += ` +// Type of Water: ${type} +// Total Consumption: ${totalConsumption} liters +// Average Water Level: ${averageWaterLevel}% +// `) // } -// } -// // Send the latest three documents -// const latestOttanks = await IotData.find({ hardwareId }) -// .sort({ date: -1, time: -1 }); +// if (fcmIds && fcmIds.length > 0) { +// await sendNotification(fcmIds, "Daily Water Consumption Report", notificationBody); +// } +// } -// reply.code(200).send({ latestOttanks }); +// console.log("Daily consumption notifications sent successfully."); // } catch (err) { -// // send an error response -// reply.code(500).send({ error: err.message }); +// console.error("Error sending daily consumption notifications:", err); // } // }; - - -// exports.getIotD = async(req, reply) => { +// const 
calculateDailyConsumptionAndNotify = async () => { // try { -// await IotData.find({hardwareId: req.query.hardwareId}) -// .exec() -// .then((docs) => { -// reply.send({ status_code: 200, data: docs, count: docs.length }); -// }) -// .catch((err) => { -// console.log(err); -// reply.send({ error: err }); +// const today = moment().startOf("day"); +// const yesterday = moment(today).subtract(1, "days"); + +// // Fetch all active users +// const activeUsers = await User.find({}); + +// for (const user of activeUsers) { +// const { customerId, fcmIds } = user; + +// // Fetch daily consumption for the customer +// const consumptions = await TankConsumptionOriginalSchema.find({ +// customerId, +// time: { +// $gte: yesterday.format("DD-MMM-YYYY - HH:mm"), +// $lt: today.format("DD-MMM-YYYY - HH:mm"), +// }, // }); + +// // Calculate total consumption and capacities based on water type +// let totalBoreConsumption = 0; +// let totalDrinkingConsumption = 0; +// let totalBoreCapacity = 0; +// let totalDrinkingCapacity = 0; + +// for (const record of consumptions) { +// const typeOfWater = record.typeOfWater; // Assuming this field exists +// const consumption = parseInt(record.consumption, 10); +// const capacity = parseInt(record.capacity, 10); // Assuming capacity field exists + +// if (typeOfWater === "bore" || typeOfWater === "Bore Water") { +// totalBoreConsumption += consumption; +// totalBoreCapacity += capacity; +// } else if (typeOfWater === "drinking" || typeOfWater === "Drinking Water") { +// totalDrinkingConsumption += consumption; +// totalDrinkingCapacity += capacity; +// } +// } + +// // Calculate percentages +// const boreConsumptionPercentage = totalBoreCapacity +// ? ((totalBoreConsumption / totalBoreCapacity) * 100).toFixed(2) +// : 0; + +// const drinkingConsumptionPercentage = totalDrinkingCapacity +// ? 
((totalDrinkingConsumption / totalDrinkingCapacity) * 100).toFixed(2) +// : 0; + +// // Prepare notification body +// const reportDate = yesterday.format("DD-MMM-YYYY"); +// let notificationBody = `Daily Water Consumption Report for ${reportDate}:\n`; +// notificationBody += `Total Bore Consumption: ${totalBoreConsumption} liters\n`; +// notificationBody += `Bore Water Consumption Percentage: ${boreConsumptionPercentage}%\n`; +// notificationBody += `Total Drinking Consumption: ${totalDrinkingConsumption} liters\n`; +// notificationBody += `Drinking Water Consumption Percentage: ${drinkingConsumptionPercentage}%\n`; + +// // Send notification if FCM IDs are present +// if (fcmIds && fcmIds.length > 0) { +// await sendNotification(fcmIds, "Daily Water Consumption Report", notificationBody); +// } +// } + +// console.log("Daily consumption notifications sent successfully."); // } catch (err) { -// throw boom.boomify(err); +// console.error("Error sending daily consumption notifications:", err); // } -// } +// }; +// Schedule the cron job to run daily at 9 AM +// cron.schedule( +// "0 9 * * *", +// async () => { +// console.log("Starting daily consumption notification task..."); +// await calculateDailyConsumptionAndNotify(); +// }, +// { +// timezone: "Asia/Kolkata", // Specify the timezone +// } +// ); -exports.getIotD = async (req, reply) => { + +const calculateConsumptionAndNotify = async () => { try { - const latestRecords = await IotData.find({ hardwareId: req.query.hardwareId }) - .sort({ date: -1, time: -1 }) // Sort by date and time in descending order - .limit(3) // Limit the result to 3 records - .exec(); + const now = moment(); // Current time + const sixHoursAgo = moment(now).subtract(6, 'hours').startOf('hour'); // 6 hours ago - reply.send({ status_code: 200, data: latestRecords, count: latestRecords.length }); - } catch (err) { - console.error(err); - throw boom.boomify(err); - } -}; + // Fetch all active users + const activeUsers = await User.find({}); 
+ for (const user of activeUsers) { + const { customerId, fcmIds } = user; -exports.getLatestData = async (req, reply) => { - try { - const hardwareId = req.params.hardwareId; + // Fetch consumption records for the last 6 hours + const consumptions = await TankConsumptionOriginalSchema.find({ + customerId, + time: { + $gte: sixHoursAgo.format("DD-MMM-YYYY - HH:mm"), + $lt: now.format("DD-MMM-YYYY - HH:mm"), + }, + }); - // get the latest two tank documents for the current hardwareId sorted in descending order of date and time - const latestTanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 }).limit(2); + // Prepare notification body + let notificationBody = `Water Consumption Report (From ${sixHoursAgo.format( + "hh:mm A" + )} to ${now.format("hh:mm A")}):\n`; + const tankDetails = {}; + + // Aggregate consumption data by tank + for (const record of consumptions) { + const tankName = record.tankName; // Assuming this field exists + const tankLocation = record.tankLocation; // Assuming this field exists + const consumption = parseInt(record.consumption, 10); // Liters consumed + const typeOfWater = record.typeOfWater; // Type of water (e.g., bore, drinking) + const tankCapacity = parseInt(record.capacity, 10); // Tank capacity in liters + + if (!tankDetails[tankName]) { + tankDetails[tankName] = { + tankLocation, + totalConsumption: 0, + typeOfWater, + tankCapacity, + }; + } + tankDetails[tankName].totalConsumption += consumption; + } - // if the number of documents for the current hardwareId is less than two, return an error response - if (latestTanks.length < 2) { - return reply.code(404).send({ error: 'Not enough data' }); + // Format tank details for the notification + for (const tankName in tankDetails) { + const { + tankLocation, + totalConsumption, + typeOfWater, + tankCapacity, + } = tankDetails[tankName]; + const consumptionPercentage = tankCapacity + ? 
((totalConsumption / tankCapacity) * 100).toFixed(2) + : 0; + + notificationBody += + `Tank Name: ${tankName} \n`+ + `Location: ${tankLocation} \n`+ + `Total Consumption: ${totalConsumption} liters ${consumptionPercentage}% \n`+ + `Type of Water: ${typeOfWater}`; + } + + // Send notification if FCM IDs are present + if (fcmIds && fcmIds.length > 0) { + await sendNotification(fcmIds, "Water Consumption Report", notificationBody); + } } - // calculate the time difference between the latest and previous documents - const latestDate = new Date(latestTanks[0].date); - const previousDate = new Date(latestTanks[1].date); - const latestTime = latestTanks[0].time.split('.')[0]; // remove milliseconds - const previousTime = latestTanks[1].time.split('.')[0]; // remove milliseconds - latestDate.setHours(parseInt(latestTime.substring(0, 2)), parseInt(latestTime.substring(3, 5)), parseInt(latestTime.substring(6, 8))); - previousDate.setHours(parseInt(previousTime.substring(0, 2)), parseInt(previousTime.substring(3, 5)), parseInt(previousTime.substring(6, 8))); - const timeDiff = (latestDate.getTime() - previousDate.getTime()) / 1000; // convert from milliseconds to seconds - console.log(latestDate,previousDate,latestTime,previousTime,timeDiff) - reply.code(200).send({ timeDiff }); - + console.log("Consumption notifications sent successfully."); } catch (err) { - // send an error response - reply.code(500).send({ error: err.message }); + console.error("Error sending consumption notifications:", err); } }; -exports.changesurveystatus = async (req, reply) => { - try { - const customerId = req.params.customerId; - - const result = await User.findOneAndUpdate( - { customerId: customerId }, - { $set: { survey_status: req.body.survey_status } }, - { new: true } - ); +exports.sendUserSetNotifications = async (request, reply) => { + const { customerId, notificationTime, allowNotifications } = request.body; + try { + const user = await User.findOneAndUpdate( + { customerId }, + { 
notificationTime, allowNotifications }, + { new: true, upsert: true } // Create user if not exists + ); - - reply.code(200).send({ result }); - - } catch (err) { - // send an error response - reply.code(500).send({ error: err.message }); + console.log(`User ${customerId} updated: Notification Time - ${notificationTime}, Allowed - ${allowNotifications}`); + + return reply.send({ success: true, user }); + } catch (error) { + console.error("Error setting notification time:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); } }; +exports.sendUserSetLowWaterNotificationsSwitch = async (request, reply) => { + const { customerId, lowWaterAlert } = request.body; - -exports.checkStatusofIot = async (req, reply) => { try { - // get a list of unique hardware IDs in the collection - const hardwareIds = await IotData.distinct('hardwareId'); + const user = await User.findOneAndUpdate( + { customerId }, + { lowWaterAlert}, + { new: true, upsert: true } // Create user if not exists + ); - // create an empty object to store the time differences for each hardware ID - const timeDiffs = {}; + console.log(`User ${customerId} updated: Allowed - ${lowWaterAlert}`); - // loop over each hardware ID and calculate the time difference between the latest two records - for (const hardwareId of hardwareIds) { - // get the latest two records for the current hardware ID - const latestTanks = await IotData.find({ hardwareId }).sort({ date: -1, time: -1 }).limit(2); + return reply.send({ success: true, user }); + } catch (error) { + console.error("Error setting notification time:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); + } +}; - // if the number of records for the current hardware ID is less than two, skip to the next ID - if (latestTanks.length < 2) { - continue; - } +exports.sendUserSetCriticallyLowWaterNotificationsSwitch = async (request, reply) => { + const { customerId, criticalLowWaterAlert 
} = request.body; - // calculate the time difference between the latest and previous records for the current hardware ID - const latestDate = new Date(latestTanks[0].date); - const previousDate = new Date(latestTanks[1].date); - const latestTime = latestTanks[0].time.split('.')[0]; // remove milliseconds - const previousTime = latestTanks[1].time.split('.')[0]; // remove milliseconds - latestDate.setHours(parseInt(latestTime.substring(0, 2)), parseInt(latestTime.substring(3, 5)), parseInt(latestTime.substring(6, 8))); - previousDate.setHours(parseInt(previousTime.substring(0, 2)), parseInt(previousTime.substring(3, 5)), parseInt(previousTime.substring(6, 8))); - const timeDiff = (latestDate.getTime() - previousDate.getTime()) / 1000; // convert from milliseconds to seconds + try { + const user = await User.findOneAndUpdate( + { customerId }, + { criticalLowWaterAlert}, + { new: true, upsert: true } // Create user if not exists + ); - // store the time difference for the current hardware ID - timeDiffs[hardwareId] = timeDiff; - } + console.log(`User ${customerId} updated: Allowed - ${criticalLowWaterAlert}`); - // send the time differences for all hardware IDs - reply.code(200).send({ timeDiffs }); - - } catch (err) { - // send an error response - reply.code(500).send({ error: err.message }); + return reply.send({ success: true, user }); + } catch (error) { + console.error("Error setting notification time:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); } }; -exports.totalwaterLevelSum = async (request, reply) => { - const { tankLocation, typeOfWater } = request.query; +exports.sendUserManualStartAndStop = async (request, reply) => { + const { customerId, manualStartAndStopNotify } = request.body; - const waterlevelSum = await Tank.aggregate([ - { - $match: { tankLocation, typeOfWater } - }, - { - $group: { - _id: null, - totalWaterlevel: { $sum: { $toInt: '$waterlevel' } } - } - } - ]); + try { + const user = await 
User.findOneAndUpdate( + { customerId }, + { manualStartAndStopNotify}, + { new: true, upsert: true } // Create user if not exists + ); - const result = waterlevelSum[0]?totalWaterlevel : 0; + console.log(`User ${customerId} updated: Allowed - ${manualStartAndStopNotify}`); - reply.send({ waterlevelSum: result }); -} + return reply.send({ success: true, user }); + } catch (error) { + console.error("Error setting notification time:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); + } +}; +exports.sendUserAutomaticStartAndStop = async (request, reply) => { + const { customerId, automaticStartAndStopNotify } = request.body; -exports.startUpdateLoop = async (request, reply) => { - const updateInterval = 5000; + try { + const user = await User.findOneAndUpdate( + { customerId }, + { automaticStartAndStopNotify}, + { new: true, upsert: true } // Create user if not exists + ); - setInterval(async () => { - try { - const iotTank = await IotData.findOne({ hardwareId: request.body.hardwareId }); - if (!iotTank) { - console.log(`IOTtank not found for hardwareId ${request.body.hardwareId}`); - return; - } - - const currentWaterlevel = Number(iotTank.tankHeight) * 200; - const tank = await Tank.findOne({ hardwareId: iotTank.hardwareId }); - - let combinedWaterlevel; - if (tank) { - combinedWaterlevel = currentWaterlevel + Number(tank.waterlevel); - } else { - combinedWaterlevel = currentWaterlevel; - } - - await Tank.updateOne({ hardwareId: iotTank.hardwareId }, { $set: { waterlevel: combinedWaterlevel } }); - - console.log(`Waterlevel updated successfully for hardwareId ${iotTank.hardwareId}`); - console.log(`Previous waterlevel: ${tank ? 
tank.waterlevel : 0}`); - console.log(`Current waterlevel: ${currentWaterlevel}`); - console.log(`Combined waterlevel: ${combinedWaterlevel}`); - } catch (err) { - console.error(err); - } - }, updateInterval); -}; + console.log(`User ${customerId} updated: Allowed - ${automaticStartAndStopNotify}`); + return reply.send({ success: true, user }); + } catch (error) { + console.error("Error setting notification time:", error); + return reply.status(500).send({ success: false, message: "Internal server error" }); + } +}; -// exports.updatewaterlevelsatmidnight = async (req, reply) => { +// const calculateWaterLevelAndNotify = async () => { // try { -// // Schedule the task to run every day at 10 seconds past the minute -// cron.schedule('0 0 * * *', async () => { -// try { -// const tanks = await Tank.find({ customerId: req.query.customerId }); -// for (const tank of tanks) { -// tank.waterlevel_at_midnight = tank.waterlevel; -// console.log(tank.waterlevel_at_midnight) -// await tank.save(); -// } -// console.log('Waterlevel noted in waterlevel_at_midnight'); -// } catch (error) { -// console.error('Error occurred:', error); +// const now = moment(); +// const sixHoursAgo = moment().subtract(6, "hours"); + +// console.log(`Calculating water level between ${sixHoursAgo.format("HH:mm A")} and ${now.format("HH:mm A")}`); + +// const tanks = await Tank.find({}); + +// for (const tank of tanks) { +// const { +// customerId, +// tankName, +// tankLocation, +// typeOfWater, +// capacity, +// waterlevel, +// waterlevel_at_midnight, +// } = tank; + +// // ✅ Fix: Remove commas before parsing numbers +// const tankCapacity = parseFloat(capacity.replace(/,/g, '')) || 0; +// const currentWaterLevel = parseFloat(waterlevel.replace(/,/g, '')) || 0; +// const midnightWaterLevel = parseFloat(waterlevel_at_midnight.replace(/,/g, '')) || 0; + +// if (tankCapacity === 0) { +// console.log(`Skipping tank ${tankName} due to zero capacity`); +// continue; // } -// }); -// await Tank.find({ 
customerId: req.query.customerId }) -// .exec() -// .then((docs) => { -// reply.send({ status_code: 200, data: docs, count: docs.length }); -// }) -// .catch((err) => { -// console.log(err); -// reply.send({ error: err }); -// }); -// } catch (err) { -// throw boom.boomify(err); -// } -// }; +// const currentWaterLevelPercentage = ((currentWaterLevel / tankCapacity) * 100).toFixed(2); +// const waterUsedSinceMidnight = midnightWaterLevel - currentWaterLevel; +// const waterUsedPercentageSinceMidnight = ((waterUsedSinceMidnight / tankCapacity) * 100).toFixed(2); +// const user = await User.findOne({ customerId }); +// if (!user || !user.fcmIds || user.fcmIds.length === 0) { +// console.log(`No FCM tokens for customer: ${customerId}`); +// continue; +// } -const updatewaterlevelsatmidnight = async () => { - console.log('Cron job triggered at:', moment().tz('Asia/Kolkata').format()); +// let notificationBody = +// `🛢️ Tank Name: ${tankName}\n` + +// `🏢 Location: ${tankLocation}\n` + +// `💧 Type of Water: ${typeOfWater}\n` + +// `Current Water Level: ${currentWaterLevel} liters (${currentWaterLevelPercentage}%)\n`; - try { - const tanks = await Tank.find({}); - for (const tank of tanks) { - tank.waterlevel_at_midnight = tank.waterlevel; - tank.total_water_added_from_midnight = "0"; - await tank.save(); - console.log(`Updated tank ${tank._id} waterlevel_at_midnight to ${tank.waterlevel}`); - } - console.log('Waterlevel noted in waterlevel_at_midnight'); - } catch (error) { - console.error('Error occurred:', error); - } -}; +// await sendNotification(user.fcmIds, "Water Level Update", notificationBody); +// } -// Schedule the task to run every day at 13:49 IST (1:49 PM IST) -cron.schedule('0 0 * * *', updatewaterlevelsatmidnight, { - timezone: "Asia/Kolkata" -}); +// console.log("Water level notifications sent successfully."); +// } catch (err) { +// console.error("Error in water level calculation:", err); +// } +// }; +const calculateWaterLevelAndNotify = async () => { 
+ try { + const now = moment(); + const currentTime = now.format("HH:mm"); // Current time in HH:mm format -let consumptionTask; + console.log(`Current time: ${currentTime}`); -// Function to clear the specific scheduled task -const clearConsumptionSchedule = () => { - if (consumptionTask) { - consumptionTask.stop(); // Stop the existing task if it exists - consumptionTask = null; // Clear the reference - } -}; + // Get all users who have allowed notifications and have set a notification time + const users = await User.find({ allowNotifications: true, notificationTime: currentTime }); -// Function to update total consumption till midnight -const updatetotalConsumptiontillmidnight = async () => { - console.log('Cron job triggered at:', moment().tz('Asia/Kolkata').format()); + if (users.length === 0) { + console.log("No users to notify at this time."); + return; + } - try { - const tanks = await Tank.find({}); - for (const tank of tanks) { - const waterlevel_at_midnight = parseInt((tank.waterlevel_at_midnight).replace(/,/g, ''), 10); - const total_water_added_from_midnight = parseInt((tank.total_water_added_from_midnight).replace(/,/g, ''), 10); - const waterlevel = parseInt((tank.waterlevel).replace(/,/g, ''), 10); - console.log(waterlevel_at_midnight,total_water_added_from_midnight,waterlevel) - const totalconsumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel; - console.log(totalconsumption,tank.tankName) + for (const user of users) { + const { customerId, fcmIds } = user; - // Format the date in the desired format - const formattedDate = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); + if (!Array.isArray(fcmIds) || fcmIds.length === 0) { + console.log(`No valid FCM tokens for customer ID: ${customerId}`); + continue; + } - // Check if the record already exists - const existingRecord = await TankConsumptionOriginalSchema.findOne({ - customerId: tank.customerId, - tankName: tank.tankName, - tankLocation: tank.tankLocation, 
- time: formattedDate - }); + // Get tanks associated with the user + const tanks = await Tank.find({ customerId }); - if (!existingRecord) { - // Create and save the new document if it doesn't exist - const newTankConsumption = new TankConsumptionOriginalSchema({ - customerId: tank.customerId, - tankName: tank.tankName, - tankLocation: tank.tankLocation, - consumption: totalconsumption.toString(), - time: formattedDate, // Save the formatted date - block:tank.blockName, - typeofwater:tank.typeOfWater - }); + for (const tank of tanks) { + const { + tankName, + tankLocation, + typeOfWater, + capacity, + waterlevel, + waterlevel_at_midnight, + } = tank; + + // Remove commas before parsing numbers + const tankCapacity = parseFloat(capacity.replace(/,/g, '')) || 0; + const currentWaterLevel = parseFloat(waterlevel.replace(/,/g, '')) || 0; + const midnightWaterLevel = parseFloat(waterlevel_at_midnight.replace(/,/g, '')) || 0; + + if (tankCapacity === 0) { + console.log(`Skipping tank ${tankName} due to zero capacity`); + continue; + } - await newTankConsumption.save(); - console.log(`Created new record for tank ${tank.tankName} at ${formattedDate}`); - } else { - console.log(`Record already exists for tank ${tank.tankName} at ${formattedDate}`); + const currentWaterLevelPercentage = ((currentWaterLevel / tankCapacity) * 100).toFixed(2); + const waterUsedSinceMidnight = midnightWaterLevel - currentWaterLevel; + const waterUsedPercentageSinceMidnight = ((waterUsedSinceMidnight / tankCapacity) * 100).toFixed(2); + + let notificationBody = + `🛢️ Tank Name: ${tankName}\n` + + `🏢 Location: ${tankLocation}\n` + + `💧 Type of Water: ${typeOfWater}\n` + + `Current Water Level: ${currentWaterLevel} liters (${currentWaterLevelPercentage}%)\n`; + + await sendNotification(customerId, fcmIds, "Water Level Update", notificationBody); + console.log("Notification sent for tank:", tankName); } } - console.log('Waterlevel noted in waterlevel_at_midnight'); - } catch (error) { - 
console.error('Error occurred:', error); + + console.log("Water level notifications processed."); + } catch (err) { + console.error("Error in water level calculation:", err); } }; +// const calculateLowWaterLevelAndNotify = async () => { +// try { +// const now = moment(); +// const currentTime = now.format("HH:mm"); // Current time in HH:mm format -// Clear the existing schedule for this task before creating a new one -clearConsumptionSchedule(); +// console.log(`Current time: ${currentTime}`); -// Schedule the task to run every day at 12:49 PM IST and store the reference -consumptionTask = cron.schedule('50 23 * * *', updatetotalConsumptiontillmidnight, { - timezone: "Asia/Kolkata" -}); +// // Get all users who have allowed notifications and have set a notification time +// const users = await User.find({ lowWaterAlert: true }); -console.log('Scheduled task to update total consumption till midnight.'); +// if (users.length === 0) { +// console.log("No users to notify at this time."); +// return; +// } + +// for (const user of users) { +// const { customerId, fcmIds } = user; + +// if (!Array.isArray(fcmIds) || fcmIds.length === 0) { +// console.log(`No valid FCM tokens for customer ID: ${customerId}`); +// continue; +// } +// // Get tanks associated with the user +// const tanks = await Tank.find({ customerId }); + +// for (const tank of tanks) { +// const { +// tankName, +// tankLocation, +// typeOfWater, +// capacity, +// waterlevel, +// waterlevel_at_midnight, +// } = tank; + +// // Remove commas before parsing numbers +// const tankCapacity = parseFloat(capacity.replace(/,/g, '')) || 0; +// const currentWaterLevel = parseFloat(waterlevel.replace(/,/g, '')) || 0; + +// if (tankCapacity === 0) { +// console.log(`Skipping tank ${tankName} due to zero capacity`); +// continue; +// } +// const currentWaterLevelPercentage = ((currentWaterLevel / tankCapacity) * 100).toFixed(2); +// // Send notification only if water level is below 15% +// if 
(currentWaterLevelPercentage < 15) { +// let notificationBody = +// `🛢️ Tank Name: ${tankName}\n` + +// `🏢 Location: ${tankLocation}\n` + +// `💧 Type of Water: ${typeOfWater}\n` + +// `Current Water Level: ${currentWaterLevel} liters (${currentWaterLevelPercentage}%)\n`; -// const updatetotalConsumptiontillmidnight = async () => { -// console.log('Cron job triggered at:', moment().tz('Asia/Kolkata').format()); +// await sendNotification(customerId, fcmIds, "Low Water Level Alert", notificationBody); +// console.log("Notification sent for tank:", tankName); +// } else { +// console.log(`Skipping notification for tank ${tankName}, water level is above 15%`); +// } +// } +// } +// console.log("Water level notifications processed."); +// } catch (err) { +// console.error("Error in water level calculation:", err); +// } +// }; +// const calculateCriticalLowWaterLevelAndNotify = async () => { // try { -// const tanks = await Tank.find({}); -// for (const tank of tanks) { -// const waterlevel_at_midnight = parseInt((tank.waterlevel_at_midnight).replace(/,/g, ''), 10); -// const total_water_added_from_midnight = parseInt((tank.total_water_added_from_midnight).replace(/,/g, ''), 10); -// const waterlevel = parseInt((tank.waterlevel).replace(/,/g, ''), 10); -// const totalconsumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel; +// const now = moment(); +// const currentTime = now.format("HH:mm"); // Current time in HH:mm format -// // Format the date in the desired format -// const formattedDate = moment().tz('Asia/Kolkata').format('DD-MMM-YYYY - HH:mm'); +// console.log(`Current time: ${currentTime}`); -// const newTankConsumption = new TankConsumptionOriginalSchema({ -// customerId: tank.customerId, -// tankName: tank.tankName, -// tankLocation: tank.tankLocation, -// consumption: totalconsumption.toString(), -// time: formattedDate // Save the formatted date -// }); +// // Get all users who have allowed critical low water notifications +// 
const users = await User.find({ criticalLowWaterAlert: true }); -// // Save the new document -// await newTankConsumption.save(); - +// if (users.length === 0) { +// console.log("No users to notify at this time."); +// return; // } -// console.log('Waterlevel noted in waterlevel_at_midnight'); -// } catch (error) { -// console.error('Error occurred:', error); + +// for (const user of users) { +// const { customerId, fcmIds } = user; + +// if (!Array.isArray(fcmIds) || fcmIds.length === 0) { +// console.log(`No valid FCM tokens for customer ID: ${customerId}`); +// continue; +// } + +// // Get tanks associated with the user +// const tanks = await Tank.find({ customerId }); + +// for (const tank of tanks) { +// const { +// tankName, +// tankLocation, +// typeOfWater, +// capacity, +// waterlevel, +// } = tank; + +// // Remove commas before parsing numbers +// const tankCapacity = parseFloat(capacity.replace(/,/g, '')) || 0; +// const currentWaterLevel = parseFloat(waterlevel.replace(/,/g, '')) || 0; + +// if (tankCapacity === 0) { +// console.log(`Skipping tank ${tankName} due to zero capacity`); +// continue; +// } + +// const currentWaterLevelPercentage = ((currentWaterLevel / tankCapacity) * 100).toFixed(2); + +// // Send notification only if water level is below 10% +// if (currentWaterLevelPercentage < 10) { +// let notificationBody = +// `🚨 *Critical Low Water Alert!*\n\n` + +// `🛢️ *Tank Name:* ${tankName}\n` + +// `🏢 *Location:* ${tankLocation}\n` + +// `💧 *Type of Water:* ${typeOfWater}\n` + +// `🔴 *Current Water Level:* ${currentWaterLevel} liters (${currentWaterLevelPercentage}%)\n\n` + +// `⚠️ Immediate action is recommended to avoid water shortage.`; + +// await sendNotification(customerId, fcmIds, "Critical Low Water Level Alert", notificationBody); +// console.log(`Critical low water level notification sent for tank: ${tankName}`); +// } else { +// console.log(`Skipping tank ${tankName}, water level is above 10%`); +// } +// } +// } + +// 
console.log("Critical low water level notifications processed."); +// } catch (err) { +// console.error("Error in critical water level calculation:", err); // } // }; -// Schedule the task to run every day at 23:55 IST (11:55 PM IST) -// cron.schedule('55 23 * * *', updatetotalConsumptiontillmidnight, { -// timezone: "Asia/Kolkata" + + +// Run the function every minute to check if any user needs a notification +// cron.schedule('* * * * *', async () => { +// console.log("Checking for user notification times..."); +// await calculateWaterLevelAndNotify(); +// }, { +// timezone: "Asia/Kolkata", // }); +//run the every one hour +// cron.schedule('0 */3 * * *', async () => { +// console.log("Checking for user notification times..."); +// await calculateLowWaterLevelAndNotify(); +// await calculateCriticalLowWaterLevelAndNotify(); +// }, { +// timezone: "Asia/Kolkata", +// }); +// Schedule notifications at 6 AM, 12 PM, 6 PM, and 12 AM +// cron.schedule( +// "0 6,12,18,0 * * *", // Cron expression for the required times +// async () => { +// console.log("Starting scheduled consumption notification task..."); +// //await calculateConsumptionAndNotify(); +// await calculateWaterLevelAndNotify(); +// }, +// { +// timezone: "Asia/Kolkata", // Specify the timezone +// } +// ); + +// Schedule a function to run every minute +// cron.schedule('* * * * *', async () => { +// console.log("Checking for user notification times..."); +// await calculateWaterLevelAndNotify(); +// }, { +// timezone: "Asia/Kolkata", // Specify the timezone +// }); +// const updateStopTimeFormat = async () => { +// try { +// // Find records where stopTime is null or not in the required format +// const motorDataDocs = await MotorData.find(); + +// for (const doc of motorDataDocs) { +// // Parse and validate startTime +// const startTime = moment(doc.startTime, "DD-MMM-YYYY - HH:mm", true); +// if (!startTime.isValid()) { +// console.log(`Invalid startTime for record ID: ${doc._id}`); +// continue; +// } 
-exports.deletemotordatarecordsbefore7days = async (req, reply) => { - try { - // Schedule the task to run every day at 10 seconds past the minute - cron.schedule('0 0 * * *', async () => { - try { - // Run the deletion task once a day - setInterval(async () => { - await deleteOldRecords(); - }, 24 * 60 * 60 * 1000); // 24 hours in milliseconds +// // Format startTime if it's not already formatted +// const formattedStartTime = startTime.format("DD-MMM-YYYY - HH:mm"); - } catch (error) { - console.error('Error occurred:', error); - } - }); +// // Check if stopTime is valid or calculate it +// let formattedStopTime = null; +// const stopTime = moment(doc.stopTime, "DD-MMM-YYYY - HH:mm", true); - - } catch (err) { - throw boom.boomify(err); - } -}; +// if (!stopTime.isValid()) { +// // Calculate stopTime by adding 30 minutes to startTime +// formattedStopTime = startTime.clone().add(30, "minutes").format("DD-MMM-YYYY - HH:mm"); +// } else { +// // Format the existing stopTime +// formattedStopTime = stopTime.format("DD-MMM-YYYY - HH:mm"); +// } + +// // Update the document if startTime or stopTime is not correctly formatted +// if (doc.startTime !== formattedStartTime || doc.stopTime !== formattedStopTime) { +// await MotorData.updateOne( +// { _id: doc._id }, +// { +// $set: { +// startTime: formattedStartTime, +// stopTime: formattedStopTime, +// }, +// } +// ); +// console.log(`Updated record ID: ${doc._id}`); +// } +// } + +// console.log("StopTime format update completed."); +// } catch (err) { +// console.error("Error updating stopTime format:", err); +// } +// }; + +// // Call the function to update stopTime +// updateStopTimeFormat(); -exports.motorstatus = async (req, reply) => { +exports.updatetankstatus = async (req, reply) => { try { - const motor_id = req.params.motor_id; - console.log(motor_id) - - const motorInfo = await Tank.findOne({ motor_id: motor_id }); + const { customerId } = req.params; + const { tankName, tankLocation, status } = req.body; - 
console.log(motorInfo) - + if (!["active", "inactive"].includes(status)) { + return reply.code(400).send({ message: "Invalid status value" }); + } - //return update; - - reply.send({ status_code: 200,status:motorInfo.motor_status}); + // Find the main tank + const mainTank = await Tank.findOneAndUpdate( + { customerId, tankName, tankLocation }, + { $set: { status } }, + { new: true } + ); + if (!mainTank) { + return reply.code(404).send({ message: "Tank not found" }); + } + // Update status in related outputConnections tanks + await Tank.updateMany( + { + customerId, + "connections.outputConnections.outputConnections": tankName, + }, + { $set: { "connections.outputConnections.$.status": status } } + ); - } - catch (err) { - throw boom.boomify(err); + // Update status in related inputConnections tanks + await Tank.updateMany( + { + customerId, + "connections.inputConnections.inputConnections": tankName, + }, + { $set: { "connections.inputConnections.$.status": status } } + ); + + return reply.send({ message: "Tank status updated successfully" }); + } catch (error) { + console.error("Error updating tank status:", error); + return reply.code(500).send({ message: "Internal Server Error" }); } }; -exports.readMotorStatus = async (req, reply) => { - try { - const motor_id = req.query.motor_id; - console.log("entered read api for iot") - - // Perform any necessary logic based on action (1: Start, 2: Stop) - // For example, you can update a database or trigger an action - const tanks = await Tank.find({}); - let motor_stop_status = null; +exports.listofactiveandinactivetankstatus = async (req, reply) => { + try { + const { customerId } = req.params; + const status = req.query.status.toLowerCase(); - for (let tank of tanks) { - const inputConnections = tank.connections.inputConnections; - const motorConnection = inputConnections.find(conn => conn.motor_id === motor_id); - if (motorConnection) { - // Check if motor_on_type is "forced_manual" and motor_stop_status is "1" - if 
(motorConnection.motor_on_type === "forced_manual" && motorConnection.motor_stop_status === "1") { - motor_stop_status = "2"; // Send motor_stop_status as "2" - } else { - motor_stop_status = motorConnection.motor_stop_status; // Otherwise, assign its original value - } - - break; - } - + if (!["active", "inactive"].includes(status)) { + return reply.code(400).send({ message: "Invalid status value" }); } - if (!motor_stop_status) { - return reply.status(404).send({ - status_code: 404, - message: 'Motor not found for the specified motor_id' - }); - } + // Find tanks based on customerId and status + const tanks = await Tank.find({ customerId, status }); - reply.send({ - status_code: 200, - motor_stop_status: motor_stop_status - }); - } catch (err) { - throw boom.boomify(err); + return reply.send({ tanks }); + } catch (error) { + console.error("Error fetching tank list:", error); + return reply.code(500).send({ message: "Internal Server Error" }); } }; -exports.readMotorStatusFromIot = async (req, reply) => { +exports.notificationTiming = async (req, reply) => { + const { customerId, notificationPreference } = req.body; + + if (!["never", "always", "6_hours", "8_hours", "1_month"].includes(notificationPreference)) { + return reply.status(400).send({ message: "Invalid preference" }); + } + + await User.updateOne({ customerId }, { notificationPreference }); + + return reply.send({ message: "Preference updated successfully" }); +} + + + +exports.adjustMeasurement = async (req, reply) => { try { - const motor_id = req.query.motor_id; - console.log(motor_id) + const { tankName, measuredHeight } = req.body; - // Find the tank that contains the specified motor_id in its inputConnections - const tank = await Tank.findOne({ "connections.inputConnections.motor_id": motor_id }); + if (!tankName || measuredHeight === undefined) { + return reply.status(400).send({ message: "Tank name and measured height are required." 
}); + } - if (!tank) { - return reply.status(404).send({ - status_code: 404, - message: 'Motor not found for the specified motor_id' - }); - } + // Fetch tank details using tankName + const tank = await Tank.findOne({ tankName }); - // Find the inputConnection with the specified motor_id - const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === motor_id); + if (!tank) { + return reply.status(404).send({ message: "Tank not found." }); + } - // Extract motor_status and motor_stop_status from the inputConnection - const motor_status = inputConnection.motor_status; - const motor_stop_status = inputConnection.motor_stop_status; + const originalHeight = parseFloat(tank.height); // Example: 5.8 feet + const waterCapacityPerCm = parseFloat(tank.waterCapacityPerCm); // Example: 87 L/cm + const actualWaterLevel = parseFloat(tank.waterlevel); // Current water level in tank (liters) + const capacity = parseFloat(tank.capacity.replace(/,/g, "")); - // Send the response with motor_status and motor_stop_status - reply.send({ - status_code: 200, - motor_status: motor_status, - motor_stop_status: motor_stop_status - }); - } catch (err) { - throw boom.boomify(err); - } -}; + console.log("originalHeight",originalHeight) + console.log("waterCapacityPerCm",waterCapacityPerCm) + console.log("actualWaterLevel",actualWaterLevel) + console.log("capacity",capacity) + // Perform calculations + const heightDifference = originalHeight - measuredHeight; + const heightDifferenceInCm = heightDifference * 30.48; // Convert feet to cm + const calculatedWaterLevel = heightDifferenceInCm * waterCapacityPerCm; // Estimated water level in liters + console.log("heightDifference",heightDifference) + console.log("heightDifferenceInCm",heightDifferenceInCm) + console.log("calculatedWaterLevel",calculatedWaterLevel) -// exports.writeMotorStatus = async (req, reply) => { -// try { -// const motor_id = req.body.motor_id; + // **Ensure actualWaterLevel and calculatedWaterLevel 
do not exceed tank capacity** + const boundedActualWaterLevel = Math.min(actualWaterLevel, capacity); + const boundedCalculatedWaterLevel = Math.min(calculatedWaterLevel, capacity); -// // Perform any necessary logic to handle motor status update from the device + console.log("boundedActualWaterLevel",boundedActualWaterLevel) + console.log("boundedCalculatedWaterLevel",boundedCalculatedWaterLevel) -// // For example, update a database with the new status, current, and temp values -// const existingRecord = await Tank.findOne({ motor_id: motor_id }); + // Calculate original and calculated percentages correctly + const originalPercentage = (boundedActualWaterLevel / capacity) * 100; + const calculatedPercentage = (boundedCalculatedWaterLevel / capacity) * 100; -// if (existingRecord && (existingRecord.motor_stop_status === '1' || existingRecord.motor_stop_status === '2')) { -// const newMotorStatus = existingRecord.motor_stop_status; + // Calculate percentage difference + const percentageDifference = Math.abs(originalPercentage - calculatedPercentage); + let message; + if (percentageDifference === originalPercentage) { + message = "Tank details and measurement details match."; + } else { + message = "Please check the tank measurement."; + } -// if (existingRecord.motor_status !== newMotorStatus) { -// const result = await Tank.findOneAndUpdate( -// { motor_id: motor_id }, -// { $set: { motor_status: newMotorStatus } }, -// { new: true } // To return the updated document -// ); + reply.send({ + status_code: 200, + data: { + tankName, + originalHeight, + measuredHeight, + heightDifference: heightDifference.toFixed(2), + heightDifferenceInCm: heightDifferenceInCm.toFixed(2), + calculatedWaterLevel: calculatedWaterLevel.toFixed(2), + actualWaterLevel: actualWaterLevel.toFixed(2), + originalPercentage: originalPercentage.toFixed(2) + "%", + calculatedPercentage: calculatedPercentage.toFixed(2) + "%", + percentageDifference: percentageDifference.toFixed(2) + "%", + message 
+ } + }); -// reply.send({ status_code: 200, motor_status: result.motor_status }); -// } else { -// reply.send({ status_code: 200, motor_status: newMotorStatus }); -// } -// } else { -// reply.send({ status_code: 200, message: 'Motor stop status is not "on" or "off".' }); -// } -// } catch (err) { -// throw boom.boomify(err); -// } -// }; + } catch (err) { + reply.status(500).send({ message: err.message }); + } +}; -exports.writeMotorStatus = async (req, reply) => { + +exports.validateTankHeight = async (req, reply) => { try { - const motor_id = req.body.motor_id; - const status = req.body.status; + const { hardwareId, tankhardwareId } = req.params; + + if (!tankhardwareId || !hardwareId) { + return reply.status(400).send({ message: "Both tankhardwareId and hardwareId are required." }); + } + + // Find tank details from tanksSchema + const tank = await Tank.findOne({ tankhardwareId, hardwareId }); + + if (!tank) { + return reply.status(404).send({ message: "Tank not found with the given tankhardwareId and hardwareId." }); + } + + // Find corresponding IoT data from IOttankSchema + const iotTank = await IotData.findOne({ hardwareId, "tanks.tankhardwareId": tankhardwareId }); + + if (!iotTank) { + return reply.status(404).send({ message: "IoT tank data not found for the given hardwareId and tankhardwareId." 
}); + } - // Find the tank that contains the specified motor_id in its inputConnections - const tank = await Tank.findOne({ "connections.inputConnections.motor_id": motor_id }); + // Convert tank height from feet to cm + const heightInCm = parseFloat(tank.height) * 30.48; + const iotTankHeight = parseFloat(iotTank.tanks.find(t => t.tankhardwareId === tankhardwareId).tankHeight); - if (!tank) { - return reply.status(404).send({ - status_code: 404, - message: 'Motor not found for the specified motor_id' - }); - } + console.log("Converted Tank Height (cm):", heightInCm); + console.log("IoT Tank Height (cm):", iotTankHeight); - // Find the inputConnection with the specified motor_id - const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === motor_id); - - // Update the motor_status of the inputConnection - inputConnection.motor_status = status; - + // Check for extreme values + if (heightInCm < iotTankHeight) { + return reply.status(400).send({ + message: "Extreme high values detected! Tank height exceeds IoT data." + }); + } - // Save the updated tank - await tank.save(); + reply.send({ + status_code: 200, + message: "Tank height is within the valid range." + }); - // Send the response with the updated motor_status - reply.send({ - status_code: 200, - motor_status: status - }); } catch (err) { - throw boom.boomify(err); + reply.status(500).send({ message: err.message }); } }; +// exports.getActualWaterLevelInCm = async (req, reply) => { +// try { +// const { tankName } = req.params; -// const motor_id = req.body.motor_id; -// const status = req.body.status; +// if (!tankName) { +// return reply.status(400).send({ message: "Tank name is required." 
}); +// } -// // Find the tank that contains the specified motor_id in its inputConnections -// const tank = await Tank.findOne({ "connections.inputConnections.motor_id": motor_id }); +// // Fetch tank details using tankName +// const tank = await Tank.findOne({ tankName }); +// console.log("tank",tank) +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." }); +// } -// if (!tank) { -// return reply.status(404).send({ -// status_code: 404, -// message: 'Motor not found for the specified motor_id' -// }); -// } +// const actualWaterLevel = parseFloat(tank.waterlevel); // Current water level in liters +// const waterCapacityPerCm = parseFloat(tank.waterCapacityPerCm); // Liters per cm -// // Find the inputConnection with the specified motor_id -// const inputConnection = tank.connections.inputConnections.find(conn => conn.motor_id === motor_id); - -// // Update the motor_status of the inputConnection -// inputConnection.motor_status = status; +// if (!actualWaterLevel || !waterCapacityPerCm) { +// return reply.status(400).send({ message: "Tank data is incomplete for conversion." 
}); +// } -// // Update real_motor_status based on the conditions -// -// // Save the updated tank -// await tank.save(); +// // Convert actual water level from liters to cm +// const actualWaterLevelInCm = actualWaterLevel / waterCapacityPerCm; + +// reply.send({ +// status_code: 200, +// data: { +// tankName, +// actualWaterLevel: actualWaterLevel.toFixed(2) + " L", +// actualWaterLevelInCm: actualWaterLevelInCm.toFixed(2) + " cm" +// } +// }); -// // Send the response with the updated motor_status -// reply.send({ -// status_code: 200, -// motor_status: status -// }); // } catch (err) { -// throw boom.boomify(err); +// reply.status(500).send({ message: err.message }); // } // }; +// exports.getActualWaterLevelInCm = async (req, reply) => { +// try { +// const { tankName } = req.params; +// if (!tankName) { +// return reply.status(400).send({ message: "Tank name is required." }); +// } -exports.changeMotorStatus = async (req, reply) => { - try { - const motor_id = req.body.motor_id; - const action = req.body.action; +// const tank = await Tank.findOne({ tankName }); +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." 
}); +// } - // Perform any necessary logic to handle motor status update from the device +// const tankHeightFeet = parseFloat(tank.height); +// const tankHeightCm = tankHeightFeet * 30.48; - // For example, update a database with the new status, current, and temp values +// let actualWaterLevelInCm; +// let actualWaterLevelLiters; - await Tank.updateOne( - { "connections.inputConnections.motor_id": motor_id }, - { - $set: { - "connections.inputConnections.$.motor_stop_status":action , - - } - } - ); +// if (parseFloat(tank.waterlevel) > 0) { +// // ✅ Direct conversion from liters to cm +// const waterlevelLiters = parseFloat(tank.waterlevel); +// const capacityPerCm = parseFloat(tank.waterCapacityPerCm); - // Send immediat +// if (!capacityPerCm || capacityPerCm <= 0) { +// return reply.status(400).send({ message: "Invalid waterCapacityPerCm value." }); +// } - // Fetch the motor_status for the given motor_id - +// actualWaterLevelInCm = waterlevelLiters / capacityPerCm; +// actualWaterLevelLiters = waterlevelLiters; - // Send the response with motor_stop_status and motor_status - reply.send({ - status_code: 200, - motor_stop_status: action, - // motor_status: updatedMotor.motor_status // Assuming motor_status is a field in your Tank model - }); +// } else { +// // ✅ Fallback to IoT data to calculate cm & liters +// const iotData = await IotData.findOne({ +// hardwareId: tank.hardwareId, +// "tanks.tankhardwareId": tank.tankhardwareId +// }).sort({ date: -1 }); +// console.log("iotData",iotData) +// if (!iotData) { +// return reply.status(404).send({ message: "No IoT data found for this tank." }); +// } - - } catch (err) { - throw boom.boomify(err); - } -}; +// const matchingTank = iotData.tanks.find( +// t => t.tankhardwareId === tank.tankhardwareId +// ); +// if (!matchingTank) { +// return reply.status(404).send({ message: "No matching tank found in IoT data." 
}); +// } +// const tankHeightFromSensor = parseFloat(matchingTank.tankHeight); +// console.log("tankHeightFromSensor",tankHeightFromSensor) +// if (isNaN(tankHeightFromSensor)) { +// return reply.status(400).send({ message: "Invalid tankHeight from IoT data." }); +// } -exports.motortemperature = async (req, reply) => { +// actualWaterLevelInCm = tankHeightCm - tankHeightFromSensor; - try { - const motor_id = req.params.motor_id; - console.log(motor_id) - - const motorInfo = await Tank.findOne({ motor_id: motor_id }); +// const capacityPerCm = parseFloat(tank.waterCapacityPerCm); +// if (!capacityPerCm || capacityPerCm <= 0) { +// return reply.status(400).send({ message: "Invalid waterCapacityPerCm value." }); +// } - console.log(motorInfo) - +// // ✅ Convert back to liters using height in cm +// actualWaterLevelLiters = actualWaterLevelInCm * capacityPerCm; +// } - //return update; - - reply.send({ status_code: 200,temperature:motorInfo.motor_temperfature}); +// reply.send({ +// status_code: 200, +// data: { +// tankName: tank.tankName, +// tankHeight:tankHeightCm, +// actualWaterLevel: `${actualWaterLevelLiters.toFixed(2)} L`, +// actualWaterLevelInCm: `${actualWaterLevelInCm.toFixed(2)} cm` +// } +// }); +// } catch (err) { +// console.error(err); +// reply.status(500).send({ message: err.message || "Internal Server Error" }); +// } +// }; +// exports.getActualWaterLevelInCm = async (req, reply) => { +// try { +// const { tankName } = req.params; - } - catch (err) { - throw boom.boomify(err); - } -}; +// if (!tankName) { +// return reply.status(400).send({ message: "Tank name is required." }); +// } -exports.update_auto_mode = async (req, reply) => { - try { - const customerId = req.params.customerId; - const { motor_id, auto_mode } = req.body; +// const tank = await Tank.findOne({ tankName }); +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." 
}); +// } - // Update inputConnections' auto_mode - await Tank.updateOne( - { customerId: customerId, "connections.inputConnections.motor_id": motor_id }, - { $set: { "connections.inputConnections.$.auto_mode": auto_mode } } - ); +// const tankHeightFeet = parseFloat(tank.height); +// const tankHeightCm = tankHeightFeet * 30.48; - +// const capacityPerCm = parseFloat(tank.waterCapacityPerCm); +// console.log("capacityPerCm",capacityPerCm) +// if (!capacityPerCm || capacityPerCm <= 0) { +// return reply.status(400).send({ message: "Invalid waterCapacityPerCm value." }); +// } - reply.send({ status_code: 200, message: "Auto mode and percentages updated successfully." }); - } catch (error) { - throw boom.boomify(error); - } -}; +// let sensorWaterLevelInCm = 0; +// let sensorWaterLevelLiters = 0; -exports.update_auto_percentage = async (req, reply) => { - try { - const customerId = req.params.customerId; - const { tankName,tankLocation, auto_min_percentage, auto_max_percentage } = req.body; +// // By default, try to use tank.waterlevel +// let waterlevelLiters = parseFloat(tank.waterlevel); - // Update inputConnections' auto_mode - +// if (waterlevelLiters > 0) { +// // Use manual waterlevel +// sensorWaterLevelInCm = waterlevelLiters / capacityPerCm; +// sensorWaterLevelLiters = waterlevelLiters; +// // } else { +// // Fallback to IoT data +// const iotData = await IotData.findOne({ +// hardwareId: tank.hardwareId, +// "tanks.tankhardwareId": tank.tankhardwareId +// }).sort({ date: -1 }); - // Update auto_min_percentage and auto_max_percentage - await Tank.updateOne( - { customerId: customerId,tankLocation, tankName}, - { - $set: { - "auto_min_percentage": auto_min_percentage, - "auto_max_percentage": auto_max_percentage - } - } - ); +// console.log("iotData", iotData); - reply.send({ status_code: 200, message: "Auto mode and percentages updated successfully." 
}); - } catch (error) { - throw boom.boomify(error); - } -}; +// if (!iotData) { +// return reply.status(404).send({ message: "No IoT data found for this tank." }); +// } +// const matchingTank = iotData.tanks.find( +// t => t.tankhardwareId === tank.tankhardwareId +// ); -//storing water level for every 15 minutes +// console.log("matchingTank", matchingTank); -const getFormattedISTTime = () => { - return moment().tz('Asia/Kolkata').format('DD-MM-YYYY hh:mm:ss A'); -}; -const storeWaterLevels = async () => { - try { - const tanks = await Tank.find({}); - const currentTime = getFormattedISTTime(); - - const waterLevelRecords = tanks.map(tank => ({ - customerId: tank.customerId, - tankName: tank.tankName, - tankLocation: tank.tankLocation, - waterlevel: tank.waterlevel, - time: currentTime - })); - - await TankWaterLevel.insertMany(waterLevelRecords); - console.log('Water levels stored successfully'); - } catch (error) { - console.error('Error storing water levels:', error); - } -}; +// if (!matchingTank) { +// return reply.status(404).send({ message: "No matching tank found in IoT data." }); +// } -setInterval(storeWaterLevels, 15 * 60 * 1000); +// const tankHeightFromSensor = parseFloat(matchingTank.tankHeight); +// console.log("tankHeightFromSensor", tankHeightFromSensor); +// if (isNaN(tankHeightFromSensor)) { +// return reply.status(400).send({ message: "Invalid tankHeight from IoT data." 
}); +// } +// // sensorWaterLevelInCm = full tank height in cm - distance from sensor +// sensorWaterLevelInCm = tankHeightCm - tankHeightFromSensor; +// console.log("sensorWaterLevelInCm",sensorWaterLevelInCm) +// sensorWaterLevelLiters = sensorWaterLevelInCm * capacityPerCm; +// console.log("sensorWaterLevelLiters",sensorWaterLevelLiters) +// } -console.log('Cron job scheduled to update water levels at midnight'); +// reply.send({ +// status_code: 200, +// data: { +// tankName: tank.tankName, +// tankHeight: tankHeightCm, +// TankWaterLevel: tank.waterlevel, +// capacity: tank.capacity, +// sensorWaterLevelInCm: `${sensorWaterLevelInCm.toFixed(2)} cm`, +// sensorWaterLevel: `${sensorWaterLevelLiters.toFixed(2)} L`, +// } +// }); -exports.getBlockData = async (req, reply) => { - try { - const customerId = req.params.customerId; +// } catch (err) { +// console.error(err); +// reply.status(500).send({ message: err.message || "Internal Server Error" }); +// } +// }; - // Get all tank documents for the current customerId - const tanks = await Tank.find({ customerId }); +// exports.getActualWaterLevelInCm = async (req, reply) => { +// try { +// const { tankName } = req.params; - // Extract the blockName from each tank - const blockNames = tanks.map(tank => tank.blockName); +// if (!tankName) { +// return reply.status(400).send({ message: "Tank name is required." }); +// } - // Remove duplicates by converting the array to a Set and then back to an array - const uniqueBlockNames = [...new Set(blockNames)]; +// const tank = await Tank.findOne({ tankName }); +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." 
}); +// } - // Add "all" and "nduku sneha antha kopam" to the block names - uniqueBlockNames.unshift("All"); +// const tankHeightFeet = parseFloat(tank.height); +// const tankHeightCm = tankHeightFeet * 30.48; - // Send the unique blockNames in the response - reply.code(200).send({ blockNames: uniqueBlockNames }); +// const capacityPerCm = parseFloat(tank.waterCapacityPerCm); +// console.log("capacityPerCm", capacityPerCm); +// if (!capacityPerCm || capacityPerCm <= 0) { +// return reply.status(400).send({ message: "Invalid waterCapacityPerCm value." }); +// } - } catch (err) { - // Log the error for debugging purposes - console.error(err); +// let sensorWaterLevelInCm = 0; +// let sensorWaterLevelLiters = 0; - // Send an error response - reply.code(500).send({ error: err.message }); - } -}; +// let manualWaterLevelLiters = parseFloat(tank.waterlevel) || 0; +// let manualWaterLevelInCm = manualWaterLevelLiters > 0 +// ? manualWaterLevelLiters / capacityPerCm +// : 0; +// // Always try to also get IoT data +// const iotData = await IotData.findOne({ +// hardwareId: tank.hardwareId, +// "tanks.tankhardwareId": tank.tankhardwareId +// }).sort({ date: -1 }); +// console.log("iotData", iotData); +// if (iotData) { +// const matchingTank = iotData.tanks.find( +// t => t.tankhardwareId === tank.tankhardwareId +// ); +// console.log("matchingTank", matchingTank); -const mqtt = require('mqtt'); -const client = mqtt.connect('mqtt://35.207.198.4:1883'); // Connect to MQTT broker +// if (matchingTank) { +// const tankHeightFromSensor = parseFloat(matchingTank.tankHeight); +// console.log("tankHeightFromSensor", tankHeightFromSensor); -client.on('connect', () => { - console.log('Connected to MQTT broker'); - client.subscribe('water/iot-data', (err) => { - if (err) { - console.error('Error subscribing to topic:', err); - } else { - console.log('Subscribed to water/iot-data topic'); - } - }); -}); +// if (!isNaN(tankHeightFromSensor)) { +// sensorWaterLevelInCm = tankHeightCm - 
tankHeightFromSensor; +// sensorWaterLevelLiters = sensorWaterLevelInCm * capacityPerCm; +// } +// } +// } -// Handling incoming MQTT messages -client.on('message', async (topic, message) => { - console.log(`Message received on topic ${topic}:`, message.toString()); +// reply.send({ +// status_code: 200, +// data: { +// tankName: tank.tankName, +// tankHeight: tankHeightCm, +// capacity: tank.capacity, +// currentWaterLevel: `${manualWaterLevelLiters.toFixed(2)} L`, +// currentWaterLevelInCm: `${manualWaterLevelInCm.toFixed(2)} cm`, +// sensorWaterLevel: `${sensorWaterLevelLiters.toFixed(2)} L`, +// sensorWaterLevelInCm: `${sensorWaterLevelInCm.toFixed(2)} cm` +// } +// }); - if (topic === 'water/iot-data') { - try { - const data = JSON.parse(message.toString()); - const { hw_Id, Motor_status, tanks } = data.objects; // Updated variable names according to new format - - // Get the current date and time in the required format - const currentDate = new Date(); - const date = currentDate.toISOString(); // ISO string for date - const time = currentDate.toLocaleTimeString('en-IN', { hour12: false, timeZone: 'Asia/Kolkata' }); // Time in 'HH:MM:SS' - - // Create array of tank documents with current date and time - const tankDocuments = tanks.map(tank => ({ - tankhardwareId: tank.Id, // Updated to match the new format - tankHeight: tank.level, // Updated to match the new format - date, - time - })); +// } catch (err) { +// console.error(err); +// reply.status(500).send({ message: err.message || "Internal Server Error" }); +// } +// }; - - // Save IoT data for the received tanks - const iotTankData = new IotData({ - hardwareId: hw_Id, // Updated variable name - Motor_status, - tanks: tankDocuments, - date, - time - }); - await iotTankData.save(); +// exports.compareMeasuredHeight = async (req, reply) => { +// try { +// const { measuredHeight, tankName } = req.body; - // Delete excess records (keep only the latest three records) - const recordsToKeep = 3; - const 
recordsToDelete = await IotData.find({ hardwareId: hw_Id }) // Updated variable name - .sort({ date: -1, time: -1 }) - .skip(recordsToKeep); +// if (!tankName || measuredHeight === undefined) { +// return reply.status(400).send({ message: "Tank name and measured height are required." }); +// } - for (const record of recordsToDelete) { - await record.remove(); - } +// // Convert measuredHeight to a number +// const measuredHeightNum = parseFloat(measuredHeight); +// if (isNaN(measuredHeightNum)) { +// return reply.status(400).send({ message: "Invalid measuredHeight. It must be a number." }); +// } - // Process each tank to update water level and connections - for (const tank of tanks) { - const { Id: tankhardwareId, level: tankHeight } = tank; // Updated to match the new format - // Find the corresponding tank in the Tank schema using hardwareId and tankhardwareId - const existingTank = await Tank.findOne({ hardwareId: hw_Id, tankhardwareId }); // Updated variable name - if (!existingTank) continue; +// // Fetch tank details using tankName +// const tank = await Tank.findOne({ tankName }); - const customerId = existingTank.customerId; - const tank_name = existingTank.tankName; +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." 
}); +// } - // Calculate water level using tank height and capacity - const tankHeightInCm = (parseInt(existingTank.height.replace(/,/g, ''), 10)) * 30.48; // Convert height to cm - const tank_height = parseInt(tankHeightInCm.toFixed(0), 10); - const waterLevelHeight = tank_height - tankHeight; - const waterCapacityPerCm = parseInt(existingTank.waterCapacityPerCm.replace(/,/g, ''), 10); +// const actualWaterLevel = parseFloat(tank.waterlevel); // Current water level in liters +// const waterCapacityPerCm = parseFloat(tank.waterCapacityPerCm); // Liters per cm - const waterLevel = parseInt(waterLevelHeight * waterCapacityPerCm, 10); // Calculated water level +// if (isNaN(actualWaterLevel) || isNaN(waterCapacityPerCm)) { +// return reply.status(400).send({ message: "Tank data is incomplete for conversion." }); +// } - // Update water level in the existing tank - if (waterLevel >= 0) { - existingTank.waterlevel = waterLevel; - await existingTank.save(); +// // Convert actual water level from liters to cm +// const actualWaterLevelInCm = actualWaterLevel / waterCapacityPerCm; - // Update linked tanks (input/output connections) - for (const outputConnection of existingTank.connections.outputConnections) { - const linkedTank = await Tank.findOne({ customerId, tankName: outputConnection.outputConnections, tankLocation: outputConnection.output_type }); - if (linkedTank) { - for (const inputConnection of linkedTank.connections.inputConnections) { - if (inputConnection.inputConnections === tank_name) { - inputConnection.water_level = waterLevel; // Update water level for linked tank - await linkedTank.save(); // Save updated linked tank - } - } - } - } - } - } +// // Calculate difference between measured and actual water level in cm +// const heightDifferenceInCm = Math.abs(actualWaterLevelInCm - measuredHeightNum); + +// let message; +// if (heightDifferenceInCm <= 10) { +// message = "Manual measurement is match within 10 cm of the sensor data."; +// } else { +// message 
= "Manual measurement not matched within range 10cm from the sensor data."; +// } + +// reply.send({ +// status_code: 200, +// data: { +// tankName, +// measuredHeight: measuredHeightNum.toFixed(2) + " cm", +// actualWaterLevelInCm: actualWaterLevelInCm.toFixed(2) + " cm", +// heightDifferenceInCm: heightDifferenceInCm.toFixed(2) + " cm", +// message +// } +// }); + +// } catch (err) { +// console.error("Error in compareMeasuredHeight:", err); +// reply.status(500).send({ message: err.message }); +// } +// }; + +// exports.compareMeasuredHeight = async (req, reply) => { +// try { +// const { tankName, measuredHeight } = req.body; - // Update motor status - const status = Motor_status; - const motorTank = await Tank.findOne({ "connections.inputConnections.motor_id": hw_Id }); // Updated variable name +// if (!tankName) { +// return reply.status(400).send({ message: "Tank name is required." }); +// } - if (!motorTank) { - console.log('Motor not found for the specified motor_id'); - return; - } +// const tank = await Tank.findOne({ tankName }); +// if (!tank) { +// return reply.status(404).send({ message: "Tank not found." }); +// } - // Find the inputConnection for the motor and update motor status - const inputConnection = motorTank.connections.inputConnections.find(conn => conn.motor_id === hw_Id); // Updated variable name - if (inputConnection) { - inputConnection.motor_status = status; // Update motor status - await motorTank.save(); // Save the updated tank - } +// const tankHeightFeet = parseFloat(tank.height); +// const tankHeightCm = Math.round(tankHeightFeet * 30.48); - console.log('Data processed successfully for hardwareId:', hw_Id); // Updated variable name +// const capacityPerCm = parseFloat(tank.waterCapacityPerCm); +// if (!capacityPerCm || capacityPerCm <= 0) { +// return reply.status(400).send({ message: "Invalid waterCapacityPerCm value." 
}); +// } - } catch (err) { - console.error('Error processing message:', err.message); - } - } -}); +// const manualWaterLevelLiters = parseFloat(tank.waterlevel) || 0; +// const manualWaterLevelInCm = Math.round(manualWaterLevelLiters / capacityPerCm); +// let sensorWaterLevelInCm = 0; +// let sensorWaterLevelLiters = 0; +// // Try to get IoT data +// const iotData = await IotData.findOne({ +// hardwareId: tank.hardwareId, +// "tanks.tankhardwareId": tank.tankhardwareId +// }).sort({ date: -1 }); +// if (iotData) { +// const matchingTank = iotData.tanks.find( +// t => t.tankhardwareId === tank.tankhardwareId +// ); +// if (matchingTank) { +// const tankHeightFromSensor = parseFloat(matchingTank.tankHeight); +// if (!isNaN(tankHeightFromSensor)) { +// const rawSensorWaterLevelInCm = tankHeightCm - Math.round(tankHeightFromSensor); +// sensorWaterLevelInCm = Math.max(0, rawSensorWaterLevelInCm); +// sensorWaterLevelLiters = Math.round(sensorWaterLevelInCm * capacityPerCm); +// } +// } +// } -// Function to publish motor stop status -// exports.publishMotorStopStatus = async (motor_id, motor_stop_status) => { -// const payload = { -// topic: 'operation', -// object: { -// 'motor-id': motor_id, -// control: motor_stop_status +// // Include comparison if measuredHeight provided +// let comparison = null; +// if (measuredHeight !== undefined) { +// const measuredHeightNum = parseFloat(measuredHeight); +// if (!isNaN(measuredHeightNum)) { +// const measuredHeightRounded = Math.round(measuredHeightNum); +// const heightDifferenceInCm = Math.abs(manualWaterLevelInCm - measuredHeightRounded); +// const message = heightDifferenceInCm <= 10 +// ? "Manual measurement matches within 10 cm of sensor/manual data." 
+// : "Manual measurement not matched within range 10 cm."; + +// comparison = { +// measuredHeight: measuredHeightRounded + " cm", +// actualWaterLevelInCm: manualWaterLevelInCm + " cm", +// heightDifferenceInCm: heightDifferenceInCm + " cm", +// message +// }; +// } else { +// comparison = { message: "Invalid measuredHeight; must be a number." }; +// } // } -// }; -// client.publish('water/operation', JSON.stringify(payload)); +// reply.send({ +// status_code: 200, +// data: { +// tankName: tank.tankName, +// tankHeightInCm: tankHeightCm, +// capacity: tank.capacity, +// manualWaterLevel: manualWaterLevelLiters, // as integer +// manualWaterLevelInCm: manualWaterLevelInCm + " cm", +// sensorWaterLevel: sensorWaterLevelLiters, // as integer +// sensorWaterLevelInCm: sensorWaterLevelInCm + " cm", +// ...(comparison && { comparison }) +// } +// }); + +// } catch (err) { +// console.error(err); +// reply.status(500).send({ message: err.message || "Internal Server Error" }); +// } // }; +exports.compareMeasuredHeight = async (req, reply) => { + try { + const { tankName, measuredHeight, tankHeight } = req.body; + + if (!tankName || typeof measuredHeight !== 'number' || typeof tankHeight !== 'number') { + return reply.status(400).send({ message: "tankName, tankHeight and measuredHeight are required and must be numbers." }); + } + + if (tankHeight <= 0 || measuredHeight < 0) { + return reply.status(400).send({ message: "Invalid tankHeight or measuredHeight; must be positive numbers." }); + } + + const tank = await Tank.findOne({ tankName }); + if (!tank) { + return reply.status(404).send({ message: "Tank not found." }); + } + const capacityPerCm = parseFloat(tank.waterCapacityPerCm); + if (!capacityPerCm || capacityPerCm <= 0) { + return reply.status(400).send({ message: "Invalid waterCapacityPerCm in tank data." 
}); + } -//const moment = require('moment'); + // DB tank height (in feet → cm) + const tankHeightFeetFromDB = parseFloat(tank.height); + const tankHeightInCmFromDB = Math.round(tankHeightFeetFromDB * 30.48); + // tankHeight from body is already in cm + const tankHeightInCmFromBody = Math.round(tankHeight); + console.log("tankHeightInCmFromBody",tankHeightInCmFromBody) + console.log("tankHeightInCmFromDB",tankHeightInCmFromDB) + // 🔹 Sensor data (use DB tank height) + let sensorGapCm = null; + let sensorWaterLevelInCm = null; + let sensorWaterLevelLiters = null; + const iotData = await IotData.findOne({ + hardwareId: tank.hardwareId, + "tanks.tankhardwareId": tank.tankhardwareId + }).sort({ date: -1 }); -exports.getPendingAndCompletedsurveyOfparticularInstaller = async (request, reply) => { - try { - const { installationId } = request.params; - const survey_status = request.body; + if (iotData) { + const matchingTank = iotData.tanks.find( + t => t.tankhardwareId === tank.tankhardwareId + ); + if (matchingTank) { + const tankHeightFromSensor = parseFloat(matchingTank.tankHeight); + if (!isNaN(tankHeightFromSensor) && tankHeightFromSensor >= 0) { + sensorGapCm = Math.round(tankHeightFromSensor); + sensorWaterLevelInCm = Math.max(0, tankHeightInCmFromDB - sensorGapCm); + sensorWaterLevelLiters = Math.round(sensorWaterLevelInCm * capacityPerCm); + + var dynamicTankHeightInCm = sensorGapCm + sensorWaterLevelInCm; + console.log("dynamicTankHeightInCm",dynamicTankHeightInCm) + } + } + } - - const surveydata = await User.find({ - installationId, - survey_status, - - }); + // 🔹 Manual data (use tankHeight from body) + // const manualWaterLevelInCm = Math.round(measuredHeight); // measuredHeight in cm + // const manualWaterLevelLiters = Math.round(manualWaterLevelInCm * capacityPerCm); + + // // 🔹 Comparison + // const heightDifferenceInCm = Math.abs(manualWaterLevelInCm - (sensorWaterLevelInCm ?? 0)); + // const comparisonMessage = heightDifferenceInCm <= 10 + // ? 
"Manual measurement matches within 10 cm of sensor data." + // : "Manual measurement not matched within range 10 cm."; + + // reply.send({ + // status_code: 200, + // data: { + // tankName, + // capacity: tank.capacity, + // sensor: { + // tankHeightInCm: dynamicTankHeightInCm, // from DB + // sensorGapCm, + // waterLevelInCm: sensorWaterLevelInCm, + // waterLevelLiters: sensorWaterLevelLiters + // }, + // manual: { + // tankHeightInCm: tankHeightInCmFromBody, // passed in body + // measuredHeightCm: manualWaterLevelInCm, + // waterLevelLiters: manualWaterLevelLiters + // }, + // comparison: { + // heightDifferenceInCm, + // message: comparisonMessage + // } + // } + // }); + + // 🔹 Manual data +// 🔹 Manual data +const manualWaterLevelInCm = Math.round(measuredHeight); // measuredHeight in cm +const manualWaterLevelLiters = Math.round(manualWaterLevelInCm * capacityPerCm); + +// 🔹 Comparison +const waterLevelDifferenceInCm = Math.abs(manualWaterLevelInCm - (sensorWaterLevelInCm ?? 0)); +const tankHeightDifferenceInCm = Math.abs(tankHeightInCmFromDB - (tankHeightInCmFromBody ?? 0 )); + +// require *both* to be within 10cm +// const isWithinRange = waterLevelDifferenceInCm <= 10 && tankHeightDifferenceInCm <= 10; + +// const comparisonMessage = isWithinRange +// ? "Manual measurement matches within 10 cm of sensor data and tank height." 
+// : "Manual measurement not matched within range 10 cm."; +const isWaterLevelMatch = waterLevelDifferenceInCm <= 10; +const isTankHeightMatch = tankHeightDifferenceInCm <= 10; + +let comparisonMessage = ""; + +if (isWaterLevelMatch && isTankHeightMatch) { + comparisonMessage = "Manual measurement matches within 10 cm of sensor data."; +} else if (isWaterLevelMatch) { + comparisonMessage = "✅ Manual water level matches within 10 cm, but tank height does not."; +} else if (isTankHeightMatch) { + comparisonMessage = "✅ Manual tank height matches within 10 cm, but water level does not."; +} else { + comparisonMessage = "❌ Manual measurement does not match within 10 cm range for either water level or tank height."; +} - // Send the response, including both total consumption and filtered consumption records - reply.send({ - status_code: 200, - surveydata, - - }); +reply.send({ + status_code: 200, + data: { + tankName, + capacity: tank.capacity, + sensor: { + tankHeightInCm: dynamicTankHeightInCm, // from DB + IoT data + sensorGapCm, + waterLevelInCm: sensorWaterLevelInCm, + waterLevelLiters: sensorWaterLevelLiters + }, + manual: { + tankHeightInCm: tankHeightInCmFromBody, + measuredHeightCm: manualWaterLevelInCm, + waterLevelLiters: manualWaterLevelLiters + }, + comparison: { + waterLevelDifferenceInCm, + tankHeightDifferenceInCm, + message: comparisonMessage + } + } +}); + } catch (err) { - throw boom.boomify(err); + console.error(err); + reply.status(500).send({ message: err.message || "Internal Server Error" }); } }; @@ -4796,205 +8421,141 @@ exports.getPendingAndCompletedsurveyOfparticularInstaller = async (request, repl -exports.consumptionofparticulartank = async (request, reply) => { - try { - const { customerId } = request.params; - const { startDate, stopDate, tankName, tankLocation, block } = request.body; +//const ExcelJS = require('exceljs'); +//const IotData = require('../models/IotData'); // adjust the path - // Convert input dates into proper JavaScript 
Date objects for comparison - const start = moment(startDate, "DD-MMM-YYYY - HH:mm").toDate(); - const end = moment(stopDate, "DD-MMM-YYYY - HH:mm").toDate(); +// async function generateAndSaveTankExcel() { +// const startDate = "2025-06-03T00:00:00.000Z"; +// const endDate = new Date().toISOString(); - // Find the tank by customerId, tankLocation, and tankName - const tank = await Tank.findOne({ - customerId, - tankLocation: tankLocation || "overhead", // Default to "overhead" if not provided - tankName, - }); +// const getFilteredData = async (hardwareId) => { +// const query = { +// hardwareId, +// date: { +// $gte: startDate, +// $lte: endDate, +// }, +// }; - if (!tank) { - return reply.status(404).send({ - status_code: 404, - message: "Tank not found", - }); - } +// console.log(`Fetching data for hardwareId: ${hardwareId}`); +// const results = await IotData.find(query).sort({ date: 1 }); - const waterlevel_at_midnight = parseInt(tank.waterlevel_at_midnight.replace(/,/g, ""), 10); - const total_water_added_from_midnight = parseInt(tank.total_water_added_from_midnight.replace(/,/g, ""), 10); - const waterlevel = parseInt(tank.waterlevel.replace(/,/g, ""), 10); +// console.log(`Found ${results.length} records for hardwareId ${hardwareId}`); - // Fetch all records for the tank (no date filtering yet) - const tankConsumptions = await TankConsumptionOriginalSchema.find({ - customerId, - tankName, - tankLocation: tankLocation, - }); +// const data = []; - // Filter records in JavaScript by comparing the 'time' field after converting to Date - const filteredConsumptions = tankConsumptions.filter((record) => { - const recordTime = moment(record.time, "DD-MMM-YYYY - HH:mm").toDate(); - return recordTime >= start && recordTime <= end; - }); +// for (const entry of results) { +// if (!entry.tanks || !Array.isArray(entry.tanks)) continue; - // Sort filtered records by date (ascending) - filteredConsumptions.sort((a, b) => { - const dateA = moment(a.time, "DD-MMM-YYYY - 
HH:mm").toDate(); - const dateB = moment(b.time, "DD-MMM-YYYY - HH:mm").toDate(); - return dateA - dateB; // Sort in ascending order - }); +// for (const tank of entry.tanks) { +// if (tank.tankhardwareId === 'tank-1' || tank.tankhardwareId === 'tank-2') { +// data.push({ +// hardwareId: entry.hardwareId, +// tankhardwareId: tank.tankhardwareId, +// tankHeight: tank.tankHeight, +// date: tank.date.split('T')[0], // safe because it's a string +// time: tank.time, +// }); +// } +// } +// } - // Calculate total consumption from filtered records - const total_consumption_from_records = filteredConsumptions.reduce((acc, record) => { - return acc + parseInt(record.consumption, 10); - }, 0); +// console.log(`Filtered ${data.length} tank records for hardwareId ${hardwareId}`); +// return data; +// }; - // Calculate final consumption - const consumption = (waterlevel_at_midnight + total_water_added_from_midnight) - waterlevel + total_consumption_from_records; +// const data140924 = await getFilteredData("140924"); +// const data150924 = await getFilteredData("150924"); - // Prepare response data - const tankData = { - tankname: tank.tankName, - totalConsumption: consumption, - block: tank.blockName, - TypeofWater: tank.typeOfWater, - location: tank.tankLocation, - capacity: tank.capacity, - waterlevel: tank.waterlevel, - }; +// const workbook = new ExcelJS.Workbook(); +// const worksheet = workbook.addWorksheet("Tank Data"); - // Send the response, including both total consumption and filtered consumption records - reply.send({ - status_code: 200, - tankData, - totalConsumption: consumption, - consumptionRecords: filteredConsumptions, - }); - } catch (err) { - throw boom.boomify(err); - } -}; +// worksheet.columns = [ +// { header: "Hardware ID", key: "hardwareId", width: 15 }, +// { header: "Tank ID", key: "tankhardwareId", width: 15 }, +// { header: "Tank Height", key: "tankHeight", width: 15 }, +// { header: "Date", key: "date", width: 15 }, +// { header: "Time", key: 
"time", width: 15 }, +// ]; +// const allData = [...data140924, ...data150924]; +// allData.forEach(row => worksheet.addRow(row)); -// // Set start and end dates -// const startDate = new Date("2024-08-20T00:00:00Z"); -// const endDate = new Date("2024-11-04T00:00:00Z"); +// await workbook.xlsx.writeFile("tank_data.xlsx"); +// console.log("✅ Excel file saved as tank_data.xlsx with rows:", allData.length); +// } -// // Tank names array with respective blocks -// const tanks = [ -// { tankName: "REAL TANK OH", block: "A" }, -// { tankName: "DUMMY TANK OH1", block: "BLOCK C" }, -// { tankName: "DUMMY TANK OH2", block: "BLOCK D" }, -// { tankName: "DUMMY TANK OH3", block: "BLOCK C" }, -// { tankName: "DUMMY TANK OH4", block: "BLOCK C" }, -// { tankName: "DUMMY TANK OH5", block: "BLOCK C" }, -// { tankName: "DUMMY TANK OH6", block: "BLOCK C" } -// ]; -// const customerId = "AWSUSKY4"; -// const tankLocation = "overhead"; -// const typeofwater = "Bore Water"; +// generateAndSaveTankExcel(); -// // Function to format date to "DD-MMM-YYYY - HH:mm" -// function formatDateCustom(date) { -// const options = { day: '2-digit', month: 'short', year: 'numeric' }; -// return date.toLocaleDateString('en-GB', options).replace(/ /g, '-') + " - 23:55"; -// } -// // Main function to generate data -// async function generateData() { -// for (let date = new Date(startDate); date <= endDate; date.setDate(date.getDate() + 1)) { -// const formattedDate = formatDateCustom(date); // Format date to "DD-MMM-YYYY - 23:55" -// for (const { tankName, block } of tanks) { -// try { -// const existingRecord = await TankConsumptionOriginalSchema.findOne({ -// customerId: customerId, -// tankName: tankName, -// tankLocation: tankLocation, -// time: formattedDate -// }).exec(); -// console.log(`Checking record for ${tankName} on ${formattedDate}: ${existingRecord ? 
'Exists' : 'Does not exist'}`); +async function publishAllTankMotorsForCustomer(customerId) { + try { + const tanks = await Tank.find({ customerId, status: 'active' }).lean(); -// if (!existingRecord) { -// // Random consumption between 7000 and 8000 -// const randomConsumption = Math.floor(Math.random() * (8000 - 7000 + 1)) + 7000; + for (const tank of tanks) { + const { tankName, tankLocation, connections } = tank; + const inputConnections = connections?.inputConnections || []; -// // Create a new document and save it -// const newRecord = new TankConsumptionOriginalSchema({ -// customerId: customerId, -// tankName: tankName, -// tankLocation: tankLocation, -// consumption: randomConsumption.toString(), -// time: formattedDate, -// block: block, -// typeofwater: typeofwater, -// __v: 0 -// }); -// await newRecord.save(); // Use .save() method to insert the record -// console.log(`Inserted record for ${tankName} on ${formattedDate}`); -// } -// } catch (error) { -// console.error(`Failed to check or insert record for ${tankName} on ${formattedDate}:`, error); -// } -// } -// } -// console.log("Data generation complete."); -// } + for (const conn of inputConnections) { + if (!conn.motor_id || conn.status !== 'active') continue; -// // Run the data generation function -// generateData(); + let time = 0; -async function removeDuplicates() { - try { - // Step 1: Find duplicates, considering time and ignoring case for typeofwater - const duplicates = await TankConsumptionOriginalSchema.aggregate([ - { - $group: { - _id: { - customerId: "$customerId", - tankName: "$tankName", - time: "$time" - }, - count: { $sum: 1 }, - ids: { $push: "$_id" }, // Store the _id values for further processing - latestConsumption: { $max: { $toInt: "$consumption" } }, // Get the max consumption - latestTypeofwater: { $last: "$typeofwater" } // Get the last typeofwater value - } - }, - { - $match: { - count: { $gt: 1 } // Only keep groups with more than one occurrence - } - } - ]); + if 
(conn.motor_status === '2') { + const now = moment().tz('Asia/Kolkata'); - console.log(`Found ${duplicates.length} groups of duplicates.`); + if (conn.startTime) { + const start = moment(conn.startTime, 'DD-MMM-YYYY - HH:mm'); - // Step 2: Prepare delete operations - for (const duplicateGroup of duplicates) { - // Filter the ids based on the maximum time to keep the latest entry - const idsToDelete = duplicateGroup.ids.filter(id => { - return id !== duplicateGroup.ids[0]; // Keep the first, delete the rest - }); + if (start.isValid()) { + const hoursElapsed = moment.duration(now.diff(start)).asHours(); - for (const id of idsToDelete) { - try { - await TankConsumptionOriginalSchema.deleteOne({ _id: id }); - console.log(`Deleted duplicate record with ID: ${id}`); - } catch (deleteError) { - console.error(`Failed to delete record with ID ${id}:`, deleteError); + if (hoursElapsed <= 12) { + time = Math.floor(moment.duration(now.diff(start)).asMinutes()); + + } else { + console.log(`⏳ Skipped motor_id ${conn.motor_id} — startTime older than 12 hours`); } + } else { + console.warn(`⚠️ Invalid startTime format for motor_id ${conn.motor_id}: ${conn.startTime}`); + } + } else { + console.log(`⚠️ startTime is null for motor_id ${conn.motor_id}, sending time = 0`); } - } + } - console.log("Duplicate removal complete."); - } catch (error) { - console.error("Failed to remove duplicates:", error); + const payload = { + tankName, + tankLocation, + motor_status: conn.motor_status, + customerId, + time, + }; + + const topic = `water/motor-status/${conn.motor_id}`; + client.publish(topic, JSON.stringify(payload), { qos: 1 }, (err) => { + if (err) { + console.error(`❌ Failed to publish to ${topic}:`, err.message); + } else { + console.log(`📤 Published to ${topic}:`, payload); + } + }); + } + } + } catch (err) { + console.error(`❌ Error publishing motor data for customer ${customerId}:`, err.message); } } -// Run the remove duplicates function -// removeDuplicates(); 
-console.log("this is for testing autopush,line located in tankscontroller") \ No newline at end of file +// 🕒 Call every 30 seconds + + +// Run every 30 seconds +setInterval(() => { + publishAllTankMotorsForCustomer('AWSUSKY4'); +}, 30000); + diff --git a/src/controllers/userController.js b/src/controllers/userController.js index d0b9b406..c82f4219 100644 --- a/src/controllers/userController.js +++ b/src/controllers/userController.js @@ -3,16 +3,20 @@ // }); //const axios = require('axios'); -const bcrypt = require("bcrypt"); +//const bcrypt = require("bcrypt"); +const bcrypt = require('bcryptjs'); + const saltRounds = 10; const libphonenumberjs = require("libphonenumber-js"); // External Dependancies // offers http-friendly error objects. const boom = require("boom"); +const { Tankerbooking} = require("../models/tankers") +const {EstimationOrder} = require("../models/store"); // Get Data Models -const { Supplier, generateSupplierId, FriendRequest,DeliveryBoy} = require("../models/supplier") -const { User,Counter, generateBookingId,resetCounter,generateCustomerId,ProfilePicture, AddTeamMembers} = require('../models/User') +const { RecurringRequestedBooking,RequestedBooking,Supplier, generateSupplierId, FriendRequest,DeliveryBoy} = require("../models/supplier") +const { User,Counter, generateBookingId,resetCounter,generateCustomerId,ProfilePicture, AddTeamMembers,Cart} = require('../models/User') //const User = require("../models/User"); const customJwtAuth = require("../customAuthJwt"); @@ -280,35 +284,107 @@ exports.addUser = async (req, reply) => { // Accepts a user , password , and checks in the system to see if user exists , and password is valid // returns a user object so that jwt token can be created and sent back to the client -exports.loginUser = async (req, fcmId, deviceId) => { +// exports.loginUser = async (req, fcmIds, deviceId) => { +// try { +// const { phone, password } = req.body; +// let user = await User.findOne({ phone }); +// let isStaff = 
false; +// let staffMember = null; + +// // If not a main user, check staff inside all users +// if (!user) { + +// const users = await User.find({ "staff.staff.phone": phone }); +// for (const u of users) { +// const foundStaff = u.staff.staff.find((s) => s.phone === phone); +// if (foundStaff) { +// user = u; // Assign user as the main user under which the staff exists +// staffMember = foundStaff; +// isStaff = true; +// break; +// } +// } +// } + +// // If no user or staff found, return invalid credentials +// if (!user) return { same: false }; + +// // Validate password +// let isSame = false; +// if (isStaff) { +// isSame = password === staffMember.password; // Plain text comparison for staff +// } else { +// isSame = await bcrypt.compare(password, user.services.password.bcrypt); // Bcrypt for main users +// } + +// if (!isSame) return { same: false }; + +// // Update deviceId +// user.deviceId = deviceId; +// await user.save(); + +// return { same: true, user, isStaff, staffMember }; +// } catch (err) { +// throw boom.boomify(err); +// } +// }; + + +exports.loginUser = async (req, fcmIds, deviceId) => { try { - const phone = req.body.phone; - const password = req.body.password; - - const user = await User.findOne({ phone: phone }); - if (user) { - const isSame = await bcryptComparePassword( - password, - user.services.password.bcrypt - ); - if (isSame) { - // Optionally, you can save/update fcmId and deviceId here - user.fcmId = fcmId; - user.deviceId = deviceId; - await user.save(); - - return { same: true, user: user }; - } else { - return { same: false }; + const { phone, password } = req.body; + let user = await User.findOne({ phone }); + let isStaff = false; + let staffMember = null; + + if (!user) { + const users = await User.find({ "staff.staff.phone": phone }); + for (const u of users) { + const foundStaff = u.staff.staff.find((s) => s.phone === phone); + if (foundStaff) { + user = u; + staffMember = foundStaff; + isStaff = true; + break; + } } + } + 
+ if (!user) return { same: false }; + + let isSame = false; + + if (isStaff) { + isSame = password === staffMember.password; } else { - return { same: false }; + const otpMatch = + //user.oneTimePasswordSetFlag && + user.passwordResetCode && + password === user.passwordResetCode.toString(); + + if (otpMatch) { + isSame = true; + user.oneTimePasswordSetFlag = false; + user.passwordResetCode = null; + } else { + isSame = await bcrypt.compare(password, user.services.password.bcrypt); + } } + + if (!isSame) return { same: false }; + + user.deviceId = deviceId; + await user.save(); + + return { same: true, user, isStaff, staffMember }; } catch (err) { throw boom.boomify(err); } }; + + + exports.loginUserWithOTP = async (req) => { try { const phone = req.body.phone; @@ -550,15 +626,106 @@ exports.sendSms = async (request, reply) => { req.end(); } + +// exports.forgotPassword = async (req, reply) => { +// try { +// // Create a new User object from the request body +// var user = new User(req.body); + +// // Check if the request body is URL encoded +// checkFormEncoding = isUserFormUrlEncoded(req); +// if (checkFormEncoding.isUserFormUrlEncoded) { +// // Extract user information from the request body +// usertobeInserted = checkFormEncoding.user; +// user.username = usertobeInserted.username; +// user.firstName = usertobeInserted.firstName; +// user.lastName = usertobeInserted.lastName; +// user.phone = usertobeInserted.phone; +// user.emails = usertobeInserted.emails; +// } + +// // Find a user with the given phone number in the database +// userExists = await User.findOne({ +// phone: user.phone, +// }); + +// if (userExists) { +// // Generate a random password reset code +// const code = Math.floor(100000 + Math.random() * 900000); + +// // Convert the code to a string and hash it using bcrypt +// codestr = ""; +// codestr = code.toString(); +// hash = await bcryptPassword(codestr); + +// // Update the user's password reset code and password hash in the database +// 
const filter = { +// phone: userExists.phone, +// }; +// const update = { +// $set: { +// passwordResetCode: code, +// "services.password.bcrypt": hash, +// //oneTimePasswordSetFlag: true, +// }, +// }; +// const doc = await User.updateOne(filter, update); + +// // Find the updated user in the database +// updatedUser = await User.findOne({ phone: userExists.phone }); + +// if (updatedUser.oneTimePasswordSetFlag) { +// // Send an SMS with the password reset code +// const request = { +// body: { +// mobileNumbers: userExists.phone, +// }, +// }; +// const response = { +// send: (data) => { +// console.log(data); // Optional: Log the response from the SMS provider +// // Send a success response with the password reset code +// req.body.passwordResetCode = code; +// reply.send('{"armintatankdata":{"error":false,"forgotPassword": true}}'); +// }, +// }; +// await exports.sendSms(request, response); +// } else { +// // Send an error response if the password reset code was not set +// error = { +// armintatankdata: { +// error: true, +// code: 10007, +// message: "10007 - Unable to reset password", +// }, +// }; +// req.body.regError = error; +// reply.send(error); +// } +// } else { +// // Send an error response if no user was found with the given phone number +// error = { +// armintatankdata: { +// error: true, +// code: 10006, +// message: "10006 - Please check the phone number you entered..", +// }, +// }; +// req.body.regError = error; +// reply.send(error); +// } +// } catch (err) { +// // Handle any errors that occur during the API request +// throw boom.boomify(err); +// } +// }; + exports.forgotPassword = async (req, reply) => { try { - // Create a new User object from the request body var user = new User(req.body); - - // Check if the request body is URL encoded + checkFormEncoding = isUserFormUrlEncoded(req); if (checkFormEncoding.isUserFormUrlEncoded) { - // Extract user information from the request body usertobeInserted = checkFormEncoding.user; 
user.username = usertobeInserted.username; user.firstName = usertobeInserted.firstName; @@ -567,82 +734,219 @@ exports.forgotPassword = async (req, reply) => { user.emails = usertobeInserted.emails; } - // Find a user with the given phone number in the database - userExists = await User.findOne({ - phone: user.phone, - }); + const userExists = await User.findOne({ phone: user.phone }); - if (userExists) { - // Generate a random password reset code - const code = Math.floor(100000 + Math.random() * 900000); + if (!userExists) { + return reply.send({ + armintatankdata: { + error: true, + code: 10006, + message: "10006 - Please check the phone number you entered.", + }, + }); + } - // Convert the code to a string and hash it using bcrypt - codestr = ""; - codestr = code.toString(); - hash = await bcryptPassword(codestr); + // Generate a random 6-digit code + const code = Math.floor(100000 + Math.random() * 900000); - // Update the user's password reset code and password hash in the database - const filter = { - phone: userExists.phone, + // Store OTP only (not password hash) + const update = { + $set: { + passwordResetCode: code, + oneTimePasswordSetFlag: true, + }, + }; + + await User.updateOne({ phone: userExists.phone }, update); + + const updatedUser = await User.findOne({ phone: userExists.phone }); + + if (updatedUser.oneTimePasswordSetFlag) { + const request = { + body: { + mobileNumbers: userExists.phone, + }, }; - const update = { - $set: { - passwordResetCode: code, - "services.password.bcrypt": hash, - oneTimePasswordSetFlag: true, + const response = { + send: (data) => { + console.log(data); + req.body.passwordResetCode = code; + reply.send({ + armintatankdata: { + error: false, + forgotPassword: true, + }, + }); }, }; - const doc = await User.updateOne(filter, update); + await exports.sendSms(request, response); + } else { + return reply.send({ + armintatankdata: { + error: true, + code: 10007, + message: "Unable to reset password", + }, + }); + } + } 
catch (err) { + throw boom.boomify(err); + } +}; - // Find the updated user in the database - updatedUser = await User.findOne({ phone: userExists.phone }); - if (updatedUser.oneTimePasswordSetFlag) { - // Send an SMS with the password reset code - const request = { - body: { - mobileNumbers: userExists.phone, - }, - }; - const response = { - send: (data) => { - console.log(data); // Optional: Log the response from the SMS provider - // Send a success response with the password reset code - req.body.passwordResetCode = code; - reply.send('{"armintatankdata":{"error":false,"forgotPassword": true}}'); - }, - }; - await exports.sendSms(request, response); - } else { - // Send an error response if the password reset code was not set - error = { - armintatankdata: { - error: true, - code: 10007, - message: "10007 - Unable to reset password", - }, - }; - req.body.regError = error; - reply.send(error); - } - } else { - // Send an error response if no user was found with the given phone number - error = { + +// exports.forgotPassword = async (req, reply) => { +// try { +// const user = await User.findOne({ phone: req.body.phone }); + +// if (!user) { +// return reply.send({ +// armintatankdata: { +// error: true, +// code: 10006, +// message: "10006 - Please check the phone number you entered..", +// }, +// }); +// } + +// const code = Math.floor(100000 + Math.random() * 900000).toString(); +// const hashedOTP = await bcrypt.hash(code, 10); + +// await User.updateOne( +// { phone: user.phone }, +// { +// $set: { +// "services.password.bcrypt": hashedOTP, +// temporaryPasswordCode: code, +// oneTimePasswordSetFlag: true, +// }, +// } +// ); + +// // Simulated SMS logic +// console.log("OTP sent:", code); + +// reply.send({ +// armintatankdata: { +// error: false, +// forgotPassword: true, +// }, +// }); +// } catch (err) { +// throw boom.boomify(err); +// } +// }; + + + +exports.changePassword = async (req, reply) => { + try { + const {phone, oldPassword, newPassword } = 
req.body; + + + if (!oldPassword || !newPassword) { + return reply.send({ + armintatankdata: { + error: true, + code: 10008, + message: "10008 - Old password and new password are required", + }, + }); + } + + // Find user by phone + const user = await User.findOne({ phone }); + + if (!user) { + return reply.send({ armintatankdata: { error: true, code: 10006, - message: "10006 - Please check the phone number you entered..", + message: "10006 - User not found. Please check the phone number.", }, - }; - req.body.regError = error; - reply.send(error); + }); + } + + // Verify old password + const isMatch = await bcrypt.compare(oldPassword, user.services.password.bcrypt); + + if (!isMatch) { + return reply.send({ + armintatankdata: { + error: true, + code: 10009, + message: "10009 - Incorrect old password", + }, + }); } + + // Hash new password + const hashedPassword = await bcrypt.hash(newPassword, 10); + + // Update password + await User.updateOne( + { phone }, + { + $set: { + "services.password.bcrypt": hashedPassword, + oneTimePasswordSetFlag: false, // Reset OTP flag after password change + }, + } + ); + + reply.send({ + armintatankdata: { + error: false, + message: "Password changed successfully", + }, + }); } catch (err) { - // Handle any errors that occur during the API request throw boom.boomify(err); } }; +exports.addingfavoratesupplier = async (req, reply) => { + try { + const { customerId } = req.params; + const { supplierId } = req.query; + + if (!supplierId) { + return reply.code(400).send({ status_code: 400, message: "supplierId is required" }); + } + + // Find user + const user = await User.findOne({ customerId }); + if (!user) { + return reply.code(404).send({ status_code: 404, message: "User not found" }); + } + + // Add supplierId to favorate_suppliers if not already there + if (!user.favorate_suppliers.includes(supplierId)) { + user.favorate_suppliers.push(supplierId); + await user.save(); + } + + // Fetch FriendRequest status + const friendRequest = 
await FriendRequest.findOne({ customerId, supplierId }); + const status = friendRequest ? friendRequest.status : "not_requested"; + + reply.send({ + status_code: 200, + message: "Supplier added to favorites successfully", + data: { + customerId, + supplierId, + favorate_suppliers: user.favorate_suppliers, + status, + }, + }); + } catch (err) { + console.error(err); + reply.status(500).send({ status_code: 500, message: err.message }); + } +}; + exports.forgotPasswordSupplier = async (req, reply) => { try { @@ -801,6 +1105,65 @@ exports.deleteTeamMember = async (req, reply) => { } }; + + + +exports.editFavoriteSupplier = async (req, reply) => { + try { + const { customerId } = req.params; + const { oldSupplierId, newSupplierId } = req.query; + + const user = await User.findOne({ customerId }); + if (!user) return reply.code(404).send({ status_code: 404, message: "User not found" }); + + const index = user.favorate_suppliers.indexOf(oldSupplierId); + if (index === -1) + return reply.code(400).send({ status_code: 400, message: "Old supplier not found in favorites" }); + + user.favorate_suppliers[index] = newSupplierId; + await user.save(); + + reply.send({ + status_code: 200, + message: "Favorite supplier updated", + data: user.favorate_suppliers, + }); + } catch (err) { + throw boom.boomify(err); + } +}; + +exports.deleteFavoriteSupplier = async (req, reply) => { + try { + const { customerId } = req.params; + const { supplierId } = req.query; + + const user = await User.findOne({ customerId }); + if (!user) return reply.code(404).send({ status_code: 404, message: "User not found" }); + + const initialLength = user.favorate_suppliers.length; + user.favorate_suppliers = user.favorate_suppliers.filter(id => id !== supplierId); + + if (user.favorate_suppliers.length === initialLength) + return reply.code(400).send({ status_code: 400, message: "Supplier not found in favorites" }); + + await user.save(); + + reply.send({ + status_code: 200, + message: "Favorite supplier 
removed", + data: user.favorate_suppliers, + }); + } catch (err) { + throw boom.boomify(err); + } +}; + + + + + + exports.updateTeamMember = async (req, reply) => { try { var customerId = req.params.customerId; @@ -825,7 +1188,6 @@ exports.updateTeamMember = async (req, reply) => { - exports.createstaff = async (request, reply) => { try { const { customerId } = request.params; @@ -842,13 +1204,32 @@ exports.createstaff = async (request, reply) => { return reply.status(404).send({ error: 'Customer not found' }); } - // Validate each staff entry and append it to the user's staff array - const newStaff = staff.map((member) => ({ - name: member.name || null, - phone: member.phone || null, - password: member.password || null, - status: "active", // Default status - })); + // Check for duplicate phone numbers + const existingPhones = new Set(user.staff.staff.map((member) => member.phone)); + const newStaff = []; + const duplicatePhones = []; + + staff.forEach((member) => { + if (member.phone && existingPhones.has(member.phone)) { + duplicatePhones.push(member.phone); + } else { + if (member.phone) { + existingPhones.add(member.phone); + } + newStaff.push({ + name: member.name || null, + phone: member.phone || null, + password: member.password || null, + all_motor_access:member.all_motor_access, + status: "active", // Default status + }); + } + }); + + if (duplicatePhones.length > 0) { + return reply.status(400).send({ error: 'Duplicate phone numbers found', duplicatePhones }); + } + // Update the user document with the new staff members user.staff.staff.push(...newStaff); @@ -862,21 +1243,25 @@ exports.createstaff = async (request, reply) => { reply.status(500).send({ error: 'An error occurred while adding staff' }); } }; + + exports.editStaff = async (request, reply) => { try { const { customerId, phone } = request.params; - const { name, password } = request.body; + const { name, password,all_motor_access } = request.body; const user = await User.findOne({ customerId, 
"staff.staff.phone": phone }); if (!user) { return reply.status(404).send({ error: 'Staff member not found' }); } + const staffMember = user.staff.staff.find(member => member.phone === phone); staffMember.name = name || staffMember.name; staffMember.password = password || staffMember.password; + staffMember.all_motor_access = all_motor_access || staffMember.all_motor_access; await user.save(); reply.send({ message: 'Staff member updated successfully', staff: staffMember }); @@ -931,3 +1316,891 @@ exports.blockStaff = async (request, reply) => { reply.status(500).send({ error: 'An error occurred while blocking staff' }); } }; + + + + + + +exports.getFavoriteSuppliers = async (req, reply) => { + const { customerId } = req.params; + + try { + // Find the user by customerId + const user = await User.findOne({ customerId }); + + if (!user) { + return reply.status(404).send({ status_code: 404, message: "User not found" }); + } + + const supplierIds = user.favorate_suppliers || []; + + // Get full supplier details for those supplierIds + const suppliers = await Supplier.find({ + supplierId: { $in: supplierIds } + }).exec(); + + reply.send({ + status_code: 200, + data: suppliers, + count: suppliers.length + }); + + } catch (err) { + console.error("Error fetching favorite suppliers:", err); + reply.status(500).send({ status_code: 500, message: "Internal server error" }); + } +}; + + +exports.getCartByUserId = async (req, reply) => { + try { + const { customerId } = req.params; + + const cart = await Cart.findOne({ customerId }) || { customerId, items: [] }; + + reply.send({ + status_code: 200, + message: "Cart fetched successfully", + data: cart, + }); + } catch (err) { + console.error("Error fetching cart:", err); + reply.status(500).send({ error: "Internal server error" }); + } +}; + +exports.addItemToCart = async (req, reply) => { + try { + const { customerId } = req.params; + const { productId, name, quantity, price } = req.body; + + let cart = await Cart.findOne({ 
customerId }); + + if (!cart) { + cart = new Cart({ customerId, items: [] }); + } + + const existingItem = cart.items.find(item => item.productId === productId); + + if (existingItem) { + existingItem.quantity += quantity; + } else { + cart.items.push({ productId, name, quantity, price }); + } + + await cart.save(); + + reply.send({ + status_code: 200, + message: "Item added to cart", + data: cart, + }); + } catch (err) { + console.error("Error adding item:", err); + reply.status(500).send({ error: "Internal server error" }); + } +}; + +exports.removeItemFromCart = async (req, reply) => { + try { + const { customerId } = req.params; + const { productId } = req.body; + + const cart = await Cart.findOne({ customerId }); + + if (!cart) { + return reply.status(404).send({ error: "Cart not found" }); + } + + cart.items = cart.items.filter(item => item.productId !== productId); + await cart.save(); + + reply.send({ + status_code: 200, + message: "Item removed from cart", + data: cart, + }); + } catch (err) { + console.error("Error removing item:", err); + reply.status(500).send({ error: "Internal server error" }); + } +}; + +exports.clearCart = async (req, reply) => { + try { + const { customerId } = req.params; + + const cart = await Cart.findOneAndUpdate( + { customerId }, + { items: [] }, + { new: true } + ); + + reply.send({ + status_code: 200, + message: "Cart cleared", + data: cart, + }); + } catch (err) { + console.error("Error clearing cart:", err); + reply.status(500).send({ error: "Internal server error" }); + } +}; + + + + + + + +exports.getuserOrders = async (req, reply) => { + try { + const { customerId } = req.params; + + const orders = await Tankerbooking.find({ customerId }).sort({ createdAt: -1 }).lean(); + + return reply.send({ + status_code: 200, + message: `Orders for customer ${customerId} fetched successfully`, + data: orders + }); + + } catch (err) { + throw boom.boomify(err); + } +}; + + + +exports.getuserRequestbookings = async (req, reply) => { 
+ try { + const { customerId } = req.params; + + // 1. Get all bookings + const bookings = await RequestedBooking.find({ customerId }).sort({ createdAt: -1 }).lean(); + + // 2. Collect all supplierIds used + const allSupplierIds = new Set(); + bookings.forEach(booking => { + booking.requested_suppliers?.forEach(s => { + if (s.supplierId) allSupplierIds.add(s.supplierId); + }); + }); + + // 3. Query all supplier details at once + const supplierList = await Supplier.find({ + supplierId: { $in: [...allSupplierIds] } + }).lean(); + + const supplierMap = {}; + supplierList.forEach(s => { + supplierMap[s.supplierId] = { + supplierId: s.supplierId, + supplierName: s.suppliername, + phone: s.phone, + longitude: s.longitude, + latitude: s.latitude, + address: s.profile?.office_address, + status: s.status + }; + }); + + // 4. Attach supplier_details inside each requested_suppliers[] object + const enrichedBookings = bookings.map(booking => { + booking.requested_suppliers = booking.requested_suppliers.map(supplier => ({ + ...supplier, + supplier_details: supplierMap[supplier.supplierId] || null + })); + return booking; + }); + + // 5. Send final response + return reply.send({ + status_code: 200, + message: `Orders for customer ${customerId} fetched successfully`, + data: enrichedBookings + }); + + } catch (err) { + console.error(err); + throw boom.boomify(err); + } +}; + +// controllers/user.controller.js (or wherever your controllers live) + +// const Supplier = require("../models/supplier.model"); // not needed here + +/** + * GET /api/getuserRequestbookingsforsupplier/:supplierId + * Returns bookings where this supplier was requested, showing only this supplier's sub-entry. 
+ */ +exports.getuserRequestbookingsForSupplier = async (req, reply) => { + try { + const { supplierId } = req.params; + + if (!supplierId) { + return reply.code(400).send({ + status_code: 400, + message: "supplierId is required", + }); + } + + // 1) Find all bookings that include this supplier + const bookings = await RequestedBooking.find({ + "requested_suppliers.supplierId": supplierId, + }) + .sort({ createdAt: -1 }) + .lean(); + + if (!bookings.length) { + return reply.send({ + status_code: 200, + message: `No orders found for supplier ${supplierId}`, + data: [], + }); + } + + // 2) Collect all unique customerIds + const customerIds = [...new Set(bookings.map((b) => b.customerId))]; + + // 3) Fetch user details for those customers + const users = await User.find({ customerId: { $in: customerIds } }).lean(); + + // 4) Build map for quick lookup + const userMap = users.reduce((acc, u) => { + acc[u.customerId] = u; + return acc; + }, {}); + + // 5) Format data with user info + const data = bookings.map((b) => { + const mySupplierEntry = + (b.requested_suppliers || []).find( + (s) => s.supplierId === supplierId + ) || null; + + return { + _id: b._id, + customerId: b.customerId, + type_of_water: b.type_of_water, + capacity: b.capacity, + quantity: b.quantity, + total_required_capacity: b.total_required_capacity, + date: b.date, + time: b.time, + booking_status: b.status, + createdAt: b.createdAt, + updatedAt: b.updatedAt, + + // supplier-specific entry + my_supplier_entry: mySupplierEntry, + + // attach full user details here + customer_details: userMap[b.customerId] || null, + }; + }); + + return reply.send({ + status_code: 200, + message: `Orders for supplier ${supplierId} fetched successfully`, + data, + }); + } catch (err) { + console.error(err); + throw boom.boomify(err); + } +}; + + + +exports.getuserRequestbookingsforplansforsupplier = async (req, reply) => { + try { + const { supplierId } = req.params; + + if (!supplierId) { + return reply.code(400).send({ 
+ status_code: 400, + message: "supplierId is required", + }); + } + + // 1) Find all bookings that include this supplier + const bookings = await RecurringRequestedBooking.find({ + "requested_suppliers.supplierId": supplierId, + }) + .sort({ createdAt: -1 }) + .lean(); + + // 2) For each booking, expose only this supplier's subdocument + const data = bookings.map((b) => { + const mySupplierEntry = (b.requested_suppliers || []).find( + (s) => s.supplierId === supplierId + ) || null; + + return { + _id: b._id, + customerId: b.customerId, + type_of_water: b.type_of_water, + capacity: b.capacity, + quantity: b.quantity, + frequency:b.frequency, + weekly_count:b.weekly_count, + total_required_capacity: b.total_required_capacity, + date: b.date, + time: b.time, + // booking-wide status (e.g., pending/confirmed/cancelled) + booking_status: b.status, + createdAt: b.createdAt, + updatedAt: b.updatedAt, + + // only the supplier's own requested_suppliers entry + my_supplier_entry: mySupplierEntry, // { supplierId, quoted_amount, time, status } + }; + }); + + return reply.send({ + status_code: 200, + message: `Orders for supplier ${supplierId} fetched successfully`, + data, + }); + } catch (err) { + console.error(err); + throw boom.boomify(err); + } +}; + + +// Assuming you have these models imported somewhere above: +// const RecurringRequestedBooking = require("..."); +// const Supplier = require("..."); + +exports.getuserRequestbookingsforplansforcustomer = async (req, reply) => { + try { + const { customerId } = req.params; + + if (!customerId) { + return reply.code(400).send({ + status_code: 400, + message: "customerId is required", + }); + } + + // 1) Get bookings + const bookings = await RecurringRequestedBooking.find({ customerId }) + .sort({ createdAt: -1 }) + .lean(); + + if (!bookings.length) { + return reply.send({ + status_code: 200, + message: `Orders for customer ${customerId} fetched successfully`, + data: [], + }); + } + + // 2) Collect unique supplierIds 
from requested_suppliers + const supplierIdSet = new Set(); + for (const b of bookings) { + const rs = Array.isArray(b.requested_suppliers) ? b.requested_suppliers : []; + for (const s of rs) { + if (s && typeof s.supplierId === "string" && s.supplierId.trim() && s.supplierId !== "string") { + supplierIdSet.add(s.supplierId.trim()); + } + } + } + const supplierIds = Array.from(supplierIdSet); + + // 3) Fetch suppliers and index by supplierId + const suppliers = supplierIds.length + ? await Supplier.find({ supplierId: { $in: supplierIds } }) + .select("-__v") // tweak projection as you like + .lean() + : []; + + const supplierById = new Map(); + for (const s of suppliers) { + // Key is Supplier.supplierId (string), not _id + supplierById.set(s.supplierId, s); + } + + // 4) Attach supplier details into each requested_suppliers entry + const data = bookings.map((b) => { + const rs = Array.isArray(b.requested_suppliers) ? b.requested_suppliers : []; + const enriched = rs.map((item) => ({ + ...item, + supplier: supplierById.get(item?.supplierId) || null, // attach or null if not found + })); + return { + ...b, + requested_suppliers: enriched, + }; + }); + + return reply.send({ + status_code: 200, + message: `Orders for customer ${customerId} fetched successfully`, + data, + }); + } catch (err) { + console.error(err); + throw boom.boomify(err); + } +}; + + + + +const mongoose = require('mongoose'); + + + exports.acceptRequestedBooking = async (req, reply) => { + const { supplierId } = req.params; + const { action, _id } = req.body; + + if (!["accept", "reject"].includes(action)) { + return reply.code(400).send({ message: "Invalid action. Must be 'accept' or 'reject'." 
}); + } + + try { + const requestedBooking = await RequestedBooking.findOne({ + _id: new mongoose.Types.ObjectId(_id), + 'requested_suppliers.supplierId': supplierId + }); + + if (!requestedBooking) { + return reply.code(404).send({ message: "No matching request for given ID and supplier" }); + } + + const matchedSupplier = requestedBooking.requested_suppliers.find(s => s.supplierId === supplierId); + if (!matchedSupplier) { + return reply.code(404).send({ message: "Supplier not found in requested_suppliers array" }); + } + + if (action === "reject") { + matchedSupplier.status = "rejected_by_user"; + await requestedBooking.save(); + + return reply.code(200).send({ + status_code: 200, + message: "Supplier request rejected by user", + data: requestedBooking + }); + } + + // Accept path + requestedBooking.status = 'accepted'; + await requestedBooking.save(); + + const customer = await User.findOne({ customerId: requestedBooking.customerId }).lean(); + if (!customer) return reply.code(404).send({ message: "Customer not found" }); + + const supplier = await Supplier.findOne({ supplierId }).lean(); + if (!supplier) return reply.code(404).send({ message: "Supplier not found" }); + + if (!matchedSupplier.quoted_amount) { + return reply.code(400).send({ message: "Quoted amount missing for this supplier" }); + } +requestedBooking.requested_suppliers = requestedBooking.requested_suppliers.filter( + s => s.supplierId !== supplierId +); + +// ✅ Optional: Mark booking as fully processed if no more suppliers remain +if (requestedBooking.requested_suppliers.length === 0) { + requestedBooking.status = 'processed'; +} + +await requestedBooking.save(); + +// Format: ARM + YYYYMMDD + random digit (0–9) +const today = new Date(); +const datePart = today.toISOString().slice(0, 10).replace(/-/g, ''); // YYYYMMDD +const randomDigit = Math.floor(Math.random() * 10); // 0–9 +const bookingId = `ARM${datePart}${randomDigit}`; +const amount_due = 
matchedSupplier.quoted_amount-matchedSupplier.advance_paid + + const newBooking = new Tankerbooking({ + bookingid: bookingId, + customerId: customer.customerId, + customerName: customer.profile.firstName, + customerPhone: customer.phone, + address: customer.address1, + latitude: customer.latitude, + longitude: customer.longitude, + + supplierId: supplier.supplierId, + supplierName: supplier.suppliername, + supplierPhone: supplier.phone, + supplierAddress: customer.address, + amount_paid: String(matchedSupplier.advance_paid), + amount_due: String(amount_due), + advance_reference_number: matchedSupplier.advance_ref_number, + type_of_water: requestedBooking.type_of_water, + capacity: requestedBooking.capacity, + quantity: requestedBooking.quantity, + total_required_capacity: requestedBooking.total_required_capacity, + expectedDateOfDelivery: requestedBooking.date, + time: requestedBooking.time, + price: matchedSupplier.quoted_amount, + + status: 'pending' + }); + + await newBooking.save(); + + reply.code(200).send({ + status_code: 200, + message: "Booking accepted and moved to tanker bookings", + data: newBooking + }); + + } catch (err) { + console.error(err); + throw boom.internal("Failed to handle booking action", err); + } +}; + + +exports.getordersofcustomer = async (req, reply) => { + try { + const customerId = req.params.customerId; + + // Find the specific tank + const mainTank = await Tankerbooking.find({ + + customerId: customerId, + + }); + + if (!mainTank) { + return reply.send({ status_code: 404, error: "Main tank not found" }); + } + + // Send the found tank within a list + reply.send({ status_code: 200, data: [mainTank] }); + } catch (err) { + throw boom.boomify(err); + } +}; + + + +exports.getallsuppliers = async (req, reply) => { + try { + // Find the specific tank + const suppliers = await Supplier.find({ + }); + if (!suppliers) { + return reply.send({ status_code: 404, error: "suppliers not found" }); + } + // Send the found tank within a list + 
reply.send({ status_code: 200, data: [suppliers] }); + } catch (err) { + throw boom.boomify(err); + } +}; + + +exports.estimationsget = async (req, reply) => { + try { + // Find the specific tank + const estimations = await EstimationOrder.find({customerId:req.params.customerId + }); + if (!estimations) { + return reply.send({ status_code: 404, error: "estimations not found" }); + } + // Send the found tank within a list + reply.send({ status_code: 200, data: [estimations] }); + } catch (err) { + throw boom.boomify(err); + } +}; + + + +exports.updatePaymentForBooking = async (req, reply) => { + try { + const { bookingid } = req.params; + const { payment_mode, payment_reference_number } = req.body; + + if (!bookingid) { + return reply.code(400).send({ + status_code: 400, + message: 'bookingid (param) is required', + }); + } + + if (typeof payment_mode === 'undefined' && typeof payment_reference_number === 'undefined') { + return reply.code(400).send({ + status_code: 400, + message: 'At least one of payment_mode or payment_reference_number must be provided in body', + }); + } + + const update = {}; + if (typeof payment_mode !== 'undefined') update.payment_mode = payment_mode; + if (typeof payment_reference_number !== 'undefined') update.payment_reference_number = payment_reference_number; + + const updated = await Tankerbooking.findOneAndUpdate( + { bookingid }, + { $set: update }, + { new: true, runValidators: true } + ).lean(); + + if (!updated) { + return reply.code(404).send({ + status_code: 404, + message: `No booking found with bookingid ${bookingid}`, + }); + } + + return reply.code(200).send({ + status_code: 200, + message: 'Payment info updated successfully', + data: updated, + }); + } catch (err) { + console.error('updatePaymentForBooking error:', err); + // keep using boom as you used earlier + throw boom.boomify(err); + } +}; + + +// controllers/recurringRequestedBookingController.js + +exports.updateQuotedAmountForSupplier = async (req, reply) => { + try 
{ + const { _id } = req.params; + const { supplierId, amount } = req.body; + + if (!_id) { + return reply.code(400).send({ status_code: 400, message: '_id (param) is required' }); + } + if (!supplierId) { + return reply.code(400).send({ status_code: 400, message: 'supplierId (body) is required' }); + } + if (typeof amount === 'undefined' || amount === null || amount === '') { + return reply.code(400).send({ status_code: 400, message: 'amount (body) is required' }); + } + + // convert amount to number if possible + const numericAmount = Number(amount); + if (Number.isNaN(numericAmount)) { + return reply.code(400).send({ status_code: 400, message: 'amount must be a valid number' }); + } + + // Atomic update using positional $ operator + const filter = { _id, 'requested_suppliers.supplierId': supplierId }; + const update = { $set: { 'requested_suppliers.$.quoted_amount': numericAmount } }; + + const updated = await RequestedBooking.findOneAndUpdate(filter, update, { + new: true, + runValidators: true, + }).lean(); + + if (!updated) { + // either booking _id not found OR supplierId not found inside requested_suppliers + // let's check which one + const bookingExists = await RequestedBooking.findById(_id).lean(); + if (!bookingExists) { + return reply.code(404).send({ status_code: 404, message: `Booking with _id ${_id} not found` }); + } + + // booking exists but supplier entry missing + return reply.code(404).send({ + status_code: 404, + message: `Supplier ${supplierId} not found in requested_suppliers for booking ${_id}`, + }); + } + + return reply.code(200).send({ + status_code: 200, + message: `quoted_amount updated for supplier ${supplierId}`, + data: updated, + }); + } catch (err) { + console.error('updateQuotedAmountForSupplier error:', err); + throw boom.boomify(err); + } +}; + +exports.updatestatusForSupplier = async (req, reply) => { + try { + const { _id } = req.params; + const { supplierId, status } = req.body; + + if (!_id) { + return reply.code(400).send({ 
status_code: 400, message: '_id (param) is required' }); + } + if (!supplierId) { + return reply.code(400).send({ status_code: 400, message: 'supplierId (body) is required' }); + } + if (typeof status === 'undefined' || status === null || String(status).trim() === '') { + return reply.code(400).send({ status_code: 400, message: 'status (body) is required' }); + } + + // Map short keywords to the stored values + let statusToSave = String(status).trim().toLowerCase(); + if (statusToSave === 'accept') statusToSave = 'accepted_by_user'; + else if (statusToSave === 'reject') statusToSave = 'rejected_by_user'; + // otherwise keep the original (but normalized) value + + // Atomic update using positional $ operator + const filter = { _id, 'requested_suppliers.supplierId': supplierId }; + const update = { $set: { 'requested_suppliers.$.status': statusToSave } }; + + const updated = await RequestedBooking.findOneAndUpdate(filter, update, { + new: true, + runValidators: true, + }).lean(); + + if (!updated) { + // either booking _id not found OR supplierId not found inside requested_suppliers + const bookingExists = await RequestedBooking.findById(_id).lean(); + if (!bookingExists) { + return reply.code(404).send({ status_code: 404, message: `Booking with _id ${_id} not found` }); + } + + // booking exists but supplier entry missing + return reply.code(404).send({ + status_code: 404, + message: `Supplier ${supplierId} not found in requested_suppliers for booking ${_id}`, + }); + } + + return reply.code(200).send({ + status_code: 200, + message: `status updated for supplier ${supplierId}`, + data: updated, + }); + } catch (err) { + console.error('updatestatusForSupplier error:', err); + throw boom.boomify(err); + } +}; + + +exports.updateadvanceForSupplier = async (req, reply) => { + try { + const { _id } = req.params; + const { supplierId, advance_paid,advance_ref_number } = req.body; + + if (!_id) { + return reply.code(400).send({ status_code: 400, message: '_id (param) is 
required' }); + } + if (!supplierId) { + return reply.code(400).send({ status_code: 400, message: 'supplierId (body) is required' }); + } + + + // Map short keywords to the stored values + + + // otherwise keep the original (but normalized) value + + // Atomic update using positional $ operator + const filter = { _id, 'requested_suppliers.supplierId': supplierId }; + const update = { $set: { 'requested_suppliers.$.advance_paid': advance_paid,'requested_suppliers.$.advance_ref_number': advance_ref_number } }; + + const updated = await RequestedBooking.findOneAndUpdate(filter, update, { + new: true, + runValidators: true, + }).lean(); + + if (!updated) { + // either booking _id not found OR supplierId not found inside requested_suppliers + const bookingExists = await RequestedBooking.findById(_id).lean(); + if (!bookingExists) { + return reply.code(404).send({ status_code: 404, message: `Booking with _id ${_id} not found` }); + } + + // booking exists but supplier entry missing + return reply.code(404).send({ + status_code: 404, + message: `Supplier ${supplierId} not found in requested_suppliers for booking ${_id}`, + }); + } + + return reply.code(200).send({ + status_code: 200, + message: `status updated for supplier ${supplierId}`, + data: updated, + }); + } catch (err) { + console.error('updatestatusForSupplier error:', err); + throw boom.boomify(err); + } +}; + + +exports.splitBookingForSupplier = async (req, reply) => { + try { + const { id } = req.params; + const { splits } = req.body; + + if (!Array.isArray(splits) || splits.length === 0) { + return reply.code(400).send({ status_code: 400, message: "splits array is required" }); + } + + // 1) Find the existing booking + const existing = await RequestedBooking.findById(id); + if (!existing) { + return reply.code(404).send({ status_code: 404, message: "Booking not found" }); + } + + // 2) Update the original booking with the first split + const first = splits[0]; + existing.capacity = `${first.capacity} L`; + 
existing.quantity = "1"; + existing.total_required_capacity = first.capacity; + if (first.date) existing.date = first.date; + if (first.time) existing.time = first.time; + if (first.quoted_amount) { + existing.requested_suppliers[0].quoted_amount = first.quoted_amount; + } + await existing.save(); + + // 3) Create new bookings for remaining splits + const newBookings = []; + for (let i = 1; i < splits.length; i++) { + const s = splits[i]; + const newBooking = new RequestedBooking({ + status: "pending", + customerId: existing.customerId, + type_of_water: existing.type_of_water, + capacity: `${s.capacity} L`, + quantity: "1", + total_required_capacity: s.capacity, + date: s.date || existing.date, + time: s.time || existing.time, + requested_suppliers: [ + { + time: new Date().toISOString().slice(0, 16).replace("T", " "), + status: "pending", + supplierId: existing.requested_suppliers[0].supplierId, + quoted_amount: s.quoted_amount || existing.requested_suppliers[0].quoted_amount, + }, + ], + }); + newBookings.push(newBooking); + } + + if (newBookings.length > 0) { + await RequestedBooking.insertMany(newBookings); + } + + return reply.code(200).send({ + status_code: 200, + message: `${splits.length} booking(s) created/updated successfully`, + updated: existing, + newEntries: newBookings, + }); + } catch (err) { + console.error("splitBookingForSupplier error:", err); + return reply.code(500).send({ status_code: 500, message: "Internal Server Error" }); + } +}; + + diff --git a/src/handlers/supplierHandler.js b/src/handlers/supplierHandler.js index 7d3cf100..3e3fe640 100644 --- a/src/handlers/supplierHandler.js +++ b/src/handlers/supplierHandler.js @@ -1,6 +1,7 @@ //Get the data models const { Supplier, DeliveryBoy, profilePictureSupplier } = require("../models/supplier"); -const { FriendRequest } = require("../models/supplier"); +const { FriendRequest,RequestedBooking,RecurringRequestedBooking } = require("../models/supplier"); +const { Tanker,Tankerbooking } = 
require("../models/tankers"); const { ProfilePicture, User } = require("../models/User"); const supplierController = require("../controllers/supplierController"); const customJwtAuth = require("../customAuthJwt"); @@ -938,44 +939,300 @@ exports.getCurrentSupplier = async (req, reply) => { // } // }; +// "10-09-2025" or "10/09/2025" -> "10-Sep-2025"; if already "10-Sep-2025", returns as-is +const toDDMonYYYY = (s) => { + if (!s) return null; + const onlyDate = String(s).trim().split(/\s+/)[0]; + if (/^\d{2}-[A-Za-z]{3}-\d{4}$/.test(onlyDate)) return onlyDate; // already DD-Mon-YYYY + const parts = onlyDate.includes("/") ? onlyDate.split("/") : onlyDate.split("-"); + if (parts.length !== 3) return onlyDate; + const [dd, mm, yyyy] = parts; + const MON = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]; + const mon = MON[Math.max(0, Math.min(11, parseInt(mm,10)-1))]; + return `${dd.padStart(2,"0")}-${mon}-${String(yyyy).padStart(4,"0")}`; +}; + +// Normalise capacity strings like "10,000 L" / "10000" / 100 -> 100 (number) +const numify = (v) => { + if (v == null) return 0; + const n = parseFloat(String(v).replace(/[^\d.]/g, "")); + return Number.isFinite(n) ? 
n : 0; +}; exports.getSuppliers = async (req, reply) => { - const limit = parseInt(req.query.limit) || 100; - const page = parseInt(req.query.page) || 1; - const startindex = (page - 1) * limit; - const customerId = req.params.customerId; // Assuming you have already authenticated the user and stored their ID in the request object + const customerId = req.params.customerId; + + const { + type_of_water, + capacity: requestedCapacityStr, + quantity: requestedQuantityStr, + date, + time, + price_from, + price_to, + radius_from, + radius_to, + rating_from, + rating_to + } = req.body; + + const parseCapacity = (value) => parseFloat((value || "0").toString().replace(/,/g, "")); + const parsePrice = (value) => parseInt((value || "0").toString().replace(/,/g, "")); + + const requestedCapacity = parseCapacity(requestedCapacityStr); + const requestedQuantity = parseInt(requestedQuantityStr || "0"); + const totalRequiredCapacity = requestedCapacity * requestedQuantity; + + const priceFrom = parsePrice(price_from); + const priceTo = parsePrice(price_to); + try { - const friendRequests = await FriendRequest.find({ customerId }); - const supplierIdsToExclude = friendRequests.map( - (request) => request.supplierId + const customerData = await User.findOne({ customerId }); + const favorateSuppliers = customerData?.favorate_suppliers || []; + + const tankerBookings = await Tankerbooking.find({ date }); + const bookedTankerSet = new Set( + tankerBookings.map(booking => `${booking.supplierId}_${booking.tankerName}`) ); - const suppliers = await Supplier.find({ supplierId: { $nin: supplierIdsToExclude } }) - .limit(limit) - .skip(startindex) - .exec(); - const supplierIds = suppliers.map((supplier) => supplier.supplierId); - const profilePictures = await profilePictureSupplier.find({ - supplierId: { $in: supplierIds }, - }).exec(); + const tankerQuery = {}; - const data = suppliers.map((supplier) => { - const profilePicture = profilePictures.find( - (picture) => picture.supplierId === 
supplier.supplierId + if (type_of_water && type_of_water.trim() !== "") { + tankerQuery.typeofwater = type_of_water; + } + + let tankers = await Tanker.find(tankerQuery); + +const isValidPrice = (val) => { + const num = parseInt(val); + return !isNaN(num) && isFinite(num); +}; + +if (isValidPrice(price_from) && isValidPrice(price_to)) { + tankers = tankers.filter(tanker => { + const tankerPrice = parsePrice(tanker.price); + return tankerPrice >= priceFrom && tankerPrice <= priceTo; + }); +} + + tankers = tankers.filter(tanker => { + const key = `${tanker.supplierId}_${tanker.tankerName}`; + return !bookedTankerSet.has(key); + }); + + const supplierTankerMap = {}; + for (let tanker of tankers) { + if (!supplierTankerMap[tanker.supplierId]) { + supplierTankerMap[tanker.supplierId] = []; + } + supplierTankerMap[tanker.supplierId].push(tanker); + } + + const qualifiedSuppliers = []; + + for (let [supplierId, supplierTankers] of Object.entries(supplierTankerMap)) { + const totalAvailableCapacity = supplierTankers.reduce( + (sum, t) => sum + parseCapacity(t.capacity), + 0 ); - return { - ...supplier.toObject(), - picture: profilePicture ? 
profilePicture.picture : null, - }; + + if (requestedCapacity > 0 && requestedQuantity > 0) { + if (totalAvailableCapacity < totalRequiredCapacity) { + continue; + } + } + + qualifiedSuppliers.push({ supplierId, tankers: supplierTankers }); + } + + const suppliers = []; + + for (let supplierObj of qualifiedSuppliers) { + const supplierData = await Supplier.findOne({ supplierId: supplierObj.supplierId }); + + const friendRequest = await FriendRequest.findOne({ + customerId: customerId, + supplierId: supplierObj.supplierId + }); + + const isConnected = friendRequest && friendRequest.status === "accepted"; + const isFavorite = favorateSuppliers.includes(supplierObj.supplierId); +// Normalize inputs for matching RequestedBooking +// --- compute these once per request (you can move them above the loop too) --- +// --- normalize once per request (you can move these above the loop if you like) --- +// --- helpers (put once near your other helpers) --- +const escapeRegExp = (s) => String(s || "").replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + +// normalise inputs once +const targetDate = toDDMonYYYY((date || "").trim()); // e.g. 
"10-Sep-2025" +const reqCapNum = numify(requestedCapacityStr); // "100" -> 100 +const reqQtyNum = parseInt((requestedQuantityStr || "0"), 10); + +let requestedBooking = { status: false }; + +// be defensive about supplierId +const supId = String(supplierObj.supplierId || "").trim(); +// case-insensitive exact match for supplierId +const supIdRegex = new RegExp(`^${escapeRegExp(supId)}$`, "i"); + +// 1) fetch *all* candidates with same customer + date + supplierId +const rbDocs = await RequestedBooking.find( + { + customerId, + date: targetDate, + "requested_suppliers.supplierId": { $regex: supIdRegex } + }, + { + capacity: 1, + quantity: 1, + time: 1, + requested_suppliers: 1 + } +).lean(); + +// 2) scan docs and confirm cap/qty numerically +for (const rb of (rbDocs || [])) { + const dbCapNum = numify(rb.capacity); + const dbQtyNum = numify(rb.quantity); // quantity may be string + + if (dbCapNum === reqCapNum && dbQtyNum === reqQtyNum) { + // pick the exact supplier subdoc (there can be several) + const supSub = (rb.requested_suppliers || []).find( + s => String(s.supplierId || "").trim().toLowerCase() === supId.toLowerCase() + ); + + requestedBooking = { + status: true, + time: supSub?.time || rb.time || null + }; + break; // we found a match, no need to continue + } +} + +// Optional: quick debug if still false +// if (!requestedBooking.status) { +// console.log('[DBG requestedBooking miss]', { customerId, targetDate, supId, reqCapNum, reqQtyNum, rbDocs }); +// } + + + suppliers.push({ + supplier: supplierData, + tankers: supplierObj.tankers, + isConnected: isConnected, + isFavorite: isFavorite, + requestedBooking: requestedBooking + }); + } + + reply.send({ + status_code: 200, + suppliers }); - reply.send({ status_code: 200, data, count: data.length }); } catch (err) { - throw boom.boomify(err); + console.error(err); + reply.send({ + status_code: 500, + message: "Something went wrong", + error: err.message + }); + } +}; + + +// GET SUPPLIERS (simple): only 
needs customerId; no tanker checks +exports.getSupplierswithoutbooking = async (req, reply) => { + try { + const { customerId } = req.params; + + // 1) Load customer to read favorites + const customer = await User.findOne({ customerId }, { favorate_suppliers: 1, _id: 0 }); + if (!customer) { + return reply.code(404).send({ status_code: 404, message: "Customer not found" }); + } + const favoriteSet = new Set(customer.favorate_suppliers || []); + + // 2) Load all suppliers + const suppliers = await Supplier.find({}); // add projection if you want to slim payload + + // 3) Find accepted connections for this customer across ALL suppliers in one go + const supplierIds = suppliers.map(s => s.supplierId).filter(Boolean); + const acceptedReqs = await FriendRequest.find( + { customerId, supplierId: { $in: supplierIds }, status: "accepted" }, + { supplierId: 1, _id: 0 } + ); + const connectedSet = new Set(acceptedReqs.map(r => r.supplierId)); + + // 4) Build response + const result = suppliers.map(s => ({ + supplier: s, + isFavorite: favoriteSet.has(s.supplierId), + isConnected: connectedSet.has(s.supplierId), + })); + + return reply.send({ status_code: 200, suppliers: result }); + } catch (err) { + console.error(err); + return reply.code(500).send({ + status_code: 500, + message: "Something went wrong", + error: err.message, + }); } }; + +exports.createRequestedBooking = async (req, reply) => { + const { + customerId, + type_of_water, + capacity, + quantity, + date, + time, + requested_suppliers + } = req.body; + + const parseCapacity = (value) => parseFloat((value || "0").toString().replace(/,/g, "")); + const requestedCapacity = parseCapacity(capacity); + const requestedQuantity = parseInt(quantity || "0"); + const totalRequiredCapacity = requestedCapacity * requestedQuantity; + + try { + const requestedBooking = new RequestedBooking({ + customerId, + type_of_water, + capacity, + quantity, + total_required_capacity: totalRequiredCapacity, + date, + time, + 
requested_suppliers, // ✅ already contains supplierId, quoted_amount, custom_field + status: "pending" + }); + + await requestedBooking.save(); + + reply.send({ + status_code: 200, + message: "Requested booking created successfully", + data: requestedBooking + }); + + } catch (err) { + console.error(err); + reply.send({ + status_code: 500, + message: "Something went wrong while saving", + error: err.message + }); + } +} + + + + // Get single user by ID exports.getSingleSupplier = async (req, reply) => { try { @@ -1021,17 +1278,22 @@ exports.getConnectedSuppliers = async (req, reply) => { const limit = parseInt(req.query.limit) || 100; const page = parseInt(req.query.page) || 1; const startindex = (page - 1) * limit; - const customerId = req.params.customerId; // Assuming you have already authenticated the user and stored their ID in the request object + const customerId = req.params.customerId; + try { + // Get user's favorite suppliers + const user = await User.findOne({ customerId }, 'favorate_suppliers'); + const favorateSuppliers = user?.favorate_suppliers || []; + + // Get accepted friend requests const friendRequests = await FriendRequest.find({ customerId, status: "accepted", }); - const supplierIdsToInclude = friendRequests.map( - (request) => request.supplierId - ); + const supplierIdsToInclude = friendRequests.map(req => req.supplierId); + // Get suppliers const suppliers = await Supplier.find({ supplierId: { $in: supplierIdsToInclude } }) @@ -1039,19 +1301,24 @@ exports.getConnectedSuppliers = async (req, reply) => { .skip(startindex) .exec(); - const supplierIds = suppliers.map((supplier) => supplier.supplierId); + const supplierIds = suppliers.map(s => s.supplierId); + // Get profile pictures const profilePictures = await profilePictureSupplier.find({ supplierId: { $in: supplierIds } - }).exec(); + }); + // Construct final response const data = suppliers.map((supplier) => { const profilePicture = profilePictures.find( - (picture) => picture.supplierId 
=== supplier.supplierId + (pic) => pic.supplierId === supplier.supplierId ); + const isFavorate = favorateSuppliers.includes(supplier.supplierId); + return { ...supplier.toObject(), picture: profilePicture ? profilePicture.picture : null, + favorate: isFavorate, }; }); @@ -1714,3 +1981,563 @@ fastify.get('/api/users/profile-picture-supplier/:supplierId', async (req, res) res.status(500).send({ error: error.message }); } }); + + + +exports.getSuppliersForPlanSearch = async (req, reply) => { + const { customerId } = req.params; + const { + type_of_water, + capacity: requestedCapacityStr, + quantity: requestedQuantityStr, + frequency, start_date, end_date, // currently not used to filter suppliers + + // new filters + radius_from, radius_to, + rating_from, rating_to, + price_from, price_to, + pump + } = req.body; + + // ---- helpers (kept inside as you prefer) ---- + const parseFloatSafe = (v) => { + const n = parseFloat((v ?? "").toString().replace(/,/g, "")); + return Number.isFinite(n) ? n : NaN; + }; + const parseIntSafe = (v) => { + const n = parseInt((v ?? "").toString().replace(/,/g, ""), 10); + return Number.isFinite(n) ? 
n : NaN; + }; + const isValid = (n) => Number.isFinite(n); + const inRange = (n, from, to) => + (!isValid(from) || n >= from) && (!isValid(to) || n <= to); + + const normalizePump = (val) => { + if (val == null) return undefined; + const s = String(val).trim().toLowerCase(); + if (["1","true","yes","y"].includes(s)) return true; + if (["0","false","no","n"].includes(s)) return false; + return undefined; // ignore if unknown + }; + + const parseLatLng = (raw) => { + // supports: "17.38,78.49" | {lat: 17.38, lng: 78.49} | [17.38, 78.49] + if (!raw) return null; + try { + if (typeof raw === "string") { + const parts = raw.split(",").map(x => parseFloat(x.trim())); + if (parts.length === 2 && parts.every(Number.isFinite)) return { lat: parts[0], lng: parts[1] }; + // try JSON + const j = JSON.parse(raw); + return parseLatLng(j); + } + if (Array.isArray(raw) && raw.length === 2) { + const [lat, lng] = raw.map(Number); + if (Number.isFinite(lat) && Number.isFinite(lng)) return { lat, lng }; + } + if (typeof raw === "object" && raw !== null) { + const lat = parseFloat(raw.lat ?? raw.latitude); + const lng = parseFloat(raw.lng ?? raw.lon ?? raw.longitude); + if (Number.isFinite(lat) && Number.isFinite(lng)) return { lat, lng }; + } + } catch (_) {} + return null; + }; + + const haversineKm = (a, b) => { + const R = 6371; + const dLat = (b.lat - a.lat) * Math.PI / 180; + const dLng = (b.lng - a.lng) * Math.PI / 180; + const s1 = Math.sin(dLat/2) ** 2; + const s2 = Math.cos(a.lat*Math.PI/180) * Math.cos(b.lat*Math.PI/180) * Math.sin(dLng/2) ** 2; + return 2 * R * Math.asin(Math.sqrt(s1 + s2)); + }; + + const getSupplierRating = (s) => { + // adapt to whatever field you actually store + const cands = [s.rating, s.avgRating, s.averageRating, s.overallRating]; + const n = cands.find(x => Number.isFinite(Number(x))); + return Number(n ?? 
NaN); + }; + // ---- end helpers ---- + + // parse inputs + const requestedCapacity = parseFloatSafe(requestedCapacityStr) || 0; + const requestedQuantity = parseIntSafe(requestedQuantityStr) || 0; + const totalRequiredCapacity = requestedCapacity * requestedQuantity; + + const priceFrom = parseIntSafe(price_from); + const priceTo = parseIntSafe(price_to); + const ratingFrom = parseFloatSafe(rating_from); + const ratingTo = parseFloatSafe(rating_to); + const radiusFrom = parseFloatSafe(radius_from); + const radiusTo = parseFloatSafe(radius_to); + const pumpWanted = normalizePump(pump); + + try { + // favorites + customer coords (for radius) + const customer = await User.findOne({ customerId }, { favorate_suppliers: 1, googleLocation: 1, location: 1 }).lean(); + const favoriteSet = new Set(customer?.favorate_suppliers || []); + const customerCoords = + parseLatLng(customer?.googleLocation) || + parseLatLng(customer?.location); + + // 1) Tankers base query: by type_of_water (+ pump if requested) + const tankerQuery = {}; + if (type_of_water?.trim()) tankerQuery.typeofwater = type_of_water.trim(); + if (pumpWanted !== undefined) { + // try to match common representations + tankerQuery.$or = [ + { pump: pumpWanted ? 
{ $in: [true, "1", "yes", "true", 1, "Y", "y"] } : { $in: [false, "0", "no", "false", 0, "N", "n"] } }, + { pumpAvailable: pumpWanted } // if you store as boolean + ]; + } + + let tankers = await Tanker.find(tankerQuery).lean(); + + // 2) Price range on tanker.price + if (isValid(priceFrom) || isValid(priceTo)) { + tankers = tankers.filter(t => { + const p = parseIntSafe(t.price); + return isValid(p) && inRange(p, priceFrom, priceTo); + }); + } + + // 3) Group by supplier + const supplierTankerMap = {}; + for (const t of tankers) { + if (!t?.supplierId) continue; + (supplierTankerMap[t.supplierId] ||= []).push(t); + } + + // 4) Capacity qualification + let qualified = []; + for (const [supplierId, supplierTankers] of Object.entries(supplierTankerMap)) { + const totalAvail = supplierTankers.reduce((sum, tt) => sum + (parseFloatSafe(tt.capacity) || 0), 0); + if (requestedCapacity > 0 && requestedQuantity > 0 && totalAvail < totalRequiredCapacity) continue; + qualified.push({ supplierId, tankers: supplierTankers }); + } + + // 5) Fetch suppliers for remaining filters (rating & radius) + flags + const supplierIds = qualified.map(q => q.supplierId); + const [suppliersData, acceptedReqs] = await Promise.all([ + Supplier.find({ supplierId: { $in: supplierIds } }).lean(), + FriendRequest.find( + { customerId, supplierId: { $in: supplierIds }, status: "accepted" }, + { supplierId: 1, _id: 0 } + ).lean() + ]); + + // Build quick lookup + const supplierById = new Map(suppliersData.map(s => [s.supplierId, s])); + const connectedSet = new Set(acceptedReqs.map(r => r.supplierId)); + + // 6) Apply rating & radius filters on suppliers + if (isValid(ratingFrom) || isValid(ratingTo) || (isValid(radiusFrom) || isValid(radiusTo))) { + qualified = qualified.filter(q => { + const s = supplierById.get(q.supplierId); + if (!s) return false; + + // rating + if (isValid(ratingFrom) || isValid(ratingTo)) { + const r = getSupplierRating(s); + if (!isValid(r) || !inRange(r, ratingFrom, 
ratingTo)) return false; + } + + // radius (requires coords on both sides) + if ((isValid(radiusFrom) || isValid(radiusTo)) && customerCoords) { + const supCoords = + parseLatLng(s.googleLocation) || + parseLatLng(s.location) || + parseLatLng(s.addressLocation); + if (!supCoords) return false; + const distKm = haversineKm(customerCoords, supCoords); + if (!inRange(distKm, radiusFrom, radiusTo)) return false; + } + + return true; + }); + } + + // 7) Build response with flags + optional 'requestedBooking' flag + const suppliers = []; + for (const q of qualified) { + const s = supplierById.get(q.supplierId); + if (!s) continue; + + const isConnected = connectedSet.has(q.supplierId); + const isFavorite = favoriteSet.has(q.supplierId); + + // If you want to expose a hint that user has already sent a single-day request earlier + const requestedBookingRecord = await RequestedBooking.findOne({ + customerId, + "requested_suppliers.supplierId": q.supplierId + }, { time: 1 }).lean(); + + suppliers.push({ + supplier: s, + tankers: q.tankers, + isConnected, + isFavorite, + requestedBooking: requestedBookingRecord ? 
{ status: true, time: requestedBookingRecord.time } : { status: false } + }); + } + + return reply.send({ status_code: 200, suppliers }); + } catch (err) { + console.error(err); + return reply.send({ + status_code: 500, + message: "Something went wrong", + error: err.message + }); + } + }; + +// controllers/validationHandler.js (add below the previous handler) + +// exports.createRequestedPlanBooking = async (req, reply) => { +// const { +// customerId, +// type_of_water, +// capacity, +// quantity, +// start_date, +// end_date, +// time, +// frequency, // "daily" | "weekly_once" | "weekly_twice" | "weekly_thrice" | "weekly" +// weekly_count, // used only if frequency === "weekly" +// requested_suppliers +// } = req.body; + +// // helpers inside function (as you prefer) +// const parseCapacity = (v) => parseFloat((v || "0").toString().replace(/,/g, "")) || 0; +// const parseIntSafe = (v) => parseInt((v || "0").toString().replace(/,/g, ""), 10) || 0; +// const toISODate = (d) => d.toISOString().slice(0, 10); +// const mkUTCDate = (yyyy_mm_dd) => { +// const [y, m, d] = (yyyy_mm_dd || "").split("-").map(Number); +// return new Date(Date.UTC(y, (m || 1) - 1, d || 1)); +// }; +// const normalizeWeeklyCount = (freq, wc) => { +// if (freq === "weekly_once") return 1; +// if (freq === "weekly_twice") return 2; +// if (freq === "weekly_thrice") return 3; +// if (freq === "weekly") return wc || 1; +// return 1; +// }; +// const computeWeeklyDOWs = ({ anchorDow, weeklyCount }) => { +// if (weeklyCount === 1) return [anchorDow]; +// if (weeklyCount === 2) return [anchorDow, (anchorDow + 3) % 7]; +// if (weeklyCount === 3) return [anchorDow, (anchorDow + 2) % 7, (anchorDow + 4) % 7]; +// return [anchorDow]; +// }; +// const generateDates = ({ frequency, start_date, end_date, weekly_count }) => { +// const start = mkUTCDate(start_date); +// const end = mkUTCDate(end_date); +// if (isNaN(start) || isNaN(end)) throw new Error("Invalid start_date or end_date"); +// if (end < 
start) throw new Error("end_date must be after or equal to start_date"); + +// // ~3 months cap +// const maxMs = 92 * 24 * 60 * 60 * 1000; +// if ((end - start) > maxMs) throw new Error("Range exceeds 3 months"); + +// const out = []; +// if (frequency === "daily") { +// for (let d = new Date(start); d <= end; d.setUTCDate(d.getUTCDate() + 1)) { +// out.push(toISODate(d)); +// } +// return out; +// } + +// if (frequency.startsWith("weekly") || frequency === "weekly") { +// const wc = normalizeWeeklyCount(frequency, weekly_count); +// const dows = computeWeeklyDOWs({ anchorDow: start.getUTCDay(), weeklyCount: wc }); +// const set = new Set(dows); +// for (let d = new Date(start); d <= end; d.setUTCDate(d.getUTCDate() + 1)) { +// if (set.has(d.getUTCDay())) out.push(toISODate(d)); +// } +// return out; +// } + +// throw new Error("Unsupported frequency"); +// }; + +// try { +// if (!customerId || !type_of_water || !capacity || !quantity || +// !start_date || !end_date || !time || !frequency || !requested_suppliers) { +// return reply.code(400).send({ +// status_code: 400, +// message: "Missing required fields" +// }); +// } + +// const cap = parseCapacity(capacity); +// const qty = parseIntSafe(quantity); +// const total_required_capacity = cap * qty; + +// const dates = generateDates({ frequency, start_date, end_date, weekly_count }); +// if (dates.length === 0) { +// return reply.code(400).send({ status_code: 400, message: "No dates generated for given inputs" }); +// } + +// const doc = new RecurringRequestedBooking({ +// customerId, +// type_of_water, +// capacity, +// quantity, +// total_required_capacity, +// frequency, +// weekly_count: normalizeWeeklyCount(frequency, weekly_count), +// start_date, +// end_date, +// time, +// dates, +// requested_suppliers, +// status: "pending" +// }); + +// await doc.save(); + +// return reply.send({ +// status_code: 200, +// message: "Plan requested booking created successfully", +// count: dates.length, +// dates, +// 
data: doc +// }); +// } catch (err) { +// console.error(err); +// return reply.code(500).send({ +// status_code: 500, +// message: "Something went wrong while saving", +// error: err.message +// }); +// } +// }; +// controllers/plan.controller.js +//const RecurringRequestedBooking = require("../models/RecurringRequestedBooking"); + +// ---------- Helpers ---------- +const MONTHS = { + jan: 0, feb: 1, mar: 2, apr: 3, may: 4, jun: 5, + jul: 6, aug: 7, sep: 8, oct: 9, nov: 10, dec: 11, +}; + +const parseCapacity = (v) => parseFloat((v ?? "0").toString().replace(/,/g, "")) || 0; +const parseIntSafe = (v) => parseInt((v ?? "0").toString().replace(/,/g, ""), 10) || 0; +const toISODate = (d) => d.toISOString().slice(0, 10); + +/** + * Accepts: + * - "YYYY-MM-DD" + * - "DD-MMM-YYYY" + * - "DD-MMM-YYYY - HH:mm" (time portion ignored for date gen) + * Returns a Date in UTC midnight of that calendar day. + */ +const mkUTCDate = (input) => { + if (!input || typeof input !== "string") throw new Error("Invalid date string"); + + const s = input.trim(); + + // ISO: YYYY-MM-DD (optionally with time, but we only take the first three parts) + const iso = s.match(/^(\d{4})-(\d{2})-(\d{2})/); + if (iso) { + const y = Number(iso[1]); + const m = Number(iso[2]) - 1; + const d = Number(iso[3]); + return new Date(Date.UTC(y, m, d)); + } + + // D-MMM-YYYY (optional " - HH:mm") + const mmm = s.match(/^(\d{1,2})-([A-Za-z]{3})-(\d{4})(?:\s*-\s*(\d{1,2}):(\d{2}))?$/); + if (mmm) { + const d = Number(mmm[1]); + const mon = MONTHS[mmm[2].toLowerCase()]; + const y = Number(mmm[3]); + if (mon == null) throw new Error("Invalid month abbreviation in date"); + return new Date(Date.UTC(y, mon, d)); + } + + throw new Error("Unsupported date format. 
Use YYYY-MM-DD or DD-MMM-YYYY (- HH:mm)."); +}; + +const normalizeWeeklyCount = (freq, wc) => { + if (freq === "weekly_once") return 1; + if (freq === "weekly_twice") return 2; + if (freq === "weekly_thrice") return 3; + if (freq === "weekly") return wc || 1; + return 1; +}; + +const computeWeeklyDOWs = ({ anchorDow, weeklyCount }) => { + if (weeklyCount === 1) return [anchorDow]; + if (weeklyCount === 2) return [anchorDow, (anchorDow + 3) % 7]; + if (weeklyCount === 3) return [anchorDow, (anchorDow + 2) % 7, (anchorDow + 4) % 7]; + return [anchorDow]; +}; + +const generateDates = ({ frequency, start_date, end_date, weekly_count }) => { + const start = mkUTCDate(start_date); + const end = mkUTCDate(end_date); + + if (Number.isNaN(start.getTime()) || Number.isNaN(end.getTime())) { + throw new Error("Invalid start_date or end_date"); + } + if (end < start) throw new Error("end_date must be after or equal to start_date"); + + // ~3 months cap + const maxMs = 92 * 24 * 60 * 60 * 1000; + if ((end - start) > maxMs) throw new Error("Range exceeds 3 months"); + + const out = []; + + if (frequency === "daily") { + for (let d = new Date(start); d <= end; d.setUTCDate(d.getUTCDate() + 1)) { + out.push(toISODate(d)); + } + return out; + } + + if (frequency.startsWith("weekly") || frequency === "weekly") { + const wc = normalizeWeeklyCount(frequency, weekly_count); + const dows = computeWeeklyDOWs({ anchorDow: start.getUTCDay(), weeklyCount: wc }); + const set = new Set(dows); + for (let d = new Date(start); d <= end; d.setUTCDate(d.getUTCDate() + 1)) { + if (set.has(d.getUTCDay())) out.push(toISODate(d)); + } + return out; + } + + throw new Error("Unsupported frequency"); +}; + +const ensureRequestedSuppliers = (arr) => { + const inArr = Array.isArray(arr) ? arr : []; + return inArr.map((x) => ({ + supplierId: x?.supplierId ?? "", + quoted_amount: typeof x?.quoted_amount === "number" ? x.quoted_amount : 0, + time: x?.time ?? null, + status: x?.status ?? 
"pending", + })); +}; + +// ---------- Controller ---------- +exports.createRequestedPlanBooking = async (req, reply) => { + try { + const { + customerId, + type_of_water, + capacity, + quantity, + start_date, + end_date, + time, + frequency, // "daily" | "weekly_once" | "weekly_twice" | "weekly_thrice" | "weekly" + weekly_count, // used only if frequency === "weekly" + requested_suppliers + } = req.body || {}; + + // Basic presence check + const missing = [ + ["customerId", customerId], + ["type_of_water", type_of_water], + ["capacity", capacity], + ["quantity", quantity], + ["start_date", start_date], + ["end_date", end_date], + ["time", time], + ["frequency", frequency], + ["requested_suppliers", requested_suppliers], + ].filter(([k, v]) => v == null || (typeof v === "string" && v.trim() === "")); + + if (missing.length) { + return reply.code(400).send({ + status_code: 400, + message: `Missing required fields: ${missing.map(([k]) => k).join(", ")}` + }); + } + + // Validate frequency early + const ALLOWED_FREQ = new Set(["daily", "weekly_once", "weekly_twice", "weekly_thrice", "weekly"]); + if (!ALLOWED_FREQ.has(frequency)) { + return reply.code(400).send({ + status_code: 400, + message: "Invalid frequency. 
Allowed: daily, weekly_once, weekly_twice, weekly_thrice, weekly" + }); + } + + // Parse numbers + const cap = parseCapacity(capacity); + const qty = parseIntSafe(quantity); + const total_required_capacity = cap * qty; + + if (cap <= 0 || qty <= 0) { + return reply.code(400).send({ + status_code: 400, + message: "capacity and quantity must be positive numbers" + }); + } + + // Build dates + let dates; + try { + dates = generateDates({ frequency, start_date, end_date, weekly_count }); + } catch (e) { + return reply.code(400).send({ + status_code: 400, + message: e.message || "Invalid dates" + }); + } + + if (!Array.isArray(dates) || dates.length === 0) { + return reply.code(400).send({ + status_code: 400, + message: "No dates generated for the given inputs" + }); + } + + // Suppliers normalization + const suppliers = ensureRequestedSuppliers(requested_suppliers); + if (suppliers.length === 0) { + return reply.code(400).send({ + status_code: 400, + message: "requested_suppliers must contain at least one supplier" + }); + } + + const doc = new RecurringRequestedBooking({ + customerId, + type_of_water, + capacity, + quantity, + total_required_capacity, + frequency, + weekly_count: normalizeWeeklyCount(frequency, weekly_count), + start_date, + end_date, + time, + dates, + requested_suppliers: suppliers, + status: "pending" + }); + + await doc.save(); + + return reply.send({ + status_code: 200, + message: "Plan requested booking created successfully", + count: dates.length, + dates, + data: doc + }); + } catch (err) { + console.error(err); + return reply.code(500).send({ + status_code: 500, + message: "Something went wrong while saving", + error: err.message + }); + } +}; + + diff --git a/src/handlers/userHandler.js b/src/handlers/userHandler.js index eabf6fb1..a8710a36 100644 --- a/src/handlers/userHandler.js +++ b/src/handlers/userHandler.js @@ -3,7 +3,9 @@ const { User,Counter, generateBookingId } = require('../models/User') //const User = require("../models/User"); 
const Message = require("../models/Message"); const generator = require("generate-password"); -const bcrypt = require("bcrypt"); +//const bcrypt = require("bcrypt"); +const bcrypt = require('bcryptjs'); + const saltRounds = 10; const jwt = require('jsonwebtoken') const JWT_SECRET = 'your-secret-key'; @@ -412,22 +414,39 @@ exports.changePassword = async (req, reply) => { // Check the result of the update operation if (updateResult.nModified > 0) { // Fetch the updated user data (excluding password) - const updatedUser = await User.findOne({ phone }).select('-services.password.bcrypt'); + const updatedUser = await User.findOne({ phone }).select( + "-services.password.bcrypt" + ); // Generate a new token for the user const token = jwt.sign( - { id: updatedUser._id, phone: updatedUser.phone }, // You can include more user details if needed + { id: updatedUser._id, phone: updatedUser.phone }, // Include user details as needed JWT_SECRET, // Use your secret key from environment variables - { expiresIn: '1h' } // Token expiration time + { expiresIn: "1h" } // Token expiration time ); return reply.send({ simplydata: { error: false, - passwordChanged: true, - userData: updatedUser, // Include updated user data - token, // Include the token in the response - message: "Password updated successfully.", + apiversion: "1.0.0", + access_token: token, // Include the token in the response + buildingName: updatedUser.buildingName, + email: updatedUser.emails, + phone: updatedUser.phone, + customerId: updatedUser.customerId, + username: updatedUser.username, + address1: updatedUser.profile.address1, + address2: updatedUser.profile.address2, + phoneVerified: updatedUser.phoneVerified, + oneTimePasswordSetFlag: updatedUser.oneTimePasswordSetFlag, + latitude: updatedUser.latitude, + longitude: updatedUser.longitude, + type: updatedUser.profile.role, + loginType: updatedUser.profile.role[0], + typeasobj: updatedUser.profile.role.reduce((obj, role, index) => { + obj[index] = role; + 
return obj; + }, {}), }, }); } else { @@ -445,74 +464,75 @@ exports.changePassword = async (req, reply) => { } }; -exports.verifyOldNewPassword = async (req, reply) => { - try { - const { phone, oldPassword, newPassword } = req.body; - - // Check if the user exists with the provided mobile number - const user = await User.findOne({ phone }); - if (!user) { - return reply.send({ - armintatankdata: { - error: true, - code: 10009, - message: "User not found.", - }, - }); - } - - // Verify the old password - const isOldPasswordCorrect = await bcrypt.compare(oldPassword, user.services.password.bcrypt); - if (!isOldPasswordCorrect) { - return reply.send({ - armintatankdata: { - error: true, - code: 10012, - message: "Old password is incorrect.", - }, - }); - } - - // Hash the new password - const hashedNewPassword = await bcrypt.hash(newPassword, 10); // Ensure you use bcrypt.hash here - - // Update the password in the database - const updateResult = await User.updateOne( - { phone }, - { - $set: { - "services.password.bcrypt": hashedNewPassword, - oneTimePasswordSetFlag: false, - }, - } - ); - - // Check if the update was successful - if (updateResult.nModified > 0) { - // Fetch the updated user details to send back in the response - const updatedUser = await User.findOne({ phone }).select('-services.password.bcrypt'); // Exclude the password - return reply.send({ - armintatankdata: { - error: false, - message: "Password changed successfully.", - updatedUser, // Include the updated user details - }, - }); - } else { - return reply.send({ - armintatankdata: { - error: true, - code: 10011, - message: "Failed to update the password. 
Try again.", - }, - }); - } - } catch (err) { - console.error("Error in changePassword:", err); - throw boom.boomify(err); - } -}; +// exports.verifyOldNewPassword = async (req, reply) => { +// try { +// const { phone, oldPassword, newPassword } = req.body; + +// // Check if the user exists with the provided mobile number +// const user = await User.findOne({ phone }); +// if (!user) { +// return reply.send({ +// armintatankdata: { +// error: true, +// code: 10009, +// message: "User not found.", +// }, +// }); +// } + +// // Verify the old password +// const isOldPasswordCorrect = await bcrypt.compare(oldPassword, user.services.password.bcrypt); +// if (!isOldPasswordCorrect) { +// return reply.send({ +// armintatankdata: { +// error: true, +// code: 10012, +// message: "Old password is incorrect.", +// }, +// }); +// } + +// // Hash the new password +// const hashedNewPassword = await bcrypt.hash(newPassword, 10); // Ensure you use bcrypt.hash here + +// // Update the password in the database +// const updateResult = await User.updateOne( +// { phone }, +// { +// $set: { +// "services.password.bcrypt": hashedNewPassword, +// oneTimePasswordSetFlag: false, +// }, +// } +// ); + +// // Check if the update was successful +// if (updateResult.nModified > 0) { +// // Fetch the updated user details to send back in the response +// const updatedUser = await User.findOne({ phone }).select('-services.password.bcrypt'); // Exclude the password + +// return reply.send({ +// armintatankdata: { +// error: false, +// message: "Password changed successfully.", +// updatedUser, // Include the updated user details +// }, +// }); +// } else { +// return reply.send({ +// armintatankdata: { +// error: true, +// code: 10011, +// message: "Failed to update the password. 
Try again.", +// }, +// }); +// } +// } catch (err) { +// console.error("Error in changePassword:", err); +// throw boom.boomify(err); +// } +// }; diff --git a/src/index.js b/src/index.js index 19dfed5c..22ec213f 100644 --- a/src/index.js +++ b/src/index.js @@ -6,8 +6,10 @@ const tankersController = require("./controllers/tankersController.js"); const createConnectionController = require("./controllers/createConnectionController"); const storeController = require("./controllers/storeController.js") const boom = require("boom"); -const bcrypt = require('bcrypt'); -const { ProfilePictureStore,generateinstallationId,Store} = require("./models/store"); +//const bcrypt = require('bcrypt'); +const bcrypt = require('bcryptjs'); + +const { ProfilePictureStore,generateinstallationId,Store, Survey, PlumbingWorkPictures, ElectrictyWorkPictures, MaterialRecievedPictures, Support, ManualTestVideo, ProfilePictureInstallTeamMember} = require("./models/store"); const cors = require('fastify-cors'); @@ -27,8 +29,9 @@ const fastify = require("fastify")({ // const Fastify = require("fastify"); fastify.register(cors, { - origin: 'http://localhost:3001', // Allow only your frontend URL + origin: 'http://armintaaqua.com:3000', // Allow only your frontend URL methods: ['GET', 'POST', 'PUT', 'DELETE'], // Allowed HTTP methods + allowedContentTypes: ['application/json', 'multipart/form-data'], }); @@ -166,107 +169,19 @@ fastify.post("/api/login", { properties: { phone: { type: "string" }, password: { type: "string" }, - fcmId: { type: "string" }, // Add this line - deviceId: { type: "string" } // Add this line + fcmIds: { type: "array", items: { type: "string" }, default: [] }, + deviceId: { type: "string" }, }, }, }, async handler(req, reply) { - // Pass fcmId and deviceId to the loginUser function - const { phone, password, fcmId, deviceId } = req.body; - const loginObject = await userController.loginUser(req, fcmId, deviceId); - - if (loginObject.same) { - const phoneVerified = 
loginObject.user.phoneVerified; - const oneTimePasswordSetFlag = loginObject.user.oneTimePasswordSetFlag; - console.log( - "oneTimePasswordSetFlag is ......", - oneTimePasswordSetFlag, - typeof oneTimePasswordSetFlag, - typeof phoneVerified - ); - if (!phoneVerified) { - reply.send({ - simplydata: { - error: false, - phoneVerified: false, - phone: loginObject.user.phone, - oneTimePasswordSetFlag: oneTimePasswordSetFlag, - message: "Please Verify your phone number", - }, - }); - } else if (oneTimePasswordSetFlag) { - reply.send({ - simplydata: { - error: false, - phoneVerified: phoneVerified, - phone: loginObject.user.phone, - oneTimePasswordSetFlag: true, - message: "Password must be reset", - }, - }); - } else { - const token = fastify.jwt.sign( - { - username: loginObject.user.username, - userId: loginObject.user._id, - roles: loginObject.user.profile.role, - }, - { expiresIn: "30d" } - ); - const arr = loginObject.user.profile.role; - const arrayToString = JSON.stringify(Object.assign({}, arr)); // convert array to string - const stringToJsonObject = JSON.parse(arrayToString); // convert string to json object - const c_id = loginObject.user.customerId; - const profilePicture = await ProfilePicture.findOne({ customerId: c_id }); - - if (!profilePicture) { - reply.send({ - simplydata: { - error: false, - apiversion: fastify.config.APIVERSION, - access_token: token, - buildingName: loginObject.user.buildingName, - email: loginObject.user.emails, - phone: loginObject.user.phone, - customerId: loginObject.user.customerId, - username: loginObject.user.username, - address1: loginObject.user.profile.address1, - address2: loginObject.user.profile.address2, - phoneVerified: loginObject.user.phoneVerified, - oneTimePasswordSetFlag: loginObject.user.oneTimePasswordSetFlag, - latitude: loginObject.user.latitude, - longitude: loginObject.user.longitude, - type: loginObject.user.profile.role, - typeasobj: stringToJsonObject, - }, - }); - } else { - reply.send({ - simplydata: { 
- error: false, - apiversion: fastify.config.APIVERSION, - access_token: token, - picture: profilePicture.picture, - email: loginObject.user.emails, - phone: loginObject.user.phone, - buildingName: loginObject.user.buildingName, - customerId: loginObject.user.customerId, - username: loginObject.user.username, - address1: loginObject.user.profile.address1, - address2: loginObject.user.profile.address2, - phoneVerified: loginObject.user.phoneVerified, - oneTimePasswordSetFlag: loginObject.user.oneTimePasswordSetFlag, - latitude: loginObject.user.latitude, - longitude: loginObject.user.longitude, - type: loginObject.user.profile.role, - typeasobj: stringToJsonObject, - }, - }); - } - } - } else { - reply.send({ + const { phone, password, fcmIds, deviceId } = req.body; + console.log(password, phone); + + const loginObject = await userController.loginUser(req, fcmIds, deviceId); + console.log("loginObject",loginObject) + if (!loginObject.same) { + return reply.send({ simplydata: { error: true, code: 400, @@ -274,10 +189,212 @@ fastify.post("/api/login", { }, }); } + + const user = loginObject.user; + const phoneVerified = user.phoneVerified; + const oneTimePasswordSetFlag = user.oneTimePasswordSetFlag; + + if (fcmIds.length > 0) { + await User.updateOne( + { customerId: user.customerId }, + { $addToSet: { fcmIds: { $each: fcmIds } } } + ); + } + + if (!phoneVerified) { + return reply.send({ + simplydata: { + error: false, + phoneVerified: false, + phone: loginObject.isStaff ? loginObject.staffMember.phone : user.phone, + oneTimePasswordSetFlag, + message: "Please Verify your phone number", + }, + }); + } + + // if (oneTimePasswordSetFlag) { + // return reply.send({ + // simplydata: { + // error: false, + // phoneVerified, + // phone: loginObject.isStaff ? loginObject.staffMember.phone : user.phone, + // oneTimePasswordSetFlag: true, + // message: "Password must be reset", + // }, + // }); + // } + + const tokenPayload = { + username: loginObject.isStaff ? 
loginObject.staffMember.name : user.username, + userId: user._id, + roles: user.profile.role, + }; + + const token = fastify.jwt.sign(tokenPayload, { expiresIn: "30d" }); + + const profilePicture = await ProfilePicture.findOne({ customerId: user.customerId }); + const responsePayload = { + simplydata: { + error: false, + apiversion: fastify.config.APIVERSION, + access_token: token, + buildingName: user.buildingName, + email: user.emails, + phone: loginObject.isStaff ? loginObject.staffMember.phone : user.phone, + customerId: user.customerId, + username: loginObject.isStaff ? loginObject.staffMember.name : user.username, + address1: user.profile.address1, + address2: user.profile.address2, + phoneVerified: user.phoneVerified, + oneTimePasswordSetFlag: user.oneTimePasswordSetFlag, + latitude: user.latitude, + longitude: user.longitude, + type: user.profile.role, + loginType: loginObject.isStaff ? "staff" : "user", + }, + }; + + if (loginObject.isStaff) { + let allMotorAccess = loginObject.staffMember.all_motor_access; + + // Normalize the value if it matches the given variations + if (["view", "view only", "View", "View Only"].includes(allMotorAccess)) { + allMotorAccess = "view"; + } + + responsePayload.simplydata.all_motor_access = allMotorAccess; + } + + if (profilePicture) { + responsePayload.simplydata.picture = profilePicture.picture; + } + + reply.send(responsePayload); }, }); +// fastify.post("/api/login", { +// schema: { +// description: "This is for Login User", +// tags: ["Login"], +// summary: "This is for User Login", +// body: { +// type: "object", +// required: ["phone", "password"], +// properties: { +// phone: { type: "string", description: "Registered phone number" }, +// password: { type: "string", description: "Password for authentication" }, +// fcmIds: { type: "array", items: { type: "string" }, default: [] }, +// deviceId: { type: "string" } +// } +// } +// }, +// async handler(req, reply) { +// try { +// const { phone, password, fcmIds = [], 
deviceId } = req.body; + +// // Find user by phone +// const user = await User.findOne({ phone }); +// console.log("user",user) +// if (!user) { +// return reply.code(400).send({ simplydata: { error: true, message: "User not found" } }); +// } + +// // Verify password (bcrypt) +// const isMatch = await bcrypt.compare(password, user.services.password.bcrypt); +// if (!isMatch) { +// return reply.code(400).send({ simplydata: { error: true, message: "Invalid credentials" } }); +// } + +// // Update FCM Ids if present +// if (fcmIds.length > 0) { +// await User.updateOne( +// { customerId: user.customerId }, +// { $addToSet: { fcmIds: { $each: fcmIds } } } +// ); +// } + +// // Phone Verification +// if (!user.phoneVerified) { +// return reply.send({ +// simplydata: { +// error: false, +// phoneVerified: false, +// phone: user.phone, +// oneTimePasswordSetFlag: user.oneTimePasswordSetFlag, +// message: "Please Verify your phone number" +// } +// }); +// } + +// // Password reset flag +// if (user.oneTimePasswordSetFlag) { +// return reply.send({ +// simplydata: { +// error: false, +// phoneVerified: user.phoneVerified, +// phone: user.phone, +// oneTimePasswordSetFlag: true, +// message: "Password must be reset" +// } +// }); +// } + +// // JWT Token Payload +// const tokenPayload = { +// username: user.username, +// userId: user._id, +// roles: user.profile.role +// }; + +// // JWT Token Generation (matches /api/storelogin style) +// const token = fastify.jwt.sign(tokenPayload, /* no direct secret here, assumes plugin config */{ expiresIn: "30d" }); + +// // Profile Picture +// const profilePicture = await ProfilePicture.findOne({ customerId: user.customerId }); + +// // Response Construction +// const responsePayload = { +// simplydata: { +// error: false, +// message: "Login successful", +// apiversion: fastify.config ? 
fastify.config.APIVERSION : undefined, +// access_token: token, +// buildingName: user.buildingName, +// email: user.emails, +// phone: user.phone, +// customerId: user.customerId, +// username: user.username, +// address1: user.profile.address1, +// address2: user.profile.address2, +// phoneVerified: user.phoneVerified, +// oneTimePasswordSetFlag: user.oneTimePasswordSetFlag, +// latitude: user.latitude, +// longitude: user.longitude, +// type: user.profile.role, +// loginType: "user" +// } +// }; + +// if (profilePicture) { +// responsePayload.simplydata.picture = profilePicture.picture; +// } + +// return reply.send(responsePayload); + +// } catch (error) { +// console.error("Login Error:", error); +// return reply.code(500).send({ simplydata: { error: true, message: "Internal server error" } }); +// } +// } +// }); + + + + + fastify.post("/api/installotplogin", { schema: { description: "This is for Login Otp Installation", @@ -364,7 +481,7 @@ console.log(user) phoneVerified: user.phoneVerified, oneTimePasswordSetFlag: user.oneTimePasswordSetFlag, type: user.profile.role, - fcmId: user.fcmId, + fcmIds: user.fcmIds, team: user.team, city: user.city, manager: user.manager, @@ -415,105 +532,105 @@ console.log(user) }); -fastify.post("/api/storelogin", { - schema: { - description: "This is for Store Login", - tags: ["Store-Data"], - summary: "This is for Store Login", - body: { - type: "object", - required: ["phone", "password"], - properties: { - phone: { type: "string" }, - password: { type: "string" }, - }, - }, - }, - async handler(request, reply) { - try { - let store = await Store.findOne({ phone: request.body.phone }); - if (!store) { - return reply.code(400).send({ - simplydata: { - error: true, - code: 400, - message: "Invalid Phone or Password", - }, - }); - } +// fastify.post("/api/storelogin", { +// schema: { +// description: "This is for Store Login", +// tags: ["Store-Data"], +// summary: "This is for Store Login", +// body: { +// type: "object", +// 
required: ["phone", "password"], +// properties: { +// phone: { type: "string" }, +// password: { type: "string" }, +// }, +// }, +// }, +// async handler(request, reply) { +// try { +// let store = await Store.findOne({ phone: request.body.phone }); +// if (!store) { +// return reply.code(400).send({ +// simplydata: { +// error: true, +// code: 400, +// message: "Invalid Phone or Password", +// }, +// }); +// } - const isMatch = await bcrypt.compare(request.body.password, store.services.password.bcrypt); - if (!isMatch) { - return reply.code(400).send({ - simplydata: { - error: true, - code: 400, - message: "Invalid Phone or Password", - }, - }); - } +// const isMatch = await bcrypt.compare(request.body.password, store.services.password.bcrypt); +// if (!isMatch) { +// return reply.code(400).send({ +// simplydata: { +// error: true, +// code: 400, +// message: "Invalid Phone or Password", +// }, +// }); +// } - const token = fastify.jwt.sign( - { - storename: store.storename, - storeId: store._id, - roles: store.profile.role, - }, - { expiresIn: "30d" } - ); +// const token = fastify.jwt.sign( +// { +// storename: store.storename, +// storeId: store._id, +// roles: store.profile.role, +// }, +// { expiresIn: "30d" } +// ); - var profilePicture = await ProfilePictureStore.findOne({ storeId: store.storeId }); +// var profilePicture = await ProfilePictureStore.findOne({ storeId: store.storeId }); - if (!profilePicture) { - reply.send({ - simplydata: { - error: false, - apiversion: fastify.config.APIVERSION, - access_token: token, - email: store.emails, - phone: store.phone, - storeId: store.storeId, - storename: store.storename, - office_address: store.profile.office_address, - phoneVerified: store.phoneVerified, - oneTimePasswordSetFlag: store.oneTimePasswordSetFlag, - latitude: store.latitude, - longitude: store.longitude, - description: store.description, - type: store.profile.role, - typeasobj: JSON.stringify(Object.assign({}, store.profile.role)), - }, - }); - } 
else { - reply.send({ - simplydata: { - error: false, - apiversion: fastify.config.APIVERSION, - access_token: token, - picture: profilePicture.picture, - email: store.emails, - phone: store.phone, - storeId: store.storeId, - storename: store.storename, - office_address: store.profile.office_address, - phoneVerified: store.phoneVerified, - oneTimePasswordSetFlag: store.oneTimePasswordSetFlag, - latitude: store.latitude, - longitude: store.longitude, - description: store.description, - type: store.profile.role, - typeasobj: JSON.stringify(Object.assign({}, store.profile.role)), - }, - }); - } - } catch (err) { - throw boom.boomify(err); - } - }, -}); +// if (!profilePicture) { +// reply.send({ +// simplydata: { +// error: false, +// apiversion: fastify.config.APIVERSION, +// access_token: token, +// email: store.emails, +// phone: store.phone, +// storeId: store.storeId, +// storename: store.storename, +// office_address: store.profile.office_address, +// phoneVerified: store.phoneVerified, +// oneTimePasswordSetFlag: store.oneTimePasswordSetFlag, +// latitude: store.latitude, +// longitude: store.longitude, +// description: store.description, +// type: store.profile.role, +// typeasobj: JSON.stringify(Object.assign({}, store.profile.role)), +// }, +// }); +// } else { +// reply.send({ +// simplydata: { +// error: false, +// apiversion: fastify.config.APIVERSION, +// access_token: token, +// picture: profilePicture.picture, +// email: store.emails, +// phone: store.phone, +// storeId: store.storeId, +// storename: store.storename, +// office_address: store.profile.office_address, +// phoneVerified: store.phoneVerified, +// oneTimePasswordSetFlag: store.oneTimePasswordSetFlag, +// latitude: store.latitude, +// longitude: store.longitude, +// description: store.description, +// type: store.profile.role, +// typeasobj: JSON.stringify(Object.assign({}, store.profile.role)), +// }, +// }); +// } +// } catch (err) { +// throw boom.boomify(err); +// } +// }, +// }); 
fastify.get("/api/reset_token/:customerId", { @@ -581,7 +698,9 @@ const { Schema } = require("mongoose"); fastify.register(require("./routes/usersRoute")); fastify.register(require("./routes/tanksRoute")); + fastify.register(require("./routes/createConnectionsRoute")); + fastify.register(require("./routes/tankersRoute.js")); fastify.register(require("./routes/supplierRoute")); fastify.register(require("./routes/supplierOrdersRoutes")); @@ -589,6 +708,8 @@ fastify.register(require("./routes/friendRequestRoute")); fastify.register(require("./routes/adminRoute")); fastify.register(require("./routes/storeRoute")); fastify.register(require("./routes/departmentRoute.js")); +fastify.register(require("./routes/installationRoute.js")); + // Testing route allows for retrieving a user by phone so one can see what is the phone verification code sent for a given user's phone @@ -601,14 +722,57 @@ fastify.register(require("./routes/forTestingRoute")); const {Storage} = require('@google-cloud/storage'); const { Supplier, profilePictureSupplier } = require("./models/supplier"); -const multer = require('fastify-multer'); +//const multer = require('fastify-multer'); +//const fastifyMulter = require('fastify-multer'); +//const upload = fastifyMulter.single('file'); +//fastify.register(fastifyMulter.contentParser); +const multer = require('multer'); +//const upload = multer({ dest: 'uploads/' }); const { ProfilePictureInstall, Install } = require("./models/store.js"); +const { TeamMemberProfilePicture, CompanyProfilePicture, Deparments, IndianLocations } = require("./models/Department.js"); fastify.register(require('fastify-formbody')); // fastify.register(multer.contentParser); // const multipart = require('fastify-multipart'); +fastify.addContentTypeParser('multipart/form-data', (request, payload, done) => { + done(null, payload); +}); +//const multipart = require('@fastify/multipart'); + +// fastify.register(multipart, { +// limits: { +// fileSize: 10 * 1024 * 1024, // 10 MB +// }, 
+// attachFieldsToBody: true, +// }); // fastify.register(multipart); +// const fastifyMultipart = require('@fastify/multipart'); + +// fastify.register(fastifyMultipart, { +// limits: { +// fieldNameSize: 100, +// fieldSize: 1000000, +// fields: 10, +// fileSize: 1000000, +// files: 10, +// headerPairs: 2000 +// } +// }); + + const formbody = require('@fastify/formbody'); +fastify.register(formbody); + + +// const multipart = require("@fastify/multipart"); + +// // register multipart +// fastify.register(require("@fastify/multipart"), { +// limits: { +// fileSize: 10 * 1024 * 1024, // 10 MB +// }, +// attachFieldsToBody: true, +// }); const gc = new Storage({ keyFilename : path.join(__dirname, "../src/arminta-tank-keyFile.json"), projectId : 'arminta-tank' @@ -630,9 +794,11 @@ const storage = new Storage({ // }); // Register fastify-file-upload plugin -fastify.register(require('fastify-multipart')); - +//fastify.register(require('fastify-multipart')); +// fastify.register(require("@fastify/multipart"), { +// attachFieldsToBody: true, +// }); // fastify.post('/upload', async (request, reply) => { // try { @@ -727,7 +893,446 @@ fastify.register(require('fastify-multipart')); // } // }); +// fastify.post('/api/uploads_team_profile/:customerId', async (request, reply) => { +// try { +// const customerId = request.params.customerId; +// const data = await request.file(); + +// const bucketName = 'arminta_profile_pictures'; +// const filePath = `arminta_team_profiles/${data.filename}`; + +// const file = storage.bucket(bucketName).file(filePath); +// const writeStream = file.createWriteStream(); + +// data.file.pipe(writeStream); + +// await new Promise((resolve, reject) => { +// writeStream.on('finish', resolve); +// writeStream.on('error', reject); +// }); + +// // Make file public +// await file.makePublic(); +// const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`; + +// // Update DB with async/await +// const picture = await 
TeamMemberProfilePicture.findOneAndUpdate( +// { customerId }, +// { picture: publicUrl }, +// { new: true, upsert: true } +// ); + +// reply.send({ picture: publicUrl }); +// } catch (err) { +// console.error(err); +// reply.code(500).send({ error: 'An error occurred', details: err.message }); +// } +// }); + +// fastify.post('/api/uploads_team_profile/:customerId', async (request, reply) => { +// try { +// const { customerId } = request.params; + +// // get uploaded file +// //const data = await request.file(); +// const data = request.body.file; +// if (!data) { +// return reply.code(400).send({ error: 'No file uploaded' }); +// } + +// const bucketName = 'arminta_profile_pictures'; +// const filePath = `arminta_team_profiles/${data.filename}`; + +// const file = storage.bucket(bucketName).file(filePath); +// const writeStream = file.createWriteStream(); + +// data.file.pipe(writeStream); +// console.log("Logs",request.headers['content-type']); +// await new Promise((resolve, reject) => { +// writeStream.on('finish', resolve); +// writeStream.on('error', reject); +// }); + +// // make file public +// await file.makePublic(); +// const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`; + +// // save in DB +// const picture = await TeamMemberProfilePicture.findOneAndUpdate( +// { customerId }, +// { picture: publicUrl }, +// { new: true, upsert: true } +// ); + +// return reply.send({ picture: publicUrl }); + +// } catch (err) { +// request.log.error(err); +// return reply.code(500).send({ error: 'Upload failed', details: err.message }); +// } +// }); +const fastifyMulter = require('fastify-multer'); +const mime = require("mime-types"); +const upload = fastifyMulter({ + dest: 'uploads/', + limits: { + fieldNameSize: 100, + fieldSize: 1000000, + fields: 10, + fileSize: 1000000000000000, + files: 10, + headerPairs: 2000 + } +}); + +fastify.register(upload.contentParser); + +fastify.post('/api/uploads_team_profile/:customerId', { + preHandler: 
upload.single('file') +}, async (request, reply) => { + try { + const { customerId } = request.params; + const file = await request.file; // Uncomment this line + const formData = new FormData(); + formData.append('file', file); + const bucketName = 'arminta_profile_pictures'; + const filePath = `arminta_team_profiles/${file.originalname}`; + + const fileBuffer = await fs.promises.readFile(file.path); + + await storage.bucket(bucketName).file(filePath).save(fileBuffer); + + // make file public + await storage.bucket(bucketName).file(filePath).makePublic(); + const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`; + + // save in DB + const picture = await TeamMemberProfilePicture.findOneAndUpdate( + { customerId }, + { picture: publicUrl }, + { new: true, upsert: true } + ); + + return reply.send({ picture: publicUrl }); + + } catch (err) { + request.log.error(err); + return reply.code(500).send({ error: 'Upload failed', details: err.message }); + } +}); + +fastify.post('/api/uploads_admin_profile/:customerId', { + preHandler: upload.single('file') +}, async (request, reply) => { + try { + const { customerId } = request.params; + const file = await request.file; // Uncomment this line + const formData = new FormData(); + formData.append('file', file); + const bucketName = 'arminta_profile_pictures'; + const filePath = `arminta_team_profiles/${file.originalname}`; + + const fileBuffer = await fs.promises.readFile(file.path); + + await storage.bucket(bucketName).file(filePath).save(fileBuffer); + + // make file public + await storage.bucket(bucketName).file(filePath).makePublic(); + const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`; + + // save in DB + const picture = await AdminProfilePicture.findOneAndUpdate( + { customerId }, + { picture: publicUrl }, + { new: true, upsert: true } + ); + + return reply.send({ picture: publicUrl }); + + } catch (err) { + request.log.error(err); + return reply.code(500).send({ error: 
// POST /api/uploads_installation_profile/:installationId
// Accepts a single multipart image (field "file"), stores it in GCS, and
// records the public URL on the installation profile-picture doc and on the
// matching department doc (departmentId === installationId).
fastify.post("/api/uploads_installation_profile/:installationId", {
  preHandler: upload.single("file"), // multer/fastify-multer populates request.file
}, async (request, reply) => {
  try {
    const { installationId } = request.params;
    const file = request.file;

    if (!file) {
      return reply.code(400).send({ error: "No file uploaded (expected field name 'file')." });
    }

    // Basic image-type validation ("image/jpg" kept as a lenient alias some clients send).
    const allowed = ["image/jpeg", "image/jpg", "image/png"];
    if (!allowed.includes(file.mimetype)) {
      return reply.code(400).send({ error: "Only JPEG/PNG images are allowed." });
    }

    const bucketName = "arminta_profile_pictures";
    const ext = mime.extension(file.mimetype) || (file.originalname.split(".").pop() || "png");
    const safeBase = path.parse(file.originalname).name.replace(/[^\w.-]/g, "_");
    const filePath = `arminta_team_profiles/${safeBase}-${Date.now()}.${ext}`;

    // FIX: support both multer storage engines — memoryStorage sets file.buffer,
    // diskStorage sets file.path (same pattern the electricity-work route uses).
    const fileBuffer = file.buffer
      ? file.buffer
      : await fs.promises.readFile(file.path);

    // Upload to GCS.
    const bucket = storage.bucket(bucketName);
    const gcsFile = bucket.file(filePath);
    await gcsFile.save(fileBuffer, {
      resumable: false,
      contentType: file.mimetype,
      public: true,
      metadata: { cacheControl: "public, max-age=31536000" },
    });
    await gcsFile.makePublic();

    const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;

    // 1) Upsert installation profile picture collection.
    await ProfilePictureInstall.findOneAndUpdate(
      { installationId },
      { picture: publicUrl },
      { new: true, upsert: true }
    );

    // 2) Update department doc where departmentId === installationId.
    const deptUpdate = await Deparments.findOneAndUpdate(
      { departmentId: installationId },
      { picture: publicUrl },
      { new: true }
    );

    return reply.send({
      installationId,
      picture: publicUrl,
      departmentUpdated: Boolean(deptUpdate),
      message: deptUpdate
        ? "Upload successful. Department picture updated."
        : "Upload successful. No department matched this installationId.",
    });
  } catch (err) {
    request.log.error(err);
    return reply.code(500).send({ error: "Upload failed", details: err.message });
  } finally {
    // Best effort: remove the temp file when diskStorage was used.
    try { if (request.file?.path) await fs.promises.unlink(request.file.path); } catch {}
  }
});
// POST /api/uploads_company_profile/:customerId
// Streams a single multipart file to GCS and upserts the public URL on the
// company profile-picture doc for this customer.
fastify.post('/api/uploads_company_profile/:customerId', async (request, reply) => {
  try {
    const customerId = request.params.customerId;
    const data = await request.file();

    // FIX: guard against requests that carry no multipart file part.
    if (!data) {
      return reply.code(400).send({ error: 'No file uploaded' });
    }

    const bucketName = 'arminta_profile_pictures';
    const filePath = `arminta_company_profiles/${data.filename}`;

    const file = storage.bucket(bucketName).file(filePath);
    // FIX: record the content type so GCS serves the image correctly instead
    // of defaulting to application/octet-stream.
    const writeStream = file.createWriteStream({
      metadata: { contentType: data.mimetype },
    });

    data.file.pipe(writeStream);

    await new Promise((resolve, reject) => {
      writeStream.on('finish', resolve);
      writeStream.on('error', reject);
    });

    // Make file public
    await file.makePublic();
    const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;

    // Upsert DB record with the new URL.
    const picture = await CompanyProfilePicture.findOneAndUpdate(
      { customerId },
      { picture: publicUrl },
      { new: true, upsert: true }
    );

    reply.send({ picture: publicUrl });
  } catch (err) {
    console.error(err);
    reply.code(500).send({ error: 'An error occurred', details: err.message });
  }
});
// POST /api/uploads_installation_TeamMember_profile/:installationId/:teamMemberId
// Uploads a team member's profile image to GCS, then records the URL on:
//  1) the team-member profile-picture collection (keyed by both ids),
//  2) the department doc whose departmentId matches installationId,
//  3) the matching nested team member inside Install.team_member.team_member[].
fastify.post("/api/uploads_installation_TeamMember_profile/:installationId/:teamMemberId", {
  preHandler: upload.single("file"),
}, async (request, reply) => {
  try {
    const { installationId, teamMemberId } = request.params;
    const file = request.file;

    if (!file) {
      return reply.code(400).send({ error: "No file uploaded (field name 'file')." });
    }

    // Validate image type ("image/jpg" kept as a lenient alias some clients send).
    const allowed = ["image/jpeg", "image/jpg", "image/png"];
    if (!allowed.includes(file.mimetype)) {
      return reply.code(400).send({ error: "Only JPEG/PNG images are allowed." });
    }

    // Build GCS path from a sanitized base name + timestamp.
    const ext = (mime.extension(file.mimetype) || path.extname(file.originalname).slice(1) || "png").toLowerCase();
    const safeBase = path.parse(file.originalname).name.replace(/[^\w.-]/g, "_");
    const filePath = `arminta_team_profiles/${safeBase}-${Date.now()}.${ext}`;
    const bucketName = "arminta_profile_pictures";

    // Upload (support both multer storages: buffer = memoryStorage, path = diskStorage).
    const buffer = file.buffer ? file.buffer : await fs.promises.readFile(file.path);
    const bucket = storage.bucket(bucketName);
    const gcsFile = bucket.file(filePath);
    await gcsFile.save(buffer, {
      resumable: false,
      public: true,
      contentType: file.mimetype,
      metadata: { cacheControl: "public, max-age=31536000" },
    });
    await gcsFile.makePublic();

    const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;

    // FIX: single upsert keyed by BOTH ids. The original upserted twice —
    // once by { installationId, teamMemberId } and again by { teamMemberId }
    // alone — and the looser second query could match and overwrite a doc
    // belonging to a different installation.
    await ProfilePictureInstallTeamMember.findOneAndUpdate(
      { installationId, teamMemberId },
      { picture: publicUrl },
      { new: true, upsert: true }
    );

    // Update department picture where departmentId === installationId.
    const deptUpdate = await Deparments.findOneAndUpdate(
      { departmentId: installationId },
      { picture: publicUrl },
      { new: true }
    );

    // Update the nested team-member element via arrayFilters.
    let teamMemberUpdated = false;
    if (teamMemberId) {
      const res = await Install.updateOne(
        { installationId },
        {
          $set: {
            "team_member.team_member.$[tm].picture": publicUrl
          }
        },
        {
          arrayFilters: [{ "tm.teamMemberId": teamMemberId }]
        }
      );
      teamMemberUpdated = res.modifiedCount > 0;
    }

    return reply.send({
      installationId,
      teamMemberId: teamMemberId || null,
      picture: publicUrl,
      departmentUpdated: Boolean(deptUpdate),
      teamMemberUpdated,
      message: teamMemberId
        ? (teamMemberUpdated
            ? "Upload successful. Installation + team member picture updated."
            : "Upload successful. Team member not found under this installation.")
        : "Upload successful. Installation picture updated.",
    });
  } catch (err) {
    request.log.error(err);
    return reply.code(500).send({ error: "Upload failed", details: err.message });
  } finally {
    // Best effort: remove the temp file when diskStorage was used.
    try { if (request.file?.path) await fs.promises.unlink(request.file.path); } catch {}
  }
});
// POST /api/uploads-electricty-work/:customerId/:installationId
// Uploads one or more electricity-work photos to GCS and appends their public
// URLs to the customer/installation record.
fastify.post(
  "/api/uploads-electricty-work/:customerId/:installationId",
  { preHandler: upload.any() }, // accept any multipart file fields
  async (request, reply) => {
    try {
      const { customerId, installationId } = request.params;
      const incoming = request.files;

      if (!incoming || incoming.length === 0) {
        return reply.code(400).send({ error: "No files uploaded" });
      }

      const bucketName = "arminta_profile_pictures";
      const uploadedUrls = [];

      for (const photo of incoming) {
        const uniqueFileName = `${Date.now()}-${Math.random()
          .toString(36)
          .substring(7)}-${photo.originalname}`;
        const filePath = `electricty_work_picture/${uniqueFileName}`;

        // memoryStorage exposes a buffer; diskStorage leaves a temp path on disk.
        const contents = photo.buffer ?? (await fs.promises.readFile(photo.path));

        const target = storage.bucket(bucketName).file(filePath);
        await target.save(contents);
        await target.makePublic();

        const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;
        uploadedUrls.push(publicUrl);
        console.log(`✅ Uploaded: ${publicUrl}`);
      }

      // Append the new URLs as { url } objects on the (upserted) record.
      const updatedRecord = await ElectrictyWorkPictures.findOneAndUpdate(
        { customerId, installationId },
        { $push: { pictureUrl: { $each: uploadedUrls.map((url) => ({ url })) } } },
        { new: true, upsert: true }
      );

      return reply.send({
        success: true,
        pictures: uploadedUrls,
        details: updatedRecord,
      });
    } catch (err) {
      console.error("❌ Upload Error:", err);
      return reply.code(500).send({ error: "Upload failed", details: err.message });
    }
  }
);
// POST /api/uploads-manualTestVideo-work/:customerId/:installationId
// Streams uploaded test videos (multer diskStorage temp files) into GCS with
// inline content disposition so browsers play rather than download them, then
// appends the public URLs to the record.
// NOTE(review): URLs are pushed into the `pictureUrl` field even though these
// are videos — presumably the schema only defines pictureUrl; confirm.
fastify.post(
  "/api/uploads-manualTestVideo-work/:customerId/:installationId",
  { preHandler: upload.any() }, // multer writes incoming videos to disk first
  async (request, reply) => {
    try {
      const { customerId, installationId } = request.params;
      const uploads = request.files; // multer-style array of saved files

      if (!uploads || uploads.length === 0) {
        return reply.code(400).send({ error: "No files uploaded" });
      }

      const bucketName = "arminta_profile_pictures";
      const uploadedUrls = [];

      for (const videoFile of uploads) {
        const uniqueFileName = `${Date.now()}-${Math.random()
          .toString(36)
          .substring(7)}-${videoFile.originalname}`;
        const filePath = `manual_test_video/${uniqueFileName}`;

        console.log(`Uploading video: ${videoFile.path} → ${filePath}`);

        const target = storage.bucket(bucketName).file(filePath);
        const sink = target.createWriteStream({
          metadata: {
            contentType: videoFile.mimetype || "video/mp4",
            contentDisposition: "inline",
          },
          resumable: false,
        });

        // Pipe the temp file from disk into GCS and wait for completion.
        fs.createReadStream(videoFile.path).pipe(sink);

        const publicUrl = await new Promise((resolve, reject) => {
          sink.on("finish", () => {
            target
              .makePublic()
              .then(() => resolve(`https://storage.googleapis.com/${bucketName}/${filePath}`))
              .catch(reject);
          });
          sink.on("error", (err) => {
            console.error("Upload Error:", err);
            reject(err);
          });
        });

        uploadedUrls.push(publicUrl);
        console.log(`✅ Uploaded: ${publicUrl}`);

        // Best-effort removal of the multer temp file (non-blocking).
        fs.unlink(videoFile.path, (err) => {
          if (err) console.error("Temp file cleanup failed:", err);
        });
      }

      const updatedRecord = await ManualTestVideo.findOneAndUpdate(
        { customerId, installationId },
        { $push: { pictureUrl: { $each: uploadedUrls.map((url) => ({ url })) } } },
        { new: true, upsert: true }
      );

      return reply.send({
        success: true,
        videos: uploadedUrls,
        details: updatedRecord,
      });
    } catch (err) {
      console.error("Upload Error:", err);
      return reply
        .code(500)
        .send({ error: "Upload failed", details: err.message });
    }
  }
);
// POST /api/uploads-plumbing-work/:customerId/:installationId
// Streams each multipart file part to GCS and appends the public URLs to the
// customer/installation plumbing-pictures record.
fastify.post("/api/uploads-plumbing-work/:customerId/:installationId", async (request, reply) => {
  try {
    const { customerId, installationId } = request.params;

    // FIX: request.files() yields an async iterator, not an array, so the old
    // `files.length === 0` guard could never fire (undefined === 0 is false).
    // Consume the iterator and report 400 afterwards if nothing arrived.
    const parts = await request.files();

    const bucketName = "arminta_profile_pictures";
    const publicUrls = [];

    for await (const file of parts) {
      const uniqueFileName = `${Date.now()}-${Math.random().toString(36).substring(7)}-${file.filename}`;
      const filePath = `plumbing_work_picture/${uniqueFileName}`;

      console.log(`Uploading file: ${file.filename} → ${filePath}`);

      const writeStream = storage.bucket(bucketName).file(filePath).createWriteStream();

      file.file.pipe(writeStream);

      await new Promise((resolve, reject) => {
        writeStream.on("finish", async () => {
          try {
            await storage.bucket(bucketName).file(filePath).makePublic();
            const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;
            publicUrls.push(publicUrl);
            console.log(`File uploaded: ${publicUrl}`);
            resolve();
          } catch (error) {
            console.error("Failed to make file public:", error);
            reject(error);
          }
        });

        writeStream.on("error", (err) => {
          console.error("Failed to upload file:", err);
          reject(err);
        });
      });
    }

    if (publicUrls.length === 0) {
      return reply.code(400).send({ error: "No files uploaded" });
    }

    // Append the new URLs as { url } objects on the (upserted) record.
    const updatedRecord = await PlumbingWorkPictures.findOneAndUpdate(
      { customerId, installationId },
      { $push: { pictureUrl: { $each: publicUrls.map(url => ({ url })) } } },
      { new: true, upsert: true }
    );

    reply.send({ success: true, pictures: publicUrls, details: updatedRecord });
  } catch (err) {
    console.error("Upload Error:", err);
    reply.code(500).send({ error: "An error occurred", details: err.message });
  }
});
// POST /api/uploads-material-recieved/:customerId/:installationId
// Uploads one or more material-received photos to GCS and appends their public
// URLs to the customer/installation record.
fastify.post(
  "/api/uploads-material-recieved/:customerId/:installationId",
  { preHandler: upload.any() }, // multer: accept any multipart file fields
  async (request, reply) => {
    try {
      const { customerId, installationId } = request.params;
      const files = request.files; // multer-style files array

      if (!files || files.length === 0) {
        return reply.code(400).send({ error: "No files uploaded" });
      }

      const bucketName = "arminta_profile_pictures";
      const publicUrls = [];

      for (const file of files) {
        // Unique name to avoid collisions between uploads.
        const uniqueFileName = `${Date.now()}-${Math.random()
          .toString(36)
          .substring(7)}-${file.originalname}`;
        // FIX: copy-paste bug — material-received files were being stored
        // under "plumbing_work_picture/"; give them their own folder.
        const filePath = `material_recieved_picture/${uniqueFileName}`;

        // Support both multer storages (buffer = memoryStorage, path = diskStorage).
        const fileBuffer = file.buffer
          ? file.buffer
          : await fs.promises.readFile(file.path);

        // Upload to GCS, then expose publicly.
        await storage.bucket(bucketName).file(filePath).save(fileBuffer);
        await storage.bucket(bucketName).file(filePath).makePublic();

        const publicUrl = `https://storage.googleapis.com/${bucketName}/${filePath}`;
        publicUrls.push(publicUrl);
      }

      // Append the new URLs as { url } objects on the (upserted) record.
      const updatedRecord = await MaterialRecievedPictures.findOneAndUpdate(
        { customerId, installationId },
        { $push: { pictureUrl: { $each: publicUrls.map((url) => ({ url })) } } },
        { new: true, upsert: true }
      );

      return reply.send({
        success: true,
        pictures: publicUrls,
        details: updatedRecord,
      });
    } catch (err) {
      request.log.error(err);
      return reply
        .code(500)
        .send({ error: "Upload failed", details: err.message });
    }
  }
);
// POST /api/installLogin
// Authenticates an Installation Manager against the department record, mirrors
// the department into the Install collection on first login, and returns a JWT.
fastify.post("/api/installLogin", {
  schema: {
    description: "Login as Installation Manager",
    tags: ["Installation"],
    summary: "Installation Manager login",
    body: {
      type: "object",
      required: ["type", "phone", "password"],
      properties: {
        type: { type: "string", enum: ["Installation_Manager"], description: "Login type" },
        phone: { type: "string", description: "Registered phone number" },
        password: { type: "string", description: "Password" },
      },
    },
  },
  async handler(req, reply) {
    try {
      const { type, phone, password } = req.body;

      // Find department record.
      const user = await Deparments.findOne({ phone }).lean();
      if (!user) {
        return reply.code(400).send({ message: "User not found" });
      }

      // FIX: guard against department docs with no stored hash —
      // bcrypt.compare(password, undefined) would otherwise throw and
      // surface as a 500 instead of an auth failure.
      const hash = user.services?.password?.bcrypt;
      if (!hash) {
        return reply.code(401).send({ message: "Invalid credentials" });
      }
      const isMatch = await bcrypt.compare(password, hash);
      if (!isMatch) {
        return reply.code(401).send({ message: "Invalid credentials" });
      }

      // Find or create the installation document mirrored from the department.
      let installation = await Install.findOne({ phone });
      if (!installation) {
        installation = new Install({
          phone,
          installationId: user.departmentId,
          firstName: user.firstName,
          lastName: user.lastName,
          email: user.email,
          alternativeNumber: user.alternativeContactNumber,
          departmentName: user.departmentName,
          designation: user.desginationName,
          reportingManager: user.reportingManager,
          city: user.city,
          zone: user.zone,
          address1: user.address1,
          address2: user.address2,
          picture: user.picture,
          profile: {
            state: user.state,
            country: user.country,
            role: type, // store login type
          },
        });
        await installation.save();
      } else if (!installation.profile?.role) {
        installation.profile.role = type;
        await installation.save();
      }

      // Generate JWT.
      // SECURITY NOTE(review): hard-coded signing secret — move to config/env.
      const token = fastify.jwt.sign(
        { userId: user._id, phone, role: type },
        "your_secret_key", // replace with config
        { expiresIn: "7d" }
      );

      return reply.send({
        success: true,
        message: "Login successful",
        simplydata: {
          error: false,
          access_token: token,
          installationId: installation.installationId,
          phone: installation.phone,
          firstName: installation.firstName,
          lastName: installation.lastName,
          city: installation.city,
          email: installation.emails?.map(e => e.email) || [],
          type: installation.profile?.role || type,
          team: installation.team,
          manager: installation.manager,
          fcmId: installation.fcmId,
          alternativeNumber: installation.alternativeNumber,
          phoneVerified: installation.phoneVerified,
          picture: installation.picture
        },
      });
    } catch (err) {
      console.error("installLogin error:", err);
      reply.code(500).send({ message: "Internal server error" });
    }
  }
});
// POST /api/teamMemberLogin
// Authenticates an installation team member against the nested member list on
// the department doc, mirrors the member onto the Install doc on first login,
// and returns a short-lived JWT.
fastify.post("/api/teamMemberLogin", {
  schema: {
    description: "Login as Installation Team Member",
    tags: ["Installation"],
    summary: "Team member login",
    body: {
      type: "object",
      required: ["type", "phone", "password"],
      properties: {
        type: { type: "string", enum: ["Installation_TeamMember"], description: "Login type" },
        phone: { type: "string", description: "Team member phone" },
        password: { type: "string", description: "Password" },
      },
    },
  },
  async handler(req, reply) {
    try {
      const { type, phone, password } = req.body;

      // Locate the department that contains this team-member phone.
      const department = await Deparments.findOne({ "team_member.team_member.phone": phone }).lean();
      if (!department) {
        return reply.code(401).send({ message: "Invalid phone number" });
      }

      // Resolve the actual team member entry.
      const teamMember = department.team_member.team_member.find(m => m.phone === phone);
      if (!teamMember) {
        return reply.code(401).send({ message: "Invalid phone number" });
      }

      // FIX: guard against members with no stored hash — bcrypt.compare
      // would otherwise throw and surface as a 500 instead of an auth failure.
      if (!teamMember.password) {
        return reply.code(401).send({ message: "Invalid credentials" });
      }
      const isMatch = await bcrypt.compare(password, teamMember.password);
      if (!isMatch) {
        return reply.code(401).send({ message: "Invalid credentials" });
      }

      // Find the install document keyed by the departmentId.
      const installation = await Install.findOne({ installationId: department.departmentId });
      if (!installation) {
        return reply.code(404).send({ message: "Installation not found" });
      }

      // Mirror the member onto the Install doc on first login.
      // NOTE(review): this copies the bcrypt hash into a second collection —
      // consider referencing the department record instead of duplicating it.
      const alreadyExists = installation.team_member?.team_member?.some(m => m.phone === phone);
      if (!alreadyExists) {
        installation.team_member.team_member.push({
          teamMemberId: teamMember.teamMemberId,
          firstName: teamMember.firstName,
          phone: teamMember.phone,
          installationTeamMemId: installation.installationId,
          password: teamMember.password,
          status: teamMember.status || "active",
          email: teamMember.email,
          alternativePhone: teamMember.alternativePhone,
          type
        });
        await installation.save();
      }

      // Generate JWT.
      // SECURITY NOTE(review): hard-coded signing secret — move to config/env.
      const token = fastify.jwt.sign(
        { phone, role: type, installationId: installation.installationId },
        "your_secret_key", // replace with config
        { expiresIn: "1h" }
      );

      return reply.send({
        simplydata: {
          error: false,
          access_token: token,
          phone: teamMember.phone,
          firstName: teamMember.firstName,
          teamMemberId: teamMember.teamMemberId,
          alternativePhone: teamMember.alternativePhone,
          email: teamMember.email,
          status: teamMember.status || "active",
          type: type,
          installationId: installation.installationId
        }
      });
    } catch (err) {
      console.error("teamMemberLogin error:", err);
      reply.code(500).send({ message: "Internal server error" });
    }
  }
});
{ + const { phone, password } = req.body; + + // Check if user exists in the Department Schema + const user = await Deparments.findOne({ phone }); + console.log("user", user) - // Check if an install with the phone number exists - const install = await Install.findOne({ phone }); + if (!user) { + return reply.code(400).send({ message: "User not found" }); + } - if (!install) { - return reply.status(401).send({ + // Verify Password + const isMatch = await bcrypt.compare(password, user.services.password.bcrypt); + + if (!isMatch) { + return reply.code(400).send({ message: "Invalid credentials" }); + } + + let survey = await Survey.findOne({ phone }); + + if (!survey) { + survey = new Survey({ + phone: user.phone, + surveyId: user.departmentId, + firstName: user.firstName, + lastName: user.lastName, + email: user.email, + alternativeNumber: user.alternativeContactNumber, + departmentName: user.departmentName, + designation: user.desginationName, + reportingManager: user.reportingManager, + city: user.city, + zone: user.zone, + address1: user.address1, + address2: user.address2, + profile: { + state: user.state, + country: user.country, + //role: type, // Store type in profile.role + }, + }); + + await survey.save(); + } + + const token = fastify.jwt.sign( + { phone: user.phone }, + "Scret", + { expiresIn: "1h" } + ); + + return reply.send({ simplydata: { - error: true, - message: 'Invalid Phone or password' - } + error: false, + message: "Login successful", + access_token: token, + phone: user.phone, + surveyId: user.departmentId, + firstName: user.firstName, + lastName: user.lastName, + email: user.email, + alternativeNumber: user.alternativeContactNumber, + departmentName: user.departmentName, + designation: user.desginationName, + reportingManager: user.reportingManager, + city: user.city, + zone: user.zone, + address1: user.address1, + address2: user.address2, + profile: { + state: user.state, + country: user.country, + //role: type, // Store type in profile.role 
+ }, + }, }); + // return reply.send(survey); + } catch (error) { + console.error("Login Error:", error); + return reply.code(500).send({ message: "Internal server error" }); } + }, + }); + + const moment = require("moment-timezone"); +const { AdminProfilePicture } = require("./models/admin.js"); + + fastify.post("/api/supportLogin", { + schema: { + description: "This is for Login Support", + tags: ["Support"], + summary: "This is for Login Support", + body: { + type: "object", + required: ["phone", "password", "type"], + properties: { + phone: { type: "string" }, + password: { type: "string" }, + type: { type: "string" }, + }, + }, + }, + handler: async (req, reply) => { + try { + const { phone, password, type } = req.body; + const user = await Deparments.findOne({ phone }); + if (!user) return reply.code(400).send({ message: "User not found" }); + + const isMatch = await bcrypt.compare(password, user.services.password.bcrypt); + if (!isMatch) return reply.code(400).send({ message: "Invalid credentials" }); + + // Format login times + const now = moment().tz("Asia/Kolkata"); + const dateOfLogin = now.format("DD-MM-YYYY"); + const timeOfLogin = now.format("HH:mm:ss"); + + let support = await Support.findOne({ phone }); - // Compare the password entered by the user with the hashed password stored in the database - const isPasswordValid = await bcrypt.compare(password, install.services.password.bcrypt); + if (!support) { + support = new Support({ + phone: user.phone, + supportId: user.departmentId, + firstName: user.firstName, + lastName: user.lastName, + email: user.email, + alternativeNumber: user.alternativeContactNumber, + departmentName: user.departmentName, + designation: user.desginationName, + reportingManager: user.reportingManager, + city: user.city, + zone: user.zone, + address1: user.address1, + address2: user.address2, + profile: { + state: user.state, + country: user.country, + }, + dateOfLogin, + timeOfLogin, + }); + await support.save(); + } else { + 
// Update login time only if access token is being issued (new login) + await Support.updateOne( + { phone }, + { $set: { dateOfLogin, timeOfLogin } } + ); + } + + const token = fastify.jwt.sign( + { + userId: user._id, + phone: user.phone, + role: type, + }, + "Scret", + { expiresIn: "1h" } + ); - if (!isPasswordValid) { - return reply.status(401).send({ + return reply.send({ simplydata: { - error: true, - message: 'Invalid phone or password' + error: false, + apiversion: "1.0.0", + access_token: token, + email: user.email || [], + supportId: user.departmentId || "", + phone: user.phone, + address1: user.address1 || "", + address2: user.address2 || "", + phoneVerified: false, + oneTimePasswordSetFlag: false, + type: type, + fcmIds: null, + team: null, + city: user.city || "", + manager: user.reportingManager || null, + firstName: user.firstName || "", + lastName: user.lastName || "", + address: "", + alternativeNumber: user.alternativeContactNumber || "", + profilePicture: null, + dateOfLogin, + timeOfLogin, + currentTime: now.format("HH:mm:ss") } }); + } catch (error) { + console.error("Login Error:", error); + return reply.code(500).send({ message: "Internal server error" }); } + } + }); - // Generate a JWT token for the authenticated install - const token = fastify.jwt.sign({ phone: install.phone }, 'your_jwt_secret', { expiresIn: '30d' }); - // Fetch the profile picture if it exists - const profilePicture = await ProfilePictureInstall.findOne({ customerId: install._id }); - const responsePayload = { - simplydata: { - error: false, - apiversion: fastify.config.APIVERSION, - access_token: token, - email: install.emails, - installationId: install.installationId, - phone: install.phone, - address1: install.address1, - address2: install.address2, - phoneVerified: install.phoneVerified, - oneTimePasswordSetFlag: install.oneTimePasswordSetFlag, - type: install.profile.role, - fcmId: install.fcmId, - team: install.team, - city: install.city, - manager: 
install.manager, - firstName: install.firstName, - lastName: install.lastName, - address: install.address, - alternativeNumber: install.alternativeNumber, + fastify.post("/api/supportTeamMemberLogin", { + schema: { + description: "Login Support TeamMember", + tags: ["Support"], + summary: "Login for Support TeamMember", + body: { + type: "object", + required: ["type", "phone", "password"], + properties: { + type: { + type: "string", + description: "Role type of the user (e.g., 'Support_TeamMember')" + }, + phone: { + type: "string", + description: "Registered phone number of the team member" + }, + password: { + type: "string", + description: "Password for authentication" + } } - }; + } + }, + async handler(req, reply) { + try { + const { type, phone, password } = req.body; + + if (type !== "Support_TeamMember") { + return reply.code(400).send({ error: "Invalid user type" }); + } + + const support = await Support.findOne({ + "team_member.team_member.phone": phone + }); + + if (!support) { + return reply.code(404).send({ error: 'Team member not found' }); + } + + const teamMember = support.team_member.team_member.find( + member => member.phone === phone + ); + + if (!teamMember) { + return reply.code(404).send({ error: 'Team member not found' }); + } + + if (!teamMember.password) { + return reply.code(401).send({ error: 'Password not set' }); + } + + const isPasswordValid = await bcrypt.compare(password, teamMember.password); + if (!isPasswordValid) { + return reply.code(401).send({ error: 'Invalid credentials' }); + } + + const token = fastify.jwt.sign( + { + support_teamMemberId: teamMember.support_teamMemberId, + name: teamMember.name, + phone: teamMember.phone, + type: type + }, + "JWT_SECRET", + { expiresIn: '7d' } + ); + + return reply.send({ + simplydata: { + error: false, + apiversion: "1.0.0", + access_token: token, + email: [], + support_teamMemberId: teamMember.support_teamMemberId || "", + phone: teamMember.phone, + address1: teamMember.address1 || "", + 
address2: teamMember.address2 || "", + phoneVerified: false, + oneTimePasswordSetFlag: false, + type: type, + fcmIds: null, + team: null, + city: teamMember.city || "", + manager: null, + firstName: teamMember.name?.split(" ")[0] || "", + lastName: teamMember.name?.split(" ")[1] || "", + address: "", + alternativeNumber: teamMember.alternativeNumber || "", + profilePicture: null + } + }); - if (profilePicture) { - responsePayload.simplydata.picture = profilePicture.picture; + } catch (error) { + console.error("Error during team member login:", error); + return reply.code(500).send({ error: "Internal server error" }); } + } + }); - // Return the token and user details to the client - return reply.send(responsePayload); - } catch (err) { - reply.status(500).send({ - simplydata: { - error: true, - message: err.message + + + + fastify.post("/api/storelogin", { + schema: { + description: "This is for Store Login", + tags: ["Store-Data"], + summary: "This is for Store Login", + body: { + type: "object", + required: [ "phone", "password"], + properties: { + phone: { type: "string", description: "Registered phone number" }, + password: { type: "string", description: "Password for authentication" }, + }, + }, + }, + async handler(req, reply) { + try { + const { phone, password } = req.body; + + // Check if user exists in the Department Schema + const user = await Deparments.findOne({ phone }); + console.log("user", user) + + if (!user) { + return reply.code(400).send({ message: "User not found" }); } - }); + + // Verify Password + const isMatch = await bcrypt.compare(password, user.services.password.bcrypt); + + if (!isMatch) { + return reply.code(400).send({ message: "Invalid credentials" }); + } + + let store = await Store.findOne({ phone }); + + if (!store) { + store = new Store({ + phone: user.phone, + storeId: user.departmentId, + firstName: user.firstName, + lastName: user.lastName, + email: user.email, + alternativeNumber: user.alternativeContactNumber, + 
departmentName: user.departmentName, + designation: user.desginationName, + reportingManager: user.reportingManager, + city: user.city, + zone: user.zone, + address1: user.address1, + address2: user.address2, + profile: { + state: user.state, + country: user.country, + //role: type, // Store type in profile.role + }, + }); + + await store.save(); + } + + const token = fastify.jwt.sign( + { phone: user.phone }, + "Scret", + { expiresIn: "1h" } + ); + + return reply.send({ + simplydata: { + error: false, + message: "Login successful", + access_token: token, + phone: user.phone, + storeId: user.departmentId, + firstName: user.firstName, + lastName: user.lastName, + email: user.email, + alternativeNumber: user.alternativeContactNumber, + departmentName: user.departmentName, + designation: user.desginationName, + reportingManager: user.reportingManager, + city: user.city, + zone: user.zone, + address1: user.address1, + address2: user.address2, + profile: { + state: user.state, + country: user.country, + //role: type, // Store type in profile.role + }, + }, + }); + // return reply.send(survey); + } catch (error) { + console.error("Login Error:", error); + return reply.code(500).send({ message: "Internal server error" }); + } + }, + }); + + fastify.post("/add-states", async (request, reply) => { + try { + const statesData = [ + { + state: "Andhra Pradesh", + majorCities: [ + "Adoni", "Amaravati", "Anantapur", "Chandragiri", "Chittoor", + "Guntur", "Kadapa", "Kakinada", "Kurnool", "Machilipatnam", + "Rajahmundry", "Tirupati", "Vijayawada", "Visakhapatnam", "Vizianagaram" + ], + }, + { + state: "Arunachal Pradesh", + majorCities: ["Itanagar", "Naharlagun", "Tawang", "Pasighat", "Ziro"], + }, + { + state: "Assam", + majorCities: ["Guwahati", "Dibrugarh", "Silchar", "Jorhat", "Tezpur"], + }, + { + state: "Bihar", + majorCities: ["Patna", "Gaya", "Bhagalpur", "Muzaffarpur", "Purnia"], + }, + { + state: "Chhattisgarh", + majorCities: ["Raipur", "Bhilai", "Bilaspur", "Durg", 
"Korba"], + }, + { + state: "Goa", + majorCities: ["Panaji", "Margao", "Vasco da Gama", "Mapusa"], + }, + { + state: "Gujarat", + majorCities: ["Ahmedabad", "Surat", "Vadodara", "Rajkot", "Bhavnagar"], + }, + { + state: "Haryana", + majorCities: ["Chandigarh", "Faridabad", "Gurugram", "Hisar", "Panipat"], + }, + { + state: "Himachal Pradesh", + majorCities: ["Shimla", "Manali", "Dharamshala", "Mandi", "Solan"], + }, + { + state: "Jharkhand", + majorCities: ["Ranchi", "Jamshedpur", "Dhanbad", "Bokaro", "Hazaribagh"], + }, + { + state: "Karnataka", + majorCities: ["Bengaluru", "Mysuru", "Hubballi", "Mangaluru", "Belagavi"], + }, + { + state: "Kerala", + majorCities: ["Thiruvananthapuram", "Kochi", "Kozhikode", "Thrissur", "Alappuzha"], + }, + { + state: "Madhya Pradesh", + majorCities: ["Bhopal", "Indore", "Gwalior", "Jabalpur", "Ujjain"], + }, + { + state: "Maharashtra", + majorCities: ["Mumbai", "Pune", "Nagpur", "Nashik", "Aurangabad"], + }, + { + state: "Manipur", + majorCities: ["Imphal", "Bishnupur", "Thoubal", "Churachandpur"], + }, + { + state: "Meghalaya", + majorCities: ["Shillong", "Tura", "Jowai", "Nongstoin"], + }, + { + state: "Mizoram", + majorCities: ["Aizawl", "Lunglei", "Champhai", "Serchhip"], + }, + { + state: "Nagaland", + majorCities: ["Kohima", "Dimapur", "Mokokchung", "Tuensang"], + }, + { + state: "Odisha", + majorCities: ["Bhubaneswar", "Cuttack", "Rourkela", "Berhampur", "Sambalpur"], + }, + { + state: "Punjab", + majorCities: ["Chandigarh", "Ludhiana", "Amritsar", "Jalandhar", "Patiala"], + }, + { + state: "Rajasthan", + majorCities: ["Jaipur", "Jodhpur", "Udaipur", "Kota", "Ajmer"], + }, + { + state: "Sikkim", + majorCities: ["Gangtok", "Namchi", "Mangan", "Gyalshing"], + }, + { + state: "Tamil Nadu", + majorCities: ["Chennai", "Coimbatore", "Madurai", "Tiruchirappalli", "Salem"], + }, + { + state: "Telangana", + majorCities: ["Hyderabad", "Warangal", "Nizamabad", "Karimnagar", "Khammam"], + }, + { + state: "Tripura", + majorCities: 
["Agartala", "Udaipur", "Dharmanagar", "Kailashahar"], + }, + { + state: "Uttar Pradesh", + majorCities: ["Lucknow", "Kanpur", "Varanasi", "Agra", "Meerut"], + }, + { + state: "Uttarakhand", + majorCities: ["Dehradun", "Haridwar", "Nainital", "Rishikesh"], + }, + { + state: "West Bengal", + majorCities: ["Kolkata", "Howrah", "Durgapur", "Siliguri", "Asansol"], + }, + { + state: "Andaman and Nicobar Islands", + majorCities: ["Port Blair"], + }, + { + state: "Chandigarh", + majorCities: ["Chandigarh"], + }, + { + state: "Dadra and Nagar Haveli and Daman and Diu", + majorCities: ["Daman", "Diu", "Silvassa"], + }, + { + state: "Lakshadweep", + majorCities: ["Kavaratti"], + }, + { + state: "Delhi", + majorCities: ["New Delhi"], + }, + { + state: "Puducherry", + majorCities: ["Puducherry", "Karaikal", "Mahe", "Yanam"], + }, + ]; + + await IndianLocations.deleteMany(); + await IndianLocations.insertMany(statesData); + + reply.send({ message: "✅ All states added successfully!" }); + } catch (error) { + reply.status(500).send({ error: "❌ Error inserting states: " + error }); } - },}); + }); // Run the server! 
-const start = async () => { +// const start = async () => { - try { +// try { - await fastify.listen(3000, "0.0.0.0"); - fastify.log.info(`listening on ${fastify.server.address().port}`); - fastify.log.info(`server listening on ${fastify.config}`); +// await fastify.listen(3000, "0.0.0.0"); +// fastify.log.info(`listening on ${fastify.server.address().port}`); +// fastify.log.info(`server listening on ${fastify.config}`); +// } catch (err) { +// fastify.log.error(err); +// process.exit(1); +// } +// }; +const start = async () => { + try { + await fastify.listen({ port: 3000, host: "0.0.0.0" }); // ✅ correct usage in Fastify v4 + fastify.log.info(`Server listening on ${fastify.server.address().port}`); } catch (err) { fastify.log.error(err); process.exit(1); } }; -start(); +start(); \ No newline at end of file diff --git a/src/models/Department.js b/src/models/Department.js index 9ccbce64..2bf2a50e 100644 --- a/src/models/Department.js +++ b/src/models/Department.js @@ -11,10 +11,16 @@ const citySchema = new mongoose.Schema( phone: { type: String, unique: true, trim: true }, office_address1: String, officeName: { type: String }, + email: { type: String }, address2: String, pincode: { type: String }, zone: { type: String }, + longitude: { type: Number, default: 0.0 }, + latitude: { type: Number, default: 0.0 }, + googleLocation: { type: String }, + gstNo: { type: String }, city: { type: String }, + nameoftheContactPerson: String, location: [{ type : String}], state: String, country: String, @@ -38,13 +44,82 @@ const citySchema = new mongoose.Schema( ); + const branchSchema = new mongoose.Schema( + { + branchId:{type:String}, + phone: { type: String, unique: true, trim: true }, + land_line_number: { type: String, trim: true }, + office_address1: String, + officeName: { type: String }, + email: { type: String }, + address2: String, + pincode: { type: String }, + zone: { type: String , default: "ALL"}, + city: { type: String }, + location: [{ type : String}], + 
googleLocation: { type: String }, + longitude: { type: Number, default: 0.0 }, + latitude: { type: Number, default: 0.0 }, + googleLocation: { type: String }, + state: String, + country: String, + nameoftheContactPerson: String, + services: { password: { bcrypt: String } }, + createdAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + createdBy: ObjectId, + updatedAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + updatedBy: ObjectId, + }, + { versionKey: false } + ); + + const zoneSchema = new mongoose.Schema( + { + zoneId:{type:String}, + officeName: { type: String }, + zone: { type: String , default: "ALL"}, + city: { type: String }, + area: { type: String }, + location: [{ type : String}], + createdAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + createdBy: ObjectId, + updatedAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + updatedBy: ObjectId, + }, + { versionKey: false } + ); + const departmentsSchema = new mongoose.Schema( { + adminId: String, departmentId:{type:String}, + officeName: { type: String }, desginationName: { type: String }, phone: { type: String, unique: true, trim: true }, alternativeContactNumber : { type: String }, - reportingManager : { type: String }, + reportingManager: { type: String, default: "Self" }, + reportingManager_mobile_number : { type: String }, + reportingManager_email : { type: String }, location: [{ type : String}], firstName : { type: String }, gender: { type: String }, @@ -57,9 +132,33 @@ const citySchema = new mongoose.Schema( pincode: { type: String }, zone: { type: String }, city: { type: String }, + personal_city: { type: String }, state: String, country: String, + picture:{type:String}, + dateOfJoin : { type: String }, + employeeType: {type: String }, services: { password: { bcrypt: String } }, + team_member: { + + team_member: [ + { + teamMemberId: { type: String }, + firstName: { type: String }, + phone: { 
type: String }, + installationTeamMemId: { type: String }, + password: { type: String, default: null }, + status: { type: String, default: "active" }, + email: { type: String }, + alternativePhone: { type: String }, + departmentId: String, // new + departmentName: String, + officeName: String, // new + city: String, + + } + ], + }, createdAt: { type: Date, default: function () { @@ -77,10 +176,60 @@ const citySchema = new mongoose.Schema( }, { versionKey: false } ); + + const teamMemberProfilePictureSchema = new Schema({ + customerId: { + type: String, + unique: true, + required: true + }, + picture: { + type: String, // Change the type to String + required: true, + validate: { + validator: function (value) { + const supportedFormats = ['jpg', 'jpeg', 'png']; + const fileExtension = value.split('.').pop().toLowerCase(); + return supportedFormats.includes(fileExtension); + }, + message: 'Picture must be a JPEG, PNG, or JPG image' + } + } + }); + + const companyProfilePictureSchema = new Schema({ + customerId: { + type: String, + unique: true, + required: true + }, + picture: { + type: String, // Change the type to String + required: true, + validate: { + validator: function (value) { + const supportedFormats = ['jpg', 'jpeg', 'png']; + const fileExtension = value.split('.').pop().toLowerCase(); + return supportedFormats.includes(fileExtension); + }, + message: 'Picture must be a JPEG, PNG, or JPG image' + } + } + }); + const stateSchema = new mongoose.Schema({ + state: { type: String, required: true, unique: true }, + majorCities: { type: [String], required: true } + }); + + const IndianLocations = mongoose.model("IndianLocations", stateSchema); const City = mongoose.model('City', citySchema); const Deparments = mongoose.model('Deparments', departmentsSchema); + const Branch = mongoose.model('Branch', branchSchema); + const Zone = mongoose.model('Zone', zoneSchema); + const TeamMemberProfilePicture = mongoose.model('TeamMemberProfilePicture', 
teamMemberProfilePictureSchema); + const CompanyProfilePicture = mongoose.model('CompanyProfilePicture', companyProfilePictureSchema); - module.exports = { City,Deparments}; + module.exports = { City,Deparments,Branch,TeamMemberProfilePicture,CompanyProfilePicture,Zone,IndianLocations}; diff --git a/src/models/User.js b/src/models/User.js index ea4c55a8..08ad91a4 100644 --- a/src/models/User.js +++ b/src/models/User.js @@ -44,6 +44,7 @@ const generateBookingId = async () => { + const userSchema = new mongoose.Schema( { installationId:{type:String}, @@ -55,18 +56,22 @@ const userSchema = new mongoose.Schema( inchargeName: String, phoneVerified: { type: Boolean, default: false }, phoneVerificationCode: { type: Number, default: 11111 }, + passwordResetCode: { type: Number, default: code }, oneTimePasswordSetFlag: { type: Boolean, default: false }, emails: [{ email: String, verified: { type: Boolean, default: false } }], services: { password: { bcrypt: String } }, survey_status:{ type:String,default: "pending" }, + favorate_suppliers: [{ type: String, default: null }], + staff: { + staff: [ { name: { type: String }, phone: { type: String }, - + all_motor_access: { type: String }, password: { type: String, default: null }, status: { type: String, default: "active" }, @@ -90,6 +95,7 @@ const userSchema = new mongoose.Schema( zip: { type: String, default: null }, notes: { type: String, default: null }, }, + stripeCustomerId: String, stripePaymentIntentId: String, stripeSubscriptionId: String, @@ -116,8 +122,27 @@ const userSchema = new mongoose.Schema( latitude: {type: Number,default: 0.0}, isActive: Boolean, tenantId: ObjectId, - fcmId: { type: String, default: null }, + // fcmId: { type: String, default: null }, + fcmIds: [{ type: String }], // Changed to an array of strings + deviceId: { type: String, default: null }, + notificationPreference: { + type: String, + enum: ["never", "always", "6_hours", "8_hours", "1_month"], + default: "always", // Default is now "always" 
+ }, + lastNotificationSent: { + type: Date, + default: null, // Initially, no notifications sent + }, + notificationTime: { type: String }, + allowNotifications: { type: Boolean, default: true }, + lowWaterAlert: { type: Boolean, default: true }, + criticalLowWaterAlert: { type: Boolean, default: true }, + manualStartAndStopNotify: { type: Boolean, default: true }, + automaticStartAndStopNotify: { type: Boolean, default: true }, + + createdAt: { type: Date, default: function () { @@ -169,6 +194,20 @@ const teamMembersSchema = new mongoose.Schema({ fcmId: { type: String, default: null }, }); +const cartItemSchema = new mongoose.Schema({ + productId: { type: String, required: true }, + name: { type: String, default: null }, + quantity: { type: Number, default: 1 }, + price: { type: Number, default: 0 }, +}); + +const cartSchema = new mongoose.Schema({ + customerId: { type: String, required: true }, + items: [cartItemSchema], +}, { timestamps: true }); + +const Cart = mongoose.model("Cart", cartSchema); + const ProfilePicture = mongoose.model('ProfilePicture', profilePictureSchema); const Counter = mongoose.model('Counter', CounterSchema); @@ -182,4 +221,4 @@ const AddTeamMembers = mongoose.model("AddTeamMembers", teamMembersSchema); //module.exports = mongoose.model("User", userSchema); -module.exports = { User,Counter, generateCustomerId,generateBookingId ,resetCounter,ProfilePicture,AddTeamMembers}; +module.exports = { User,Counter, generateCustomerId,generateBookingId ,resetCounter,ProfilePicture,AddTeamMembers,Cart}; diff --git a/src/models/admin.js b/src/models/admin.js index c386110a..2161e132 100644 --- a/src/models/admin.js +++ b/src/models/admin.js @@ -24,17 +24,42 @@ const adminSchema = new mongoose.Schema({ enum: ['admin', 'sales', 'store'], default: 'sales', }, - customerId: { + adminId: { type: String, required: true, // Customer ID is now required unique: true, }, + picture:{ + type: String, + }, date: { type: Date, default: Date.now, }, }) + + const 
adminProfilePictureSchema = new mongoose.Schema({ + customerId: { + type: String, + unique: true, + required: true + }, + picture: { + type: String, // Change the type to String + required: true, + validate: { + validator: function (value) { + const supportedFormats = ['jpg', 'jpeg', 'png']; + const fileExtension = value.split('.').pop().toLowerCase(); + return supportedFormats.includes(fileExtension); + }, + message: 'Picture must be a JPEG, PNG, or JPG image' + } + } + }); + const Admin = mongoose.model('Admin', adminSchema) +const AdminProfilePicture = mongoose.model('AdminProfilePicture', adminProfilePictureSchema); -module.exports = Admin \ No newline at end of file +module.exports = {Admin,AdminProfilePicture} \ No newline at end of file diff --git a/src/models/store.js b/src/models/store.js index c207b0b6..a498237e 100644 --- a/src/models/store.js +++ b/src/models/store.js @@ -4,7 +4,6 @@ const ObjectId = Schema.Types.ObjectId; const { Counter} = require('../models/User') const code = Math.floor(100000 + Math.random() * 900000); - const generateinstallationId = async () => { var result = await Counter.findOneAndUpdate( { _id: 'installation_id' }, @@ -25,56 +24,218 @@ const generateinstallationId = async () => { return result.seq; }; -const installationschema = new mongoose.Schema({ - // name: { type: String }, - phone: { type: String, unique: true, trim: true }, - address: String, - installationId: { type: String }, - phoneVerified: { type: Boolean, default: false }, - phoneVerificationCode: { type: Number, default: 11111 }, - passwordResetCode: { type: Number}, - oneTimePasswordSetFlag: { type: Boolean, default: false }, - emails: [{ email: String, verified: { type: Boolean, default: false } }], - services: { password: { bcrypt: String } }, - alternativeNumber: { type: String, default: null }, - firstName: { type: String, default: null }, - lastName: { type: String, default: null }, - address1: { type: String, default: null }, - address2: { type: String, 
default: null }, - city: { type: String, default: null }, - designation: { type: String, default: null }, - reportingManager: { type: String, default: null }, - departmentName: { type: String, default: null }, - zone: { type: String, default: null }, +// const installationschema = new mongoose.Schema({ +// // name: { type: String }, +// phone: { type: String, unique: true, trim: true }, +// address: String, +// installationId: { type: String }, +// phoneVerified: { type: Boolean, default: false }, +// phoneVerificationCode: { type: Number, default: 11111 }, +// passwordResetCode: { type: Number}, +// oneTimePasswordSetFlag: { type: Boolean, default: false }, +// emails: [{ email: String, verified: { type: Boolean, default: false } }], +// services: { password: { bcrypt: String } }, +// alternativeNumber: { type: String, default: null }, +// firstName: { type: String, default: null }, +// lastName: { type: String, default: null }, +// address1: { type: String, default: null }, +// address2: { type: String, default: null }, +// city: { type: String, default: null }, +// designation: { type: String, default: null }, +// reportingManager: { type: String, default: null }, +// departmentName: { type: String, default: null }, +// zone: { type: String, default: null }, +// type: { type: String }, - profile: { +// profile: { - state: { type: String, default: null }, - country: { type: String, default: null }, - }, - team : { type: String, default: null}, - manager : { type: String, default: null}, +// state: { type: String, default: null }, +// country: { type: String, default: null }, +// }, +// team : { type: String, default: null}, +// manager : { type: String, default: null}, +// team_member: { + +// team_member: [ +// { +// teamMemberId: { type: String }, +// firstName: { type: String }, +// phone: { type: String }, +// installationTeamMemId: { type: String }, +// password: { type: String, default: null }, +// status: { type: String, default: "active" }, +// email: { 
type: String }, +// alternativePhone: { type: String }, + +// } +// ], + + +// }, + +// longitude: { type : Number,default: 0.0}, +// latitude: {type: Number,default: 0.0}, - longitude: { type : Number,default: 0.0}, - latitude: {type: Number,default: 0.0}, +// fcmId: { type: String, default: null }, +// createdAt: { +// type: Date, +// default: function () { +// return Date.now(); +// }, +// }, +// createdBy: ObjectId, +// updatedAt: { +// type: Date, +// default: function () { +// return Date.now(); +// }, +// }, +// updatedBy: ObjectId, + +// }); - fcmId: { type: String, default: null }, - createdAt: { - type: Date, - default: function () { - return Date.now(); - }, + + + +const installationschema = new mongoose.Schema({ + phone: { type: String, unique: true, trim: true }, + address: { type: String, default: null }, + installationId: { type: String }, + phoneVerified: { type: Boolean, default: false }, + phoneVerificationCode: { type: Number, default: 11111 }, + passwordResetCode: { type: Number, default: null }, + oneTimePasswordSetFlag: { type: Boolean, default: false }, + emails: [ + { + email: { type: String }, + verified: { type: Boolean, default: false }, }, - createdBy: ObjectId, - updatedAt: { - type: Date, - default: function () { - return Date.now(); + ], + services: { + password: { bcrypt: { type: String, default: null } }, + }, + alternativeNumber: { type: String, default: null }, + firstName: { type: String, default: null }, + lastName: { type: String, default: null }, + address1: { type: String, default: null }, + address2: { type: String, default: null }, + city: { type: String, default: null }, + designation: { type: String, default: null }, + reportingManager: { type: String, default: null }, + departmentName: { type: String, default: null }, + zone: { type: String, default: null }, + + // Store main user type (e.g., Installation_Manager, Installation_TeamMember) + type: { type: String, default: null }, + + profile: { + state: { type: String, 
default: null }, + country: { type: String, default: null }, + role: { type: String, default: null }, // <-- good to keep as per your login flow + }, + + team: { type: String, default: null }, + manager: { type: String, default: null }, + picture:{type:String}, + + team_member: { + team_member: [ + { + teamMemberId: { type: String }, + firstName: { type: String, default: null }, + phone: { type: String }, + installationTeamMemId: { type: String }, + password: { type: String, default: null }, + status: { type: String, default: "active" }, + email: { type: String, default: null }, + alternativePhone: { type: String, default: null }, + type: { type: String, default: "Installation_TeamMember" }, // good to keep for clarity + picture: { type: String, default: null }, }, - }, - updatedBy: ObjectId, - - }); + ], + }, + + longitude: { type: Number, default: 0.0 }, + latitude: { type: Number, default: 0.0 }, + + fcmId: { type: String, default: null }, + + createdAt: { type: Date, default: Date.now }, + createdBy: { type: ObjectId, default: null }, + updatedAt: { type: Date, default: Date.now }, + updatedBy: { type: ObjectId, default: null }, +}); + + + const surveyschema = new mongoose.Schema({ + // name: { type: String }, + phone: { type: String, unique: true, trim: true }, + address: String, + surveyId: { type: String }, + phoneVerified: { type: Boolean, default: false }, + phoneVerificationCode: { type: Number, default: 11111 }, + passwordResetCode: { type: Number}, + oneTimePasswordSetFlag: { type: Boolean, default: false }, + emails: [{ email: String, verified: { type: Boolean, default: false } }], + services: { password: { bcrypt: String } }, + alternativeNumber: { type: String, default: null }, + firstName: { type: String, default: null }, + lastName: { type: String, default: null }, + address1: { type: String, default: null }, + address2: { type: String, default: null }, + city: { type: String, default: null }, + designation: { type: String, default: null }, + 
reportingManager: { type: String, default: null }, + departmentName: { type: String, default: null }, + zone: { type: String, default: null }, + type: { type: String }, + + profile: { + + state: { type: String, default: null }, + country: { type: String, default: null }, + }, + team : { type: String, default: null}, + manager : { type: String, default: null}, + team_member: { + + team_member: [ + { + survey_teamMemberId: { type: String }, + name: { type: String }, + phone: { type: String }, + installationTeamMemId: { type: String }, + password: { type: String, default: null }, + status: { type: String, default: "active" }, + email: { type: String }, + alternativePhone: { type: String }, + + } + ], + + + }, + + longitude: { type : Number,default: 0.0}, + latitude: {type: Number,default: 0.0}, + + fcmId: { type: String, default: null }, + createdAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + createdBy: ObjectId, + updatedAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + updatedBy: ObjectId, + + }); const profilePictureInstallSchema = new Schema({ installationId: { @@ -96,6 +257,31 @@ const installationschema = new mongoose.Schema({ } }); + const profilePictureInstallTeamMemberSchema = new Schema({ + installationId: { + type: String, + unique: true, + required: true + }, + teamMemberId: { + type: String, + unique: true, + required: true + }, + picture: { + type: String, // Change the type to String + required: true, + validate: { + validator: function (value) { + const supportedFormats = ['jpg', 'jpeg', 'png']; + const fileExtension = value.split('.').pop().toLowerCase(); + return supportedFormats.includes(fileExtension); + }, + message: 'Picture must be a JPEG, PNG, or JPG image' + } + } + }); + const profilePictureStoreSchema = new Schema({ storeId: { type: String, @@ -116,6 +302,244 @@ const installationschema = new mongoose.Schema({ } }); + const IssueSchema = new Schema({ + //ticketId: { type: String, 
unique: true }, + type: { + type: String, + enum: ["GSM Disconnected", "LoRa Disconnected", "GSM or LoRa Disconnected"], + required: true + }, + hardwareId: { + type: String, + required: true + }, + masterHardwareId: { + type: String + }, + masterName: { + type: String + }, + slaveName: { + type: String + }, + hardwareIds: [String], + slaveNames: [String], + resolved: { + type: Boolean, + default: false + }, + movedToCategory: { + type: Boolean, + default: false + }, + category: { + type: String + }, + createdAt: { + type: String + }, + lastTicketRaisedAt: { + type: String + }, + movedAt: { + type: String + } + }); + + const CategorizedIssueSchema = new Schema({ + type: { + type: String, + enum:["GSM Disconnected", "LoRa Disconnected", "GSM or LoRa Disconnected"], + required: true + }, + hardwareId: { + type: String, + required: true + }, + masterHardwareId: { + type: String, + required: true + }, + slaveName: { + type: String + }, + category: { + type: String, + enum: ["Power Outage", "Resolved", "OutDoor Escalation","LongTerm Issues"], + required: true + }, + // ticketId: String, + movedAt: { + type: String, + required: true + }, + assignedTo: { + name: String, + support_teamMemberId: String, + phone: String, + email: String, + startDate: String, + endDate: String, + assignedAt: String, + assignmentCode: String + } + }); + + const CommentSchema = new Schema({ + text: { type: String }, + call_status: { type: String }, + call_time: { type: String }, + customerId: String, + hardwareId: String, + createdAt: { type: Date, default: Date.now } + }); + + const CallRecordSchema = new Schema({ + call_status: { type: String }, + call_time: { type: String }, + customerId: String, + hardwareId: String, + createdAt: { type: Date, default: Date.now } + }); + + const supportschema = new mongoose.Schema({ + // name: { type: String }, + phone: { type: String, unique: true, trim: true }, + address: String, + supportId: { type: String }, + phoneVerified: { type: Boolean, default: 
false }, + phoneVerificationCode: { type: Number, default: 11111 }, + passwordResetCode: { type: Number}, + oneTimePasswordSetFlag: { type: Boolean, default: false }, + emails: [{ email: String, verified: { type: Boolean, default: false } }], + services: { password: { bcrypt: String } }, + alternativeNumber: { type: String, default: null }, + firstName: { type: String, default: null }, + lastName: { type: String, default: null }, + address1: { type: String, default: null }, + address2: { type: String, default: null }, + city: { type: String, default: null }, + designation: { type: String, default: null }, + reportingManager: { type: String, default: null }, + departmentName: { type: String, default: null }, + zone: { type: String, default: null }, + type: { type: String }, + dateOfLogin: { type: String, default: null }, + timeOfLogin: { type: String, default: null }, + currentTime: { type: Date, default: Date.now }, + comments: [CommentSchema], + // callRecord: [CallRecordSchema], + lastTicketRaisedAt: {type : String}, + issues: [IssueSchema], + + categorizedIssues: [CategorizedIssueSchema], + resolvedIssues: [ + { + type: { type: String }, + hardwareId: String, + masterHardwareId: String, + category: String, // will be 'Resolved' + resolvedAt: String, // ISO string or Date + originalMovedAt: String, // store original movedAt for reference + // ticketId: String, + reason: String, + } + ], + masterDisconnected: [{ // new field for master disconnected details + hardwareId: String, + masterName: String, + disconnectedAt: String, + }], + disconnectedSlaves: [{ // new field for disconnected slaves details + slaveHardwareId: String, + slaveName: String, + }], + // categorizedIssues: [ + // { + // type: { + // type: String, + // required: true + // }, + // hardwareId: { + // type: String, + // required: true + // }, + // masterHardwareId: { + // type: String, + // required: true + // }, + // slaveName: { + // type: String, + // }, + // category: { + // type: String, + // 
enum: [ "Power Outage", + // "Resolved", + // "Escalation",], + // required: true + // }, + // movedAt: { + // type: String, // or Date, depending on your preference + // required: true + // }, + // assignedTo: { + // name: String, + // support_teamMemberId: String, + // phone: String, + // email: String, + // startDate: String, + // endDate: String + // } + // } + // ], + + + profile: { + + state: { type: String, default: null }, + country: { type: String, default: null }, + }, + team : { type: String, default: null}, + manager : { type: String, default: null}, + team_member: { + + team_member: [ + { + support_teamMemberId: { type: String }, + name: { type: String }, + phone: { type: String }, + installationTeamMemId: { type: String }, + password: { type: String, default: null }, + status: { type: String, default: "active" }, + email: { type: String }, + alternativePhone: { type: String }, + + } + ], + + + }, + + longitude: { type : Number,default: 0.0}, + latitude: {type: Number,default: 0.0}, + + fcmId: { type: String, default: null }, + createdAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + createdBy: ObjectId, + updatedAt: { + type: Date, + default: function () { + return Date.now(); + }, + }, + updatedBy: ObjectId, + + }); const storeSchema = new mongoose.Schema({ storename: { type: String }, @@ -132,11 +556,7 @@ const installationschema = new mongoose.Schema({ reportingManager: { type: String, default: null }, departmentName: { type: String, default: null }, zone: { type: String, default: null }, - services: { - password: { - bcrypt: { type: String, required: true } - } - }, + services: { password: { bcrypt: String } }, description: { type: String, default: null }, startingPrice: { type: String, default: 0.0 }, profile: { @@ -250,12 +670,12 @@ const motorSwitchSensorInSchema = new mongoose.Schema({ const insensorsSchema = new mongoose.Schema({ storeId: { type: String }, - hardwareId: { type: String }, + hardwareId: { type: 
String, default: null }, masterId: { type: String, default: null }, type: { type: String }, model: { type: String }, indate: { type: String }, - hardwareId_company: { type: String }, + hardwareId_company: { type: String, default: null }, qccheck: { type: String, default: null }, qccheckdate: { type: String, default: null }, qcby: { type: String, default: null }, @@ -269,13 +689,139 @@ const insensorsSchema = new mongoose.Schema({ comments: { type: String, default: "0" }, quantity: { type: Number, default: 0 }, batchno: { type: String, default: null }, - sensor_type: { type: String, enum: ['slaves', 'motorswitch', 'master'] }, // adding sensor_type field + sensor_type: { type: String }, // adding sensor_type field + status: { type: String, default: "pending" }, + connected_to: { type: String, default: "0" }, + tankName: { type: String, default: "0" }, + tankLocation: { type: String, default: "0" }, + connected_slave: { type: String, default: null }, + connected_status: { type: String, enum: ["connected", "disconnected", "Not connected", "unknown"], default: "unknown" }, + masterName: { type: String, default: null }, + location: { type: String, default: null }, + tankhardwareId: { type: String, default: null }, + support_issue_status: { + type: String, + enum: ['active', 'inactive'], + default: 'inactive' + }, + motor_switches: [ + { + from_tank: { type: String, default: null }, + from_location: { type: String, default: null }, + to_tank: { type: String, default: null }, + to_location: { type: String, default: null }, + }, + ], + connected_gsm_time: { type: String, default: null }, +connected_gsm_date: { type: String, default: null }, +connected_lora_date: { type: String, default: null }, +connected_lora_time: { type: String, default: null }, +typeOfWater:{ type: String, default: null }, +gsm_last_check_time : { type: String, default: null }, +support_gsm_last_check_time : { type: String, default: null }, +support_lora_last_check_time : { type: String, default: null 
}, +team_member_support_gsm_last_check_time : { type: String, default: null }, +team_member_support_lora_last_check_time : { type: String, default: null }, +lora_last_check_time : { type: String, default: null }, +gsm_last_disconnect_time : { type: String, default: null }, +outDoor_status: { + type: String, + enum: ['inprogress', 'inprogress at store','ready to pick up', 'dispatched'], + default: 'inprogress' // optional: set default + }, +lora_last_disconnect_time : { type: String, default: null }, + hardwareList: { + type: [ + { + name: { type: String, required: true }, + value: { type: Number, required: true } + } + ], + default: [] +}, + quality_check_details: [{ + damage_check: { result: String }, + stickering_check: { result: String }, + power_check: { result: String }, + master_connecting_gsm: { result: String }, + slave_connecting: { result: String }, + motor_start: { result: String }, + motor_stop: { result: String }, + + motor_starting: { + result: String, + steps: [ + { step: Number, result: String } + ] + }, + connecting_to_sensor: { result: String }, + connecting_to_slave: { result: String }, + data_sending: { result: String }, + connected_slave_count: {type : String}, +lastTicketRaisedAt: { type: String }, + // hardwareList: { + // type: Map, + // of: Number, + // default: {} + // }, + + +distance_check: { + result: String, + steps: [ + { step: Number, result: String } + ] + } + }], + manualTestVideos: [ + { + url: String, + createdAt: { type: Date, default: Date.now } + } +], +materialReceivedPictures: [ + { + url: String, + createdAt: { type: Date, default: Date.now } + } +], +workStatusPictures: [ + { + url: String, + createdAt: { type: Date, default: Date.now } + } +], + product_status: { + type: String, + enum: ['pending', 'complete'], + default: 'pending' + }, + description: { type: String }, + +}); + + + +const iotpriceSchema = new mongoose.Schema({ + name: { type: String }, + type: { type: String ,default:null}, + cost: { type: Number, 
default: null }, }); +const estimationorderSchema = new mongoose.Schema({ + orderId: { type: String, unique: true, required: true }, + customerId: { type: String, required: true }, + items: { type: Array, required: true }, + estimatedTotal: { type: String, required: true }, + status: { type: String, default: "pending" }, +}, { timestamps: true }); + const sensorquotationSchema = new mongoose.Schema({ customerId: { type: String }, + surveyId: { type: String, default: null }, + storeId: { type: String, default: null }, installationId: { type: String, default: null }, quatationId: { type: String, default: null }, masters: { type: String }, @@ -293,15 +839,47 @@ const sensorquotationSchema = new mongoose.Schema({ comments: { type: String, default: null }, datetime: { type: String, default: null }, updated_at: { type: String, default: null }, + + master_connections: [ + { + master_name: { type: String, default: null }, + slaves: { type: String, default: null }, + location: { type: String, default: null }, + googleLocation: { type: String, default: null }, + longitude: { type : Number,default: 0.0}, + latitude: {type: Number,default: 0.0}, + tanks: [ + { + tankName: { type: String, default: null }, + tankLocation: { type: String, default: null }, + + }, + ], + motor_switches: [ + { + from_tank: { type: String, default: null }, + from_location: { type: String, default: null }, + to_tank: { type: String, default: null }, + to_location: { type: String, default: null }, + }, + ], + + + }, + ], electricals: [ { type: { type: String, default: null }, wire: { type: String, default: null }, switch: { type: String, default: null }, text: { type: String, default: null }, + available_quantity: { type: String, default: null }, }, ], master_type_quantity_price: { type: String, default: null }, + master_available_quantity: { type: String, default: null }, + slave_available_quantity: { type: String, default: null }, + sensor_available_quantity: { type: String, default: null }, 
master_type_total_price: { type: String, default: null }, sensor_type_quantity_price: { type: String , default: null}, sensor_type_total_price: { type: String , default: null}, @@ -310,6 +888,137 @@ const sensorquotationSchema = new mongoose.Schema({ qutation_total_price: { type: String, default: null }, }); + + + + +const orderSchema = new mongoose.Schema({ + customerId: { type: String }, + surveyId: { type: String, default: null }, + storeId: { type: String, default: null }, + installationId: { type: String, default: null }, + quatationId: { type: String, default: null }, + masters: { type: String }, + masters_quantity_price: { type: String }, + masters_total_price: { type: String }, + slaves: { type: String }, + sensors: { type: String }, + slaves_quantity_price: { type: String }, + slaves_total_price: { type: String }, + motor_switches: { type: String }, + motor_switches_quantity_price: { type: String }, + motor_switches_total_price: { type: String }, + quote_status: { type: String, default: null }, + quoted_amount: { type: String, default: null }, + comments: { type: String, default: null }, + datetime: { type: String, default: null }, + updated_at: { type: String, default: null }, + assignedTeamMembers: [{ type: String }], + tankhardwareId: [{ type: String,default: null }], + master_connections: [ + { + master_name: { type: String, default: null }, + hardwareId: { type: String, default: null }, + work_status: { type: String, enum: ['active', 'pending', 'complete'], default: 'active' }, + slaves: { type: String, default: null }, + location: { type: String, default: null }, + googleLocation: { type: String, default: null }, + longitude: { type : Number,default: 0.0}, + latitude: {type: Number,default: 0.0}, + tanks: [ + { + tankName: { type: String, default: null }, + tankLocation: { type: String, default: null }, + + }, + ], + motor_switches: [ + { + from_tank: { type: String, default: null }, + from_location: { type: String, default: null }, + to_tank: { 
type: String, default: null }, + to_location: { type: String, default: null }, + }, + ], + + + }, + ], + + electricals: [ + { + type: { type: String, default: null }, + wire: { type: String, default: null }, + switch: { type: String, default: null }, + text: { type: String, default: null }, + available_quantity: { type: String, default: null }, + }, + ], + master_type_quantity_price: { type: String, default: null }, + master_available_quantity: { type: String, default: null }, + slave_available_quantity: { type: String, default: null }, + sensor_available_quantity: { type: String, default: null }, + master_type_total_price: { type: String, default: null }, + sensor_type_quantity_price: { type: String, default: null }, + sensor_type_total_price: { type: String, default: null }, + switch_type_quantity_price: { type: String, default: null }, + switch_type_total_price: { type: String, default: null }, + qutation_total_price: { type: String, default: null }, + type: { type: String, default: null }, + status: { type: String, default: "pending" }, + quatation_status: { type: String, default: "pending" }, + +}); + +const SensorStockSchema = new mongoose.Schema({ + storeId: { + type: String, + required: true, + ref: "Store", + }, + type: { + type: String, + required: true, + enum: ["master", "slave", "sensor"], // Ensures only valid types + }, + total_count: { + type: Number, + required: true, + default: 0, + }, + total_available: { + type: Number, + required: true, + default: 0, + }, + total_count_before_qc: { + type: Number, + required: true, + default: 0, + }, + total_blocked: { + type: Number, + required: true, + default: 0, + }, + total_repair: { + type: Number, + required: true, + default: 0, + }, + excess_needed: { + type: Number, + required: true, + default: 0, + }, + total_installed: { + type: Number, + required: true, + default: 0, + }, +}, { timestamps: true }); + + const hardwareCartSchema = new mongoose.Schema({ productId: { type: String}, productName: { type: 
String }, @@ -401,18 +1110,186 @@ const salesSchema = new mongoose.Schema({ updatedBy: ObjectId, }, { versionKey: false }); + +const masterSlaveDataSchema = new mongoose.Schema({ + installationId: {type: String}, + customerId: {type: String}, + type: { type: String}, + hardwareId: { type: String }, + batchno: { type: String, default: null }, + masterId: { type: String }, + tankName: { type: String }, + tankLocation: { type: String }, + materialRecived: { type: String }, + electricityWork: { type: String }, + plumbingWork: { type: String }, + electricityWorkPictures: [ + { + url: { type: String }, + uploadedAt: { type: Date, default: Date.now } + } + ], + plumbingWorkPictures: [ + { + url: { type: String }, + uploadedAt: { type: Date, default: Date.now } + } + ], + materialRecievedPictures: [ + { + url: { type: String }, + uploadedAt: { type: Date, default: Date.now } + } + ], + loraCheck: { type: String }, + +}, { + timestamps: true, +}); + +const electrictyWorkPicturesSchema = new Schema({ + installationId: { + type: String, + //required: true, + //unique: true + }, + customerId: { + type: String, + //required: true + }, + pictureUrl: [{ + url: { + type: String, + }, + }], + createdAt: { + type: Date, + default: Date.now + } +}); + +const plumbingWorkPicturesSchema = new Schema({ + installationId: { + type: String, + //required: true, + //unique: true + }, + customerId: { + type: String, + //required: true + }, + pictureUrl: [{ + url: { + type: String, + }, + }], + createdAt: { + type: Date, + default: Date.now + } +}); + +const manualTestVideoSchema = new Schema({ + installationId: { + type: String, + //required: true, + //unique: true + }, + customerId: { + type: String, + //required: true + }, + pictureUrl: [{ + url: { + type: String, + }, + }], + createdAt: { + type: Date, + default: Date.now + } +}); + +const materialRecievedPicturesSchema = new Schema({ + installationId: { + type: String, + //required: true, + //unique: true + }, + customerId: { + type: 
String, + //required: true + }, + pictureUrl: [{ + url: { + type: String, + }, + }], + createdAt: { + type: Date, + default: Date.now + } +}); + +const RepairorderSchema = new mongoose.Schema({ + customerId: { type: String, required: true }, + supportId: { type: String }, + storeId: { type: String }, + status: { type: String, default: "pending" }, + packageId: { type: String,}, + otp: { type: String, }, + replacements: [ + { + type: { + type: String, + enum: ["master", "slave", "sensor"], + required: true + }, + oldHardwareId: { type: String, required: true }, + newHardwareId: { type: String } + } + ], + createdAt: { type: Date, default: Date.now } +}); + + + + +const Repairorder = mongoose.model('Repairorder', RepairorderSchema); + + + +const Iotprice = mongoose.model('Iotprice', iotpriceSchema); const Insensors = mongoose.model('Insensors', insensorsSchema); + const MasterSlaveData = mongoose.model('MasterSlaveData', masterSlaveDataSchema); + const ElectrictyWorkPictures = mongoose.model('ElectrictyWorkPictures', electrictyWorkPicturesSchema); + const ManualTestVideo = mongoose.model('ManualTestVideo', manualTestVideoSchema); + + const PlumbingWorkPictures = mongoose.model('PlumbingWorkPictures', plumbingWorkPicturesSchema); + const MaterialRecievedPictures = mongoose.model('MaterialRecievedPictures', materialRecievedPicturesSchema); + + + const Order = mongoose.model('Order', orderSchema); + const EstimationOrder = mongoose.model('EstimationOrder', estimationorderSchema); + const Store = mongoose.model("Store", storeSchema); const WaterLeverSensor = mongoose.model('WaterLeverSensor', waterLeverSensorInSchema); const ProfilePictureStore = mongoose.model('ProfilePictureStore', profilePictureStoreSchema); const ProfilePictureInstall = mongoose.model('ProfilePictureInstall', profilePictureInstallSchema); + const ProfilePictureInstallTeamMember = mongoose.model('ProfilePictureInstallTeamMember', profilePictureInstallTeamMemberSchema); + const MotorSwitchSensor = 
mongoose.model('MotorSwitchSensor', motorSwitchSensorInSchema); const SensorQuotation = mongoose.model('SensorQuotationSchema', sensorquotationSchema); - + + const SensorStock = mongoose.model("SensorStock", SensorStockSchema); const Install = mongoose.model("Install", installationschema); + const Survey = mongoose.model("Survey", surveyschema); + const Support = mongoose.model("Support", supportschema); + const HardwareCart = mongoose.model("HardwareCart", hardwareCartSchema); const ServiceCart = mongoose.model("ServiceCart", serviceCartSchema); const Sales = mongoose.model("Sales", salesSchema); + - module.exports = {Sales, Install, ProfilePictureInstall, SensorQuotation,generateinstallationId,Store,ProfilePictureStore,WaterLeverSensor,MotorSwitchSensor,Insensors,generatequatationId, HardwareCart, ServiceCart}; + module.exports = {ManualTestVideo,Repairorder,Support,MaterialRecievedPictures,PlumbingWorkPictures,ElectrictyWorkPictures,MasterSlaveData,SensorStock,Order,EstimationOrder,Iotprice,Sales, Install,Survey, ProfilePictureInstall, SensorQuotation,generateinstallationId,Store,ProfilePictureStore,WaterLeverSensor,MotorSwitchSensor,Insensors,generatequatationId, HardwareCart, ServiceCart,ProfilePictureInstallTeamMember}; diff --git a/src/models/supplier.js b/src/models/supplier.js index c356cb5c..fc007b83 100644 --- a/src/models/supplier.js +++ b/src/models/supplier.js @@ -23,6 +23,9 @@ const supplierSchema = new mongoose.Schema( { suppliername: { type: String }, phone: { type: String, unique: true, trim: true }, + bussinessname: {type : String, default: null}, + registration_number: {type : String, default: null}, + years_in_business: {type : String, default: null}, supplierId: {type : String, default: null}, phoneVerified: { type: Boolean, default: false }, phoneVerificationCode: { type: Number, default: 11111 }, @@ -32,6 +35,7 @@ const supplierSchema = new mongoose.Schema( services: { password: { bcrypt: String } }, description: {type : String, default: 
null}, startingPrice : { type : String, default: 0.0}, + status: { type : String, default:"under_verification"}, profile: { role: [{ type: String, default: "supplier" }], firstName: { type: String, default: null }, @@ -155,12 +159,97 @@ const supplierSchema = new mongoose.Schema( }); + +const requestedSupplierSchema = new mongoose.Schema({ + supplierId: String, + quoted_amount: Number, + time: {type:String,default:null}, // ✅ New field added here + status:{type:String,default: "pending" }, + advance_paid: Number, + advance_ref_number: {type:String,default:null}, +}, { _id: false }); + +const requestedBookingSchema = new mongoose.Schema({ + customerId: { type: String, required: true }, + type_of_water: String, + capacity: String, + quantity: String, + total_required_capacity: Number, + date: String, + time: String, + requested_suppliers: [requestedSupplierSchema], + status: { type: String, default: "pending" }, +}, { timestamps: true }); + +// models/RecurringRequestedBooking.js + +// const requestedSupplier1Schema = new mongoose.Schema({ +// supplierId: String, +// quoted_amount: Number, +// time: { type: String, default: null }, +// status: { type: String, default: "pending" }, +// }, { _id: false }); + +// const recurringRequestedBookingSchema = new mongoose.Schema({ +// customerId: { type: String, required: true }, +// type_of_water: String, +// capacity: String, +// quantity: String, +// total_required_capacity: Number, +// frequency: { type: String, enum: ["daily","weekly_once","weekly_twice","weekly_thrice","weekly"], required: true }, +// weekly_count: { type: Number, enum: [1,2,3] }, +// start_date: { type: String, required: true }, +// end_date: { type: String, required: true }, +// time: String, +// dates: [String], +// requested_suppliers: [requestedSupplier1Schema], +// status: { type: String, default: "pending" }, +// }, { timestamps: true }); + + +const requestedSupplier1Schema = new mongoose.Schema({ + supplierId: { type: String, required: true }, + 
quoted_amount: { type: Number, default: 0 }, + time: { type: String, default: null }, // keep as string to match current payloads + status: { type: String, enum: ["pending", "accepted", "rejected"], default: "pending" }, +}, { _id: false }); + +const recurringRequestedBookingSchema = new mongoose.Schema({ + customerId: { type: String, required: true }, + type_of_water: { type: String, required: true }, + + capacity: { type: String, required: true }, // keep as sent by UI, we also store parsed number below if you want + quantity: { type: String, required: true }, + total_required_capacity: { type: Number, required: true }, // capacity * quantity (numeric) + + frequency: { + type: String, + enum: ["daily", "weekly_once", "weekly_twice", "weekly_thrice", "weekly"], + required: true + }, + weekly_count: { type: Number, default: 1 }, + + start_date: { type: String, required: true }, // storing original string for audit + end_date: { type: String, required: true }, + time: { type: String, required: true }, + + dates: { type: [String], default: [] }, // ISO "YYYY-MM-DD" strings + + requested_suppliers: { type: [requestedSupplier1Schema], default: [] }, + + status: { type: String, default: "pending" }, +}, { timestamps: true }); + +const RequestedBooking = mongoose.model('RequestedBooking', requestedBookingSchema); + +const RecurringRequestedBooking = mongoose.model("RecurringRequestedBooking", recurringRequestedBookingSchema); + const Supplier = mongoose.model("Supplier", supplierSchema); //const DeliveryAgent = mongoose.model("DeliveryAgent", deliveryAgent); const FriendRequest = mongoose.model('FriendRequest', friendRequestSchema); const DeliveryBoy = mongoose.model('DeliveryBoy', deliveryBoySchema); const profilePictureSupplier = mongoose.model('ProfilePictureSupplier', profilePictureSupplierSchema); -module.exports = { Supplier, generateSupplierId, FriendRequest,DeliveryBoy, profilePictureSupplier} +module.exports = { Supplier, generateSupplierId, 
FriendRequest,DeliveryBoy, profilePictureSupplier,RequestedBooking,RecurringRequestedBooking} diff --git a/src/models/tankers.js b/src/models/tankers.js index ba2a14ef..8917d4a0 100644 --- a/src/models/tankers.js +++ b/src/models/tankers.js @@ -58,10 +58,12 @@ const tankersbookingSchema = new mongoose.Schema({ stop_time:{ type: String, default: "null" }, quantityDelivered: { type: String, default: null}, amount_paid: { type: String, default: null }, + advance_reference_number:{ type: String, default: null }, amount_due: { type: String, default: null }, distrubance_price: { type: String, default: "none" }, amount_difference: { type: String, default: "none" }, payment_mode: { type: String, default: null }, + payment_reference_number:{type: String, default: null}, remarks : { type: String, default: null }, customerPhone : { type: String, default: null }, supplierPhone : { type: String, default: null }, diff --git a/src/models/tanks.js b/src/models/tanks.js index 04d4813e..28a56a70 100644 --- a/src/models/tanks.js +++ b/src/models/tanks.js @@ -55,6 +55,18 @@ const tanksSchema = new mongoose.Schema({ auto_min_percentage: { type: String, default: "20" }, reserved_percentage: { type: String, default: "20" }, auto_max_percentage: { type: String, default: "80" }, + auto_mode_type: { type: String, default: "default" }, + notificationSentCritical: { type: Boolean }, + notificationSentVeryLow: { type: Boolean }, + notificationSentLow: { type: Boolean }, + notificationSentCriticalHigh: { type: Boolean }, + notificationSentVeryHigh: { type: Boolean }, + notificationSentHigh: { type: Boolean }, + all_motor_status: { type: Boolean }, + status:{ type: String, default: "active" }, + slave_status:{ type: String, default: "working" }, + slave_disconnected_time :{ type: String, default: null }, + connections: { source: { type: String }, inputConnections: [ @@ -71,13 +83,14 @@ const tanksSchema = new mongoose.Schema({ water_level: { type: String, default: null }, 
manual_threshold_percentage: { type: String, default: "90" }, manual_threshold_time: { type: String, default: null }, - + status:{ type: String, default: "active" }, stop_threshold_time: { type: String, default: null }, threshold_type: { type: String, default: "percentage" }, startTime: { type: String, default: null }, start_instance_id: { type: String, default: null }, stopTime: { type: String, default: null }, - waterlevelPercentage: { type: String, default: null } + waterlevelPercentage: { type: String, default: null } , + slave_status:{ type: String, default: "working" }, } ], outputConnections: [ @@ -93,34 +106,80 @@ const tanksSchema = new mongoose.Schema({ manual_threshold_percentage: { type: String, default: "90" }, manual_threshold_time: { type: String, default: null }, threshold_type: { type: String, default: "percentage" }, - waterlevelPercentage: { type: String, default: null } + waterlevelPercentage: { type: String, default: null } , + status:{ type: String, default: "active" }, } ], inputWaterlevelPercentage: { type: String, default: null }, outputWaterlevelPercentage: { type: String, default: null } - } + }, + motor_start_notified: { type: Boolean, default: false }, + motor_stop_notified: { type: Boolean, default: false } + }); +const customerautopercentages = ({ + customerId: { type: String }, + auto_min_percentage: { type: String, required: true }, + auto_max_percentage: { type: String,default:null }, + + date: { type: String, required: true }, + +}); const motordataSchema = new mongoose.Schema({ customerId: { type: String, default: null }, motor_id: { type: String, default: null }, + started_by:{ type: String, default: "user" }, start_instance_id:{type:String,default:null}, supplierTank: { type: String, default: null }, receiverTank: { type: String, default: null }, - receiverInitialwaterlevel: { type: String, default: "0" }, - receiverfinalwaterlevel: { type: String, default: "0" }, + supplierInitialwaterlevel: { type: String, default: "0" }, + 
supplierfinalwaterlevel: { type: String, default: "0" }, startTime: { type: String, default: null }, stopTime: { type: String, default: null }, runtime:{type:String, default:"0"}, supplier_type: { type: String, default: null }, receiver_type: { type: String, default: null }, quantity_delivered:{ type: String, default: null }, - + receiverInitialwaterlevel: { type: String, default: "0" }, + receiverfinalwaterlevel: { type: String, default: "0" }, + started_by: { type: String, default: "user" }, + stopped_by: { type: String, default: "user" } }); +const updateMotorData = async () => { + try { + // Fetch all motor data where quantity_delivered is null or needs updating + const motorDataRecords = await MotorData.find({}); + + for (let record of motorDataRecords) { + // Convert string values to numbers by removing commas + const initialLevel = parseInt(record.receiverInitialwaterlevel.replace(/,/g, ""), 10) || 0; + const finalLevel = parseInt(record.receiverfinalwaterlevel.replace(/,/g, ""), 10) || 0; + + // Calculate quantity delivered + const quantityDelivered = finalLevel - initialLevel; + + // Update the record + await MotorData.updateOne( + { _id: record._id }, + { + $set: { + started_by: "user", + quantity_delivered: quantityDelivered.toString(), // Convert back to string for consistency + }, + } + ); + } + + console.log("Motor data updated successfully!"); + } catch (err) { + console.error("Error updating motor data:", err); + } +}; const tankSchema = new mongoose.Schema({ tankhardwareId: { type: String }, @@ -164,6 +223,8 @@ const tankconsumptionoriginalSchema = new mongoose.Schema({ tankName: { type: String }, tankLocation: { type: String }, consumption: { type: String }, + consumed_percentage:{ type: String }, + available_capacity:{ type: String }, block:{type: String}, typeofwater:{type:String}, time: { type: String } @@ -171,6 +232,8 @@ const tankconsumptionoriginalSchema = new mongoose.Schema({ const Tank = mongoose.model("Tank", tanksSchema); +const 
CustomerAutoPercentages = mongoose.model("CustomerAutoPercentages", customerautopercentages); + const MotorData = mongoose.model("MotorData", motordataSchema); const TankWaterLevel = mongoose.model("TankWaterLevel", tankWaterLevelSchema); const IotData = mongoose.model("IotData", IOttankSchema); @@ -179,6 +242,6 @@ const TankConsumptionOriginalSchema = mongoose.model("TankConsumptionOriginalSch module.exports = { - Tank, MotorData,IotData,TankWaterLevel,TankConsumptionSchema,TankConsumptionOriginalSchema + Tank, MotorData,IotData,TankWaterLevel,TankConsumptionSchema,TankConsumptionOriginalSchema,CustomerAutoPercentages } diff --git a/src/routes/adminRoute.js b/src/routes/adminRoute.js index 6a03c812..bb7e74db 100644 --- a/src/routes/adminRoute.js +++ b/src/routes/adminRoute.js @@ -31,6 +31,30 @@ fastify.route({ handler: adminController.adminSignUp, }); +fastify.put('/api/editAdmin/:customerId', { + schema: { + description: "Edit Admin details by CustomerId", + tags: ["Admin"], + summary: "Edit Admin details by CustomerId", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + }, + required: ["customerId"], + }, + body: { + type: "object", + properties: { + phone: { type: "string" }, + username: { type: "string" }, + picture: { type: "string" }, + + }, + } + }, + handler: adminController.editAdmin, +}); fastify.post("/api/adminLogin", { schema: { @@ -187,23 +211,6 @@ fastify.route({ }); -// fastify.post("/api/createUser", { -// schema: { -// description: "This is for Create sale/store", -// tags: ["createUser for sale/sore"], -// summary: "This is for Create sale/store", -// body: { -// type: "object", -// required: ["phone", "password", "role"], -// properties: { -// phone : { type: "string" }, -// password: { type: "string" }, -// role: { type: "string", enum: ["sales", "store"] } -// }, -// }, -// }, -// handler: adminController.createUser, -// }); @@ -230,7 +237,172 @@ fastify.post("/api/integratingHardwareidToTank", { handler: 
adminController.integratingHardwareidToTank, }); +fastify.post("/api/getDepartmentDetails/:adminId", { + schema: { + description: "Get department details by adminId, departmentName and reportingManager", + tags: ["Admin"], + summary: "Get department details", + params: { + type: "object", + properties: { + adminId: { type: "string", description: "Admin ID" } + }, + required: ["adminId"] + }, + body: { + type: "object", + properties: { + departmentName: { type: "string" }, + reportingManager: { type: "string" } + }, + required: ["departmentName", "reportingManager"] + } + }, + handler: adminController.getDepartmentDetailsByAdminAndName +}); + +fastify.get("/api/getAllCompanies", { + schema: { + tags: ["Admin"], + description: "Get all Companies List", + summary: "Get all Companies List", + + }, + handler: adminController.getAllCompanys, +}); +fastify.get("/api/getBranchDetails", { + schema: { + tags: ["Admin"], + description: "Get Branch Details", + summary: "Get Branch Details", + // params: { + // type: "object", + // properties: { + // officeName: { type: "string", description: "Office name" } + // }, + // required: ["officeName"] + // }, + querystring: { // ✅ allow customerId in query string + type: 'object', + required: ['officeName'], + properties: { + officeName: { type: 'string' } + } + } + }, + handler: adminController.getAllOffices, +}); + + +fastify.get("/api/getOfficesByCity", { + schema: { + tags: ["Admin"], + description: "Get Offices by City", + summary: "Fetch Head Offices and Branches by City", + querystring: { + type: 'object', + required: ['city'], + properties: { + city: { type: 'string' } + } + } + }, + handler: adminController.getAllOfficesByCity, +}); + + fastify.put("/api/adminEditTeamMember/:departmentId/:teamMemberId", { + schema: { + description: "Admin Edit Team Member", + tags: ["Admin"], + summary: "Admin Edit Team Member", + params: { + type: "object", + properties: { + departmentId: { type: "string", description: "departmentId" }, + 
teamMemberId: { type: "string", description: "Team Member ID" } + }, + required: ["departmentId", "teamMemberId"] + }, + body: { + type: "object", + properties: { + firstName: { type: "string" }, + phone: { type: "string" }, + email: { type: "string" }, + alternativePhone: { type: "string" }, + + } + }, + + }, + handler: adminController.adminEditTeamMember + }); + + fastify.delete("/api/admin/admindeleteTeamMember/:departmentId/:teamMemberId", { + schema: { + description: "Delete a team member from an Admin", + tags: ["Admin"], + summary: "Admin Delete Team Member", + params: { + type: "object", + properties: { + departmentId: { type: "string", description: "departmentId" }, + teamMemberId: { type: "string", description: "Team Member ID" } + }, + required: ["departmentId", "teamMemberId"] + }, + response: { + 200: { + type: "object", + properties: { + simplydata: { + type: "object", + properties: { + error: { type: "boolean" }, + message: { type: "string" } + } + } + } + } + } + }, + handler: adminController.AdmindeleteTeamMember + }); + +fastify.get("/api/getOfficeDetails/:officeName/:city", { + schema: { + tags: ["Admin"], + description: "Get office details from both City and Branch collections", + summary: "Get merged office data", + params: { + type: "object", + properties: { + officeName: { type: "string" }, + city: { type: "string" } + }, + required: ["officeName", "city"] + }, + }, + handler: adminController.getOfficeDetails +}); + +fastify.get("/api/getCompanyCitiesList/:officeName", { + schema: { + tags: ["Admin"], + description: "Get Cities List Office Name Matches", + summary: "Get Cities List Office Name Matches", + params: { + type: "object", + properties: { + officeName: { type: "string" }, + + }, + required: ["officeName"] + }, + }, + handler: adminController.getCompanyCitiesByOffice +}); next(); }; diff --git a/src/routes/departmentRoute.js b/src/routes/departmentRoute.js index ece82516..6cf64308 100644 --- a/src/routes/departmentRoute.js +++ 
b/src/routes/departmentRoute.js @@ -6,7 +6,7 @@ module.exports = function (fastify, opts, next) { method: "POST", url: "/api/citySignup", schema: { - tags: ["Department"], + tags: ["Admin"], description: "This is for creating a new City account", summary: "This is for creating a new City account", body: { @@ -20,11 +20,18 @@ module.exports = function (fastify, opts, next) { items: { type: "string" }, }, state: { type: "string" }, + email: { type: "string" }, country: { type: "string" }, office_address1: { type: "string" }, address2: { type: "string" }, zone: { type: "string" }, pincode: { type: "string" }, + googleLocation: { type: "string" }, + gstNo: { type: "string" }, + latitude: { type: 'number', default: 0.0 }, + longitude: { type: 'number', default: 0.0 }, + //nameoftheContactPerson: {type : "string"}, + //departmentName: { type: "string" }, }, }, @@ -62,21 +69,21 @@ module.exports = function (fastify, opts, next) { // handler: departmentController.getSinledepartmentData, // }); - fastify.get("/api/getallcities", { - schema: { - tags: ["Department"], - description: "This is for Get all cities Data", - summary: "This is for to Get all cities Data", + // fastify.get("/api/getallcities", { + // schema: { + // tags: ["Department"], + // description: "This is for Get all cities Data", + // summary: "This is for to Get all cities Data", - security: [ - { - basicAuth: [], - }, - ], - }, - //preHandler: fastify.auth([fastify.authenticate]), - handler: departmentController.getallcities, - }); + // security: [ + // { + // basicAuth: [], + // }, + // ], + // }, + // //preHandler: fastify.auth([fastify.authenticate]), + // handler: departmentController.getallcities, + // }); // fastify.get("/api/getalldepartmentsParticularFileds", { @@ -95,6 +102,23 @@ module.exports = function (fastify, opts, next) { // handler: departmentController.getAllDepartmentsParticularFields, // }); + fastify.get("/api/getallcompanyNames", { + schema: { + tags: ["Admin"], + description: "This 
is for Get all Company Name in city schema ", + summary: "This is for to Get all Company Name in city schema ", + + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: departmentController.getallCompanyNames, + }); + + fastify.delete("/api/deletecity/:cityId", { schema: { description: "Delete a city by cityId", @@ -135,7 +159,7 @@ module.exports = function (fastify, opts, next) { body: { type: "object", properties: { - // phone: { type: "string" }, + phone: { type: "string" }, city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, @@ -143,22 +167,110 @@ module.exports = function (fastify, opts, next) { address2: { type: "string" }, zone: { type: "string" }, pincode: { type: "string" }, - departmentName: { type: "string" }, + officeName: { type: "string" }, + email: { type: "string" }, }, } }, handler: departmentController.editcity, }); +// fastify.route({ +// method: "GET", +// url: "/api/city/:cityId", +// schema: { +// tags: ["Department"], +// description: "This is for fetching a City details", +// summary: "This is for fetching a City details", +// params: { +// type: "object", +// properties: { +// cityId: { type: "string" } +// } +// } +// }, +// handler: departmentController.getCityDetails +// }); - +fastify.route({ + method: "GET", + url: "/api/details/:id", + schema: { + tags: ["Department"], + description: "This is for fetching details of a branch or city", + summary: "This is for fetching details of a branch or city", + params: { + type: "object", + properties: { + id: { type: "string" } + } + } + }, + handler: departmentController.getDetails +}); +fastify.route({ + method: "PUT", + url: "/api/updateBranchOrCompanydetails/:id", + schema: { + tags: ["Admin"], + description: "Update details of a branch or city", + summary: "Edit department details by branchId or cityId", + params: { + type: "object", + properties: { + id: { type: "string" }, // branchId or cityId + }, + 
required: ["id"], + }, + body: { + type: "object", + properties: { + phone: { type: "string" }, + land_line_number: { type: "string" }, + officeName: { type: "string" }, + office_address1: { type: "string" }, + address2: { type: "string" }, + email: { type: "string" }, + pincode: { type: "string" }, + zone: { type: "string" }, + city: { type: "string" }, + state: { type: "string" }, + country: { type: "string" }, + nameoftheContactPerson: { type: "string" }, + location: { type: "array", items: { type: "string" } }, + longitude: { type: "number" }, + latitude: { type: "number" }, + googleLocation: { type: "string" }, + gstNo: { type: "string" }, + }, + additionalProperties: true, // allow extra fields if needed + }, + }, + handler: departmentController.updateBranchOrCompanyDetails +}); + fastify.route({ + method: "GET", + url: "/api/branch/:branchId", + schema: { + tags: ["Department"], + description: "This is for fetching a Branch details", + summary: "This is for fetching a Branch details", + params: { + type: "object", + properties: { + branchId: { type: "string" } + } + } + }, + handler: departmentController.getBranchDetails +}); fastify.route({ method: "POST", url: "/api/departmentSignup", schema: { tags: ["Department"], - description: "This is for creating a new Department Account", - summary: "This is for creating a new Department Account", + description: "This is for creating a new Team Member Account", + summary: "This is for creating a new Team Member Account", body: { type: "object", //required: ["phone", "username", "password", "role"], // Add role to required fields @@ -179,8 +291,15 @@ module.exports = function (fastify, opts, next) { departmentName: { type: "string" }, firstName: { type: "string" }, lastName: { type: "string" }, - reportingManager: { type: "string" }, + reportingManager: { type: "string", default: "Self" }, email: { type: "string" }, + personal_city: { type: "string"}, + officeName: { type: "string"}, + reportingManager_mobile_number: 
{ type: "string"}, + reportingManager_email: { type: "string"}, + picture: { type: "string"}, + dateOfJoin: { type: "string"}, + employeeType: { type: "string"}, location: { type: "array", items: { type: "string" }, @@ -235,7 +354,54 @@ module.exports = function (fastify, opts, next) { //preHandler: fastify.auth([fastify.authenticate]), handler: departmentController.getalldepartments, }); - + + fastify.get("/api/getallcitiesdata", { + schema: { + tags: ["Department"], + description: "This is for Get all Cities Data for City Schema", + summary: "This is for to Get all Cities Data for City Schema", + + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: departmentController.getallCitiesData, + }); + fastify.get("/api/getallzonesdata", { + schema: { + tags: ["Department"], + description: "This is for Get all Zones Data for City Schema", + summary: "This is for to Get all Zones Data for City Schema", + + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: departmentController.getallZonesData, + }); + + fastify.get("/api/getalllocationsdata", { + schema: { + tags: ["Department"], + description: "This is for Get all Locations Data for City Schema", + summary: "This is for to Get all Locations Data for City Schema", + + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: departmentController.getallLocationData, + }); + fastify.delete("/api/deletedepartment/:departmentId", { schema: { description: "Delete a Department by departmentId", @@ -293,6 +459,13 @@ module.exports = function (fastify, opts, next) { departmentName: { type: "string" }, firstName: { type: "string" }, lastName: { type: "string" }, + personal_city: { type: "string"}, + officeName: { type: "string"}, + reportingManager_mobile_number: { type: "string"}, + reportingManager_email: { type: "string"}, + picture: { type: "string"}, 
+ employeeType: { type: "string" }, + }, } @@ -300,21 +473,7 @@ module.exports = function (fastify, opts, next) { handler: departmentController.editdepartment, }); - // fastify.get("/api/getalldesignationsParticularFileds", { - // schema: { - // tags: ["Department"], - // description: "This is for Get all Designation particular fileds", - // summary: "This is for to Get all Designation particular fields", - - // security: [ - // { - // basicAuth: [], - // }, - // ], - // }, - // //preHandler: fastify.auth([fastify.authenticate]), - // handler: departmentController.getAllDesignationsParticularFields, - // }); + fastify.route({ method: "GET", @@ -329,12 +488,388 @@ module.exports = function (fastify, opts, next) { properties: { city: { type: "string" }, zone: { type: "string" }, + officeName: { type: "string" }, + }, }, }, handler:departmentController.getZonebasedLocations }); + + fastify.route({ + method: "GET", + url: "/api/zonebasedlocations/:zone", + schema: { + tags: ["Department"], + description: "Get the locations by zone", + summary: "Get the locations by zone", + params: { + type: "object", + properties: { + zone: { type: "string" }, + }, + }, + }, + handler:departmentController.getLocationsByZone + }); + + + fastify.route({ + method: "GET", + url: "/api/zonebasedcity/:city/:officeName", + schema: { + tags: ["Admin"], + description: "Get the zones by city and office", + summary: "Get the zones by city and office", + params: { + type: "object", + properties: { + city: { type: "string" }, + officeName: { type: "string" } + }, + required: ["city", "officeName"] + } + }, + handler: departmentController.getZonesByCityAndOffice // ✅ Match function name +}); + + fastify.route({ + method: "GET", + url: "/api/areabasedcity/:city", + schema: { + tags: ["Department"], + description: "Get the Areas by city", + summary: "Get the Areas by city", + params: { + type: "object", + properties: { + city: { type: "string" }, + }, + }, + }, + 
handler:departmentController.getAreasByCity + }); + + fastify.route({ + method: "GET", + url: "/api/areabasedzones/:area", + schema: { + tags: ["Department"], + description: "Get the Areas based zones", + summary: "Get the Areas based zones", + params: { + type: "object", + properties: { + area: { type: "string" }, + }, + }, + }, + handler:departmentController.getZonesByArea + }); + + // fastify.route({ + // method: "GET", + // url: "/api/departmentNamebaselist/:officeName/:city/:departmentName", + // schema: { + // tags: ["Department"], + // description: "Department name based list", + // summary: "Department name based list", + // params: { + // type: "object", + // properties: { + // officeName: { type: "string" }, + // city: { type: "string" }, + // departmentName: { type: "string" }, + // }, + // }, + // }, + // handler:departmentController.getDepartments + // }); + + fastify.route({ + method: "GET", + url: "/api/departmentNamebaselist/:officeName/:city/:departmentName/:employeeType", + schema: { + tags: ["Admin"], + description: "Department name based list", + summary: "Department name based list", + params: { + type: "object", + properties: { + officeName: { type: "string" }, + city: { type: "string" }, + departmentName: { type: "string" }, + employeeType: { type: "string", enum: ["Internal", "Consultant", "ALL"] }, + }, + }, + }, + handler: departmentController.getDepartments +}); + + fastify.get("/api/getalldepartmentNames", { + schema: { + tags: ["Department"], + description: "This is for Get all departmentNames Data", + summary: "This is for to Get all departmentNames Data", + + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: departmentController.getAllDepartmentNames, + }); + + + fastify.route({ + method: "POST", + url: "/api/branchSignup", + schema: { + tags: ["Admin"], + description: "This is for creating a new Branch account", + summary: "This is for creating a new Branch account", + 
body: { + type: "object", + properties: { + phone: { type: "string" }, + land_line_number: { type: "string" }, + city: { type: "string" }, + officeName: { type: "string" }, + location: { + type: "array", + items: { type: "string" }, + }, + state: { type: "string" }, + email: { type: "string" }, + country: { type: "string" }, + office_address1: { type: "string" }, + address2: { type: "string" }, + zone: { type: "string" }, + pincode: { type: "string" }, + //nameoftheContactPerson: { type: "string"}, + googleLocation: { type: "string" }, + latitude: { type: 'number', default: 0.0 }, + longitude: { type: 'number', default: 0.0 }, + //departmentName: { type: "string" }, + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: departmentController.addBranch, + }); + + fastify.route({ + method: "POST", + url: "/api/zoneSignup", + schema: { + tags: ["Admin"], + description: "This is for creating a new Zone account", + summary: "This is for creating a new Zone account", + body: { + type: "object", + properties: { + city: { type: "string" }, + officeName: { type: "string" }, + location: { + type: "array", + items: { type: "string" }, + }, + zone: { type: "string" }, + area : {type: "string"} + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: departmentController.addZone, + }); + + fastify.delete("/api/deletebranch/:branchId", { + schema: { + description: "Delete a city by branchId", + tags: ["Department"], + summary: "Delete a user by branch", + params: { + type: "object", + properties: { + branchId: { type: "string" }, + }, + required: ["branchId"], + }, + response: { + 200: { + type: "object", + properties: { + success: { type: "boolean" }, + message: { type: "string" }, + } + } + } + }, + handler: departmentController.deleteBranchInfo, + }); + + fastify.put('/api/editbranch/:branchId', { + schema: { + description: "Edit Branch details by branch", + tags: ["Department"], + summary: "Edit Branch details.", + params: { + type: "object", + 
properties: { + branchId: { type: "string" }, + }, + required: ["branchId"], + }, + body: { + type: "object", + properties: { + phone: { type: "string" }, + land_line_number: { type: "string" }, + city: { type: "string" }, + state: { type: "string" }, + country: { type: "string" }, + address1: { type: "string" }, + address2: { type: "string" }, + zone: { type: "string" }, + pincode: { type: "string" }, + officeName: { type: "string" }, + email: { type: "string" }, + }, + } + }, + handler: departmentController.editBranch, + }); + + fastify.get("/api/getCitiesByOfficeName/:officeName", { + schema: { + tags: ["Admin"], + description: "This is for Get cities by OfficeName Data", + summary: "This is to Get cities by OfficeName Data", + params: { + type: "object", + properties: { + officeName: { + type: "string", + description: "officeName", + }, + }, + }, + + security: [ + { + basicAuth: [], + }, + ], + }, + handler: departmentController.getCitiesByOfficeName, + }); + + fastify.route({ + method: "GET", + url: "/api/departmentNameList/:city/:officeName", + schema: { + tags: ["Admin"], + description: "Get a list of department names for a given city", + summary: "Department names by city", + params: { + type: "object", + properties: { + city: { type: "string" }, + officeName: { type: "string" }, + + }, + required: ["city"], + }, + }, + handler: departmentController.getOffices, + }); + + fastify.route({ + method: "GET", + url: "/api/states", + schema: { + tags: ["Department"], + description: "Get the States", + summary: "Get the states", + }, + handler:departmentController.getAllStates + }); + + fastify.route({ + method: "GET", + url: "/api/Cities", + schema: { + tags: ["Admin"], + description: "Get the Cities", + summary: "Get the Cities", + }, + handler:departmentController.getAllCities + }); + + fastify.route({ + method: "GET", + url: "/api/states/cities/:stateName", + schema: { + tags: ["Admin"], + description: "Get the States by cities", + summary: "Get the states by 
cites", + params: { + type: "object", + properties: { + stateName: { type: "string" }, + + }, + }, + }, + handler:departmentController.getStaeBasedCites + }); + + fastify.route({ + method: "GET", + url: "/api/cities/states/:majorcities", + schema: { + tags: ["Admin"], + description: "Get the Cities by state", + summary: "Get the cities by state", + params: { + type: "object", + properties: { + majorcities: { type: "string" }, + + }, + }, + }, + handler:departmentController.getCitiesBasedState + }); + +fastify.route({ + method: "GET", + url: "/api/staffdepartments/:officeName/:city", + schema: { + tags: ["Admin"], + description: "This is for fetching department details based on officeName and city", + summary: "This is for fetching department details based on officeName and city", + params: { + type: "object", + properties: { + officeName: { type: "string" }, + city: { type: "string" } + } + } + }, + handler: departmentController.getStaffDepartmentDetails +}); next(); }; \ No newline at end of file diff --git a/src/routes/installationRoute.js b/src/routes/installationRoute.js new file mode 100644 index 00000000..7ab00c44 --- /dev/null +++ b/src/routes/installationRoute.js @@ -0,0 +1,1534 @@ +const installationController = require("../controllers/installationController") + +module.exports = function (fastify, opts, next) { + + fastify.post("/api/createTeamMember/:adminId", { + schema: { + description: "Create a new team member under an admin", + tags: ["Admin"], + summary: "Create Team Member", + params: { + type: "object", + properties: { + adminId: { type: "string", description: "Admin ID under whom to add the team member" } + }, + required: ["adminId"] + }, + body: { + type: "object", + required: ["departmentId", "firstName", "phone", "password"], + properties: { + departmentId: { type: "string", description: "Department ID (installationId)" }, + //departmentName: { type: "string", description: "Department name" }, + firstName: { type: "string" }, + phone: { type: 
"string" }, + password: { type: "string" }, + alternativePhone: { type: "string" }, + email: { type: "string" }, + status: { type: "string" }, + officeName: { type: "string" }, + city: { type: "string" }, + }, + }, + }, + handler: installationController.createTeamMember, +}); + +fastify.get("/api/getAllDepartments/:officeName", { + schema: { + description: "Get full department details by department name", + tags: ["Admin"], + summary: "Get all department details fetch the managers list", + params: { + type: "object", + properties: { + officeName: { type: "string", }, + // city: { type: "string", }, + + }, + // required: ["departmentName"], + }, + }, + handler: installationController.getAllDepartments, +}); + + + fastify.get("/api/getTeamMembers/:officeName/:city/:departmentId", { + schema: { + description: "Get all team members under a specific department", + tags: ["Admin"], + summary: "Get Team Members by Department ID", + params: { + type: "object", + properties: { + officeName: { + type: "string", + description: "fetch team members from" + }, + city: { + type: "string", + description: "fetch team members from" + }, + departmentId: { + type: "string", + description: "Department ID to fetch team members from" + } + }, + required: ["departmentId", "officeName", "city"] + }, + }, + handler: installationController.getTeamMembers +}); + + fastify.get("/api/getQuations/:installationId", { + schema: { + description: "Get all quatations under a specific installation", + tags: ["Installation"], + summary: "Get all quatations under a specific installation", + params: { + type: "object", + properties: { + installationId: { + type: "string", + description: "Installation ID to fetch team members from" + } + }, + required: ["installationId"] + }, + + }, + handler: installationController.getQuotationsByInstallationId + }); + fastify.get("/api/getQuations/:installationId/:teamMemberId", { + schema: { + description: "Get all quatations under a specific installation and team 
member", + tags: ["Installation"], + summary: "Get all quatations under a specific installation and team member", + params: { + type: "object", + properties: { + installationId: { + type: "string", + description: "Installation ID to fetch team members from" + }, + teamMemberId: { + type: "string", + description: "teamMember ID to fetch team members from" + } + }, + // required: ["installationId"] + }, + + }, + handler: installationController.getQuotationsByInstallationAndTeamMember + }); + + + fastify.route({ + method: "GET", + url: "/api/installationsTeammembers/:installationId", + schema: { + tags: ["Installation"], + description: "This is for fetching team members for a given installation", + summary: "This is for fetching team members for a given installation", + params: { + type: "object", + properties: { + installationId: { type: "string" } + } + } + }, + handler: installationController.getInstallationTeamMembers +}); + fastify.post("/api/assignTeammember/:installationId", { + schema: { + description: "Assign a team member to an installation's quotation", + tags: ["Installation"], + summary: "Assign a team member based on installationId", + params: { + type: "object", + properties: { + installationId: { + type: "string", + description: "Installation ID to fetch team members from" + } + }, + required: ["installationId"] + }, + body: { + type: "object", + properties: { + teamMemberId: { + type: "string", + description: "The team member ID to assign" + }, + quotationId: { + type: "string", + description: "The team member ID to assign" + } + }, + // required: ["teamMemberId"] + }, + }, + handler: installationController.assignTeamMemberToQuotation + }); + + + fastify.get("/api/getAllInstallers/:departmentName", { + schema: { + description: "Get All Installtion list", + tags: ["Installation"], + summary: "Get All Installtion list", + params: { + type: "object", + properties: { + departmentName: { + type: "string", + description: "departmentName to fetch 
Installation list" + } + }, + required: ["departmentName"] + }, + }, + handler: installationController.getAllInstallers + }); + + fastify.put("/api/installationEditTeamMember/:installationId/:teamMemberId", { + schema: { + description: "Update an existing team member's details", + tags: ["Installation"], + summary: "Edit Team Member", + params: { + type: "object", + properties: { + installationId: { type: "string", description: "Installation ID" }, + teamMemberId: { type: "string", description: "Team Member ID" } + }, + required: ["installationId", "teamMemberId"] + }, + body: { + type: "object", + properties: { + name: { type: "string" }, + phone: { type: "string" }, + email: { type: "string" }, + alternativePhone: { type: "string" }, + status: { type: "string" } + } + }, + response: { + 200: { + type: "object", + properties: { + simplydata: { + type: "object", + properties: { + error: { type: "boolean" }, + message: { type: "string" } + } + } + } + } + } + }, + handler: installationController.editTeamMember + }); + + fastify.delete("/api/install/installationdeleteTeamMember/:installationId/:teamMemberId", { + schema: { + description: "Delete a team member from an installation", + tags: ["Installation"], + summary: "Delete Team Member", + params: { + type: "object", + properties: { + installationId: { type: "string", description: "Installation ID" }, + teamMemberId: { type: "string", description: "Team Member ID" } + }, + required: ["installationId", "teamMemberId"] + }, + response: { + 200: { + type: "object", + properties: { + simplydata: { + type: "object", + properties: { + error: { type: "boolean" }, + message: { type: "string" } + } + } + } + } + } + }, + handler: installationController.deleteTeamMember + }); + + fastify.get("/api/getDepartmentByFirstName/:departmentName/:firstName", { + schema: { + description: "Get department details by first name", + tags: ["Installation"], + summary: "Fetch department's firstName and phone", + params: { + type: "object", 
+ properties: { + firstName: { type: "string", description: "Department's first name" }, + departmentName: { type: "string"} + }, + required: ["firstName"], + }, + }, + handler: installationController.getDepartmentByFirstName, + }); + + fastify.get("/api/getGsmCheck/:hardwareId", { + schema: { + description: "Get GSM check details", + tags: ["Installation"], + summary: "Get GSM check details", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + }, + required: ["hardwareId"], + }, + }, + handler: installationController.getByHardwareId, + }); + + fastify.get("/api/getGsmCheckSupport/:hardwareId", { + schema: { + description: "Get GSM check details Support", + tags: ["Installation"], + summary: "Get GSM check details Support", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + }, + required: ["hardwareId"], + }, + }, + handler: installationController.getByHardwareIdSupport, + }); + + fastify.get("/api/getGsmCheckSupportTeamMember/:hardwareId", { + schema: { + description: "Get GSM check details Support Team Member", + tags: ["Installation"], + summary: "Get GSM check details Support Team Member", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + }, + required: ["hardwareId"], + }, + }, + handler: installationController.getByHardwareIdSupportTeamMember, + }); + fastify.get("/api/getLoraCheck/:hardwareId/:tankhardwareId", { + schema: { + description: "Get LORA check details", + tags: ["Installation"], + summary: "Get LORA check details", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + tankhardwareId: { type : "string"}, + }, + required: ["hardwareId","tankhardwareId"], + }, + }, + handler: installationController.getByHardwareAndTankId, + }); + fastify.get("/api/getLoraCheckSupport/:hardwareId/:tankhardwareId", { + schema: { + description: "Get LORA check details Support", + tags: ["Installation"], + summary: "Get LORA check details Support", + 
params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + tankhardwareId: { type : "string"}, + }, + required: ["hardwareId","tankhardwareId"], + }, + }, + handler: installationController.getByHardwareAndTankIdSupport, + }); + + fastify.get("/api/getLoraCheckSupportTeamMember/:hardwareId/:tankhardwareId", { + schema: { + description: "Get LORA check details Support Team Member", + tags: ["Installation"], + summary: "Get LORA check details Support Team Member", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + tankhardwareId: { type : "string"}, + }, + required: ["hardwareId","tankhardwareId"], + }, + }, + handler: installationController.getByHardwareAndTankIdSupportTeamMember, + }); + fastify.get("/api/getAllocatedSensorsByTank/:customerId/:tankName", { + schema: { + description: "Get allocated sensors by installationId, customerId, and tankName", + tags: ["Installation"], + summary: "Fetch allocated sensors for a given tank", + params: { + type: "object", + properties: { + // installationId: { type: "string" }, + customerId: { type: "string" }, + tankName: { type: "string" }, + }, + required: [ "customerId", "tankName"], + }, + }, + handler: installationController.getAllocatedSensorsByTank, + }); + + fastify.post("/api/createMasterSlaveData/:installationId", { + schema: { + description: "Create a new Master-Slave data entry under an installation", + tags: ["Installation"], + summary: "Save Master-Slave Data", + params: { + type: "object", + required: ["installationId"], + properties: { + installationId: { type: "string", description: "Installation ID" } + } + }, + body: { + type: "object", + required: ["hardwareId", "masterId"], + properties: { + type: { type: "string" }, + customerId: { type: "string" }, + hardwareId: { type: "string" }, + batchno: { type: "string" }, + masterId: { type: "string" }, + tankName: { type: "string" }, + tankLocation: { type: "string" }, + materialRecived: { type: "string" }, + 
electricityWork: { type: "string" }, + plumbingWork: { type: "string" }, + loraCheck: { type: "string" }, + materialRecievedPictures: { + type: "array", + items: { + type: "object", + properties: { + url: { type: "string", description: "Image URL" }, // No format validation + uploadedAt: { type: "string", format: "date-time", description: "Upload timestamp" } + } + } + }, + electricityWorkPictures: { + type: "array", + items: { + type: "object", + properties: { + url: { type: "string", description: "Image URL" }, // No format validation + uploadedAt: { type: "string", format: "date-time", description: "Upload timestamp" } + } + } + }, + plumbingWorkPictures: { + type: "array", + items: { + type: "object", + properties: { + url: { type: "string", description: "Image URL" }, // No format validation + uploadedAt: { type: "string", format: "date-time", description: "Upload timestamp" } + } + } + } + } + } + }, + handler: installationController.createMasterSlaveData + }); + + fastify.get("/api/getmasterConnectedSlaveData/:connectedTo/:customerId", { + schema: { + description: "Get masrter connected slave data", + tags: ["Installation"], + summary: "Get masrter connected slave data", + params: { + type: "object", + properties: { + connectedTo: { type: "string" }, + customerId: { type: "string" }, + }, + required: [ "connectedTo"], + }, + }, + handler: installationController.masterConnectedSlaveList, + }); +fastify.get('/api/tanks/master/:customerId/:hardwareId', { + schema: { + tags: ['Tank'], + summary: 'Get tank details by master device', + description: 'Fetch tank details by providing customerId and hardwareId for type master', + + params: { + type: 'object', + required: ['customerId', 'hardwareId'], + properties: { + customerId: { type: 'string', }, + hardwareId: { type: 'string', } + } + }, + + + }, + handler: installationController.getTankDetailsByMaster +}); + + +fastify.get('/api/slave-tank-details/:customerId/:hardwareId/:tankHardwareId', { + schema: { + tags: 
['Tank'], + summary: 'Get slave tank details with master info', + params: { + type: 'object', + required: ['customerId', 'hardwareId', 'tankHardwareId'], + properties: { + customerId: { type: 'string' }, + hardwareId: { type: 'string' }, + tankHardwareId: { type: 'string' } + } + }, + + }, + handler: installationController.getSlaveTankDetails +}); + +fastify.put('/api/Updatetanksdimensisons/:customerId/:teamMemberId/:hardwareId/:tankHardwareId', { + schema: { + tags: ['Installation'], + summary: 'Update tank dimensions', + description: 'Edit tank dimensions by customerId, teamMemberId, hardwareId, and tankHardwareId. ' + + 'Provide dimensions in either centimeters or feet, specify using the "unit" field. ' + + 'Saves the data in feet (rounded to integer) and updates capacity fields.', + + params: { + type: 'object', + required: ['customerId', 'teamMemberId', 'hardwareId', 'tankHardwareId'], + properties: { + customerId: { type: 'string', description: 'Customer ID' }, + teamMemberId: { type: 'string', description: 'Team member ID' }, + hardwareId: { type: 'string', description: 'Master hardwareId' }, + tankHardwareId: { type: 'string', description: 'Tank hardwareId' } + } + }, + + body: { + type: 'object', + required: ['height', 'width', 'length', 'unit'], + properties: { + height: { type: 'string', description: 'Tank height (numeric string)' }, + width: { type: 'string', description: 'Tank width (numeric string)' }, + length: { type: 'string', description: 'Tank length (numeric string)' }, + unit: { type: 'string', enum: ['cm', 'feet'], description: 'Unit of the dimensions provided (must be either "cm" or "feet")' } + }, + // example: { + // height: "210", // in cm + // width: "150", // in cm + // length: "300", // in cm + // unit: "cm" + // } + }, + + + }, + handler: installationController.editTankDimensions +}); + + + fastify.post( + '/api/update-status/:connectedTo/:teamMemberId/:customerId', + { + schema: { + description: 'Update work_status in Order and set 
product_status to complete for master and connected slaves', + summary: 'Update work_status in Order and set product_status to complete for master and connected slaves', + tags: ['Installation'], + params: { + type: 'object', + required: ['connectedTo', 'teamMemberId', 'customerId'], + properties: { + connectedTo: { type: 'string', description: 'Master hardwareId' }, + teamMemberId: { type: 'string', description: 'Team member ID' }, + customerId: { type: 'string', description: 'Customer ID' } + } + }, + body: { + type: 'object', + required: ['work_status'], + properties: { + work_status: { + type: 'string', + enum: ['active', 'pending', 'complete','waiting','reject'], // update enum based on what your Orders schema allows + description: 'New work status to set in master_connections' + } + } + }, + + }, + handler: installationController.updateWorkStatusAndProductStatus + + } +); + +fastify.post( + '/api/insensors/media/:customerId', + { + schema: { + summary: 'Add media (manual videos, material pictures, or work status pictures) to master or slave device', + description: 'Attach media files (video/images) to a specific Insensor (master or slave) by customerId', + tags: ['Installation'], + params: { + type: 'object', + required: ['customerId'], + properties: { + customerId: { type: 'string', description: 'Customer ID' } + } + }, + body: { + type: 'object', + required: ['hardwareId', 'type'], // keep required fields + properties: { + hardwareId: { type: 'string', description: 'Hardware ID of the device' }, + type: { type: 'string', enum: ['master', 'slave'], description: 'Device type' }, + video: { + type: 'array', + items: { type: 'string', format: 'uri' }, + description: 'URLs to save in manualTestVideos' + }, + material: { + type: 'array', + items: { type: 'string', format: 'uri' }, + description: 'URLs to save in materialReceivedPictures' + }, + workStatus: { + type: 'array', + items: { type: 'string', format: 'uri' }, + description: 'URLs to save in 
workStatusPictures' + }, + product_status: { + type: 'string', + enum: ['pending', 'complete'], + description: 'Optional: update product_status' + }, + description: { type: 'string' }, + + }, + // at least one of video, material, workStatus required + + }, + + }, + handler: installationController.addMediaToInsensor + } +); + + + fastify.get("/api/getmasterList/:customerId/:installationId", { + schema: { + description: "Get masrter connected slave data", + tags: ["Installation"], + summary: "Get masrter List", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + installationId: { type: "string" }, + }, + required: [ "customerId","installationId"], + }, + }, + handler: installationController.mastrerList, + }); + + fastify.get("/api/getmasterlistwithslaves/:customerId", { + schema: { + description: "Get Active masrter connected slave data with full info", + tags: ["Installation"], + summary: "Get Active masrter connected slave data with full info", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.getMasterSlaveSummary, + }); + + fastify.get("/api/getcompletemasterlistwithslaves/:customerId", { + schema: { + description: "Get Complete masrter connected slave data with full info", + tags: ["Installation"], + summary: "Get Complete masrter connected slave data with full info", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.getCompleteMasterSlaveSummary, + }); + + fastify.get('/api/master-with-slaves/:installationId/:customerId/:hardwareId', { + schema: { + tags: ['Installation'], + summary: 'Get Individual master device and its connected slaves', + description: 'Fetch a master device from Insensors by hardwareId and list all connected slave devices for the same customer and installation.', + params: { + type: 'object', + 
required: ['installationId', 'customerId', 'hardwareId'], + properties: { + installationId: { type: 'string', description: 'Installation ID from Order' }, + customerId: { type: 'string', description: 'Customer ID' }, + hardwareId: { type: 'string', description: 'Master hardwareId' }, + } + }, + + }, + handler: installationController.getMasterWithSlaves, + +}); + + + fastify.get("/api/getwaitingmasterlistwithslaves/:customerId", { + schema: { + description: "Get waiting manager masrter connected slave data with full info", + tags: ["Installation"], + summary: "Get waiting manager masrter connected slave data with full info", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.getWaitingMasterSlaveSummary, + }); + + fastify.get("/api/getPendingmasterlistwithslaves/:customerId", { + schema: { + description: "Get Pending masrter connected slave data with full info", + tags: ["Installation"], + summary: "Get Pending masrter connected slave data with full info", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.getPendingMasterSlaveSummary, + }); + + fastify.get("/api/getAllmasterlistwithslaves/:customerId", { + schema: { + description: "Get All check masrter connected slave data with full info", + tags: ["Installation"], + summary: "Get All check masrter connected slave data with full info", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + // hardwareId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.getIotDataByCustomer, + }); + fastify.get("/api/getsinglemasterlistwithslaves/:customerId/:hardwareId", { + schema: { + description: "Get single check masrter connected slave data", + tags: ["Installation"], + summary: "Get single check masrter connected slave data", + 
params: { + type: "object", + properties: { + customerId: { type: "string" }, + hardwareId: { type: "string" }, + }, + required: [ "customerId"], + }, + // querystring: { // ✅ allow customerId in query string + // type: 'object', + // required: ['customerId'], + // properties: { + // customerId: { type: 'string' }, + // hardwareId: { type: "string" }, + // } + // } + }, + handler: installationController.getIotDataByCustomerAndHardwareId, + }); + + + + fastify.get("/api/getraiseAticketBuildingDetails/:customerId/:connected_to/:installationId", { + schema: { + description: "Raise A Ticket for Support Building Details", + tags: ["Support"], + summary: "Raise A Ticket for Support Building Details", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + connected_to: { type: "string" }, + installationId: { type: "string" }, + + }, + required: [ "customerId"], + }, + }, + handler: installationController.raiseATicketBuildingDetails, + }); + + + fastify.get("/api/getraiseAticketslave/:customerId/:connected_to/:tankHardwareId", { + schema: { + description: "Raise A Ticket particular slave for Support", + tags: ["Support"], + summary: "Raise A Ticket particular slave for Support", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + connected_to: { type: "string" }, + tankHardwareId: { type: "string" }, + + + }, + required: [ "customerId"], + }, + }, + handler: installationController.raiseATicketSlave, + }); + fastify.get("/api/getAllDisconnectedIsuues/:supportId/:customerId", { + schema: { + description: "Get All disconnected list for Support", + tags: ["Support"], + summary: "Get All disconnected list for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + customerId: { type: "string" }, + }, + required: [ "supportId"], + }, + }, + handler: installationController.getDisconnectedIssuesBySupportId, + }); + + fastify.get("/api/getAllResolvedIsuues/:supportId/:customerId", { + 
schema: { + description: "Get All Resolved list for Support", + tags: ["Support"], + summary: "Get All Resolved list for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + customerId: { type: "string" }, + }, + required: [ "supportId"], + }, + }, + handler: installationController.getResolvedIssuesBySupportId, + }); + + fastify.put('/api/updateComments/:supportId', { + schema: { + description: "Update comments and call status for a support record issue", + tags: ["Support"], + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string", minLength: 1 } + } + }, + body: { + type: "object", + // required: ["customerId", "hardwareId", "comments", "call_status", "call_time"], + properties: { + customerId: { type: "string"}, + hardwareId: { type: "string" }, + comments: { type: "string"}, + call_status: { type: "string"}, + call_time: { type: "string" } + } + } + }, + handler: installationController.updateComments + }); + + fastify.get("/api/getRemoveAllConnectedIsuues/:supportId/:hardwareId", { + schema: { + description: "Remove all connected list for Support", + tags: ["Support"], + summary: "Remove all connected list for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + hardwareId: { type: "string" }, + }, + required: [ "supportId"], + }, + }, + handler: installationController.getRemoveConnectedMastersWithSlaves, + }); + + fastify.get("/api/fetchthebuildingdetails/:supportId", { + schema: { + description: "Get building details for Support", + tags: ["Support"], + summary: "Get building details list for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getDisconnectedCustomerDetails, + }); + + fastify.get("/api/disconnectedCustomersbyTeamMember/:support_teamMemberId", { + schema: { + description: "Get disconnected customer details by 
team member ID", + tags: ["Support"], + summary: "Disconnected Customer Details by Team Member", + params: { + type: "object", + properties: { + support_teamMemberId: { type: "string" } + }, + required: ["support_teamMemberId"] + } + }, + handler: installationController.getDisconnectedCustomerDetailsByTeamMemberId + }); + + + fastify.get("/api/moveisseshebuildingdetails/:supportId", { + schema: { + description: "Move the ticket Get building details move for Support", + tags: ["Support"], + summary: "Move the ticket Get building details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getDisconnectedMoveCustomerDetails, + }); + + fastify.get("/api/resolveissusesbuildingdetails/:supportId", { + schema: { + description: "Resolve the ticket Get building details move for Support", + tags: ["Support"], + summary: "Resolve the ticket Get building details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getResolvedCustomerDetails, + }); + fastify.get("/api/longTermissusesbuildingdetails/:supportId", { + schema: { + description: "Long Term Issues the ticket Get building details move for Support", + tags: ["Support"], + summary: "Long Term Issues the ticket Get building details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getLongTermCustomerDetails, + }); + + fastify.get("/api/outDoorEscalationissusesbuildingdetails/:supportId", { + schema: { + description: "Out Door Escalation Issues the ticket Get building details move for Support", + tags: ["Support"], + summary: "Out Door Escalation Issues the ticket Get building details move for Support", + params: { + type: "object", + properties: { + 
supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.outDoorEscalationCustomerDetails, + }); + + fastify.get("/api/powerOutageissusesbuildingdetails/:supportId", { + schema: { + description: "Power Outage Issues the ticket Get building details move for Support", + tags: ["Support"], + summary: "Power Outage Issues the ticket Get building details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + + }, + required: [ "supportId"], + }, + }, + handler: installationController.powerOutageCustomerDetails, + }); + fastify.get("/api/longTermissusesmasterslavedetails/:supportId/:customerId", { + schema: { + description: "Long Term Issues the master and slave details move for Support", + tags: ["Support"], + summary: "Long Term Issues the master and slave details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + customerId: { type: "string" }, + + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getLongTermIssuesByCustomer, + }); + + fastify.get("/api/powerOutageissusesmasterslavedetails/:supportId/:customerId", { + schema: { + description: "Power Outgae Issues the master and slave details move for Support", + tags: ["Support"], + summary: "Power Outage Issues the master and slave details move for Support", + params: { + type: "object", + properties: { + supportId: { type: "string" }, + customerId: { type: "string" }, + + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getPowerOutageIssuesByCustomer, + }); + + fastify.get("/api/outDoorEscalationissusesmasterslavedetails/:supportId/:customerId", { + schema: { + description: "Out Door Escalation Issues the master and slave details move for Support", + tags: ["Support"], + summary: "Out Door Escalation Issues the master and slave details move for Support", + params: { + type: "object", + properties: { + supportId: { 
type: "string" }, + customerId: { type: "string" }, + + + }, + required: [ "supportId"], + }, + }, + handler: installationController.getOutDoorEscalationIssuesByCustomer, + }); + fastify.route({ + method: 'POST', + url: '/api/supportCreateTeamMember/:supportId', + schema: { + tags: ['Support'], + summary: 'Create a new team member under a support record', + params: { + type: 'object', + properties: { + supportId: { type: 'string', description: 'Support ID' }, + }, + required: ['supportId'], + }, + body: { + type: 'object', + properties: { + name: { type: 'string' }, + phone: { type: 'string' }, + password: { type: 'string' }, + email: { type: 'string' }, + alternativePhone: { type: 'string' }, + status: { type: 'string', enum: ['active', 'inactive'], default: 'active' } + }, + required: [ 'name', 'phone'] + }, + }, + handler: installationController.createTeamMemberSupport + }); + fastify.get("/api/supportTeamMembersList/:supportId/", { + schema: { + description: "Get all team members for a support user", + tags: ["Support"], + summary: "Get all team members for a support user", + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string", description: "Support ID" } + } + } + }, + handler: installationController.getAllTeamMembersListSupport + }); + + fastify.put("/api/supportUpdateTeamMember/:supportId/:teamMemberId", { + schema: { + description: "Update a support team member by ID", + tags: ["Support"], + summary: "Update a support team member by ID", + params: { + type: "object", + required: ["supportId", "teamMemberId"], + properties: { + supportId: { type: "string" }, + teamMemberId: { type: "string" } + } + }, + body: { + type: "object", + properties: { + name: { type: "string" }, + phone: { type: "string" }, + password: { type: "string" }, + email: { type: "string" }, + alternativePhone: { type: "string" }, + status: { type: "string" } + } + } + }, + handler: installationController.updateTeamMemberSupport + + + }); + 
fastify.delete("/api/supportDeleteTeamMember/:supportId/:teamMemberId", { + schema: { + description: "Delete a support team member by ID", + tags: ["Support"], + summary: "Delete a support team member by ID", + params: { + type: "object", + required: ["supportId", "teamMemberId"], + properties: { + supportId: { type: "string" }, + teamMemberId: { type: "string" } + } + } + }, + handler: installationController.deleteTeamMemberSupport + + + }); + + fastify.post("/api/moveIssueToCategory/:supportId", { + schema: { + description: "Move specific issue to a categorized bucket", + tags: ["Support"], + summary: "Move disconnected issue to category and remove from issues list", + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string" }, + }, + }, + body: { + type: "object", + required: ["category", "hardwareId"], + properties: { + category: { type: "string" }, + hardwareId: { type: "string" }, + }, + }, + }, + handler: installationController.moveIssueToCategory + }); + + + + fastify.get('/api/support/categorizedIssues/:supportId/:category', { + schema: { + description: 'Get all issues in a particular category for a support record', + tags: ['Support'], + summary: 'Fetch issues by category', + params: { + type: 'object', + required: ['supportId', 'category'], + properties: { + supportId: { type: 'string' }, + category: { + type: 'string', + enum: ["Power Outage", + "Resolved", + "OutDoor Escalation", + "LongTerm Issues"] + } + } + }, + querystring: { // ✅ allow customerId in query string + type: 'object', + required: ['customerId'], + properties: { + customerId: { type: 'string' } + } + } + }, + handler: installationController.particularCategory + }); + + fastify.post('/api/support/sendToStoreHardwareList/:supportId/:customerId', { + schema: { + description: 'Send to store the hardware list', + tags: ['Support'], + summary: 'Send to store the hardware list', + params: { + type: 'object', + required: ['supportId', 'customerId'], + 
properties: { + supportId: { type: 'string' }, + customerId: { type: 'string' }, + } + }, + body: { + type: "object", + required: ["storeId"], + properties: { + storeId: { type: "string" }, + type: { type: "string" }, + + }, + }, + }, + handler: installationController.sendToStoreHardwareList + }); + + fastify.post('/api/repair-order/:supportId/:customerId',{ + schema: { + summary: 'Create a new Repair Order', + description: 'Creates a repair order for the given supportId and customerId with replacements.', + tags: ['Repair Orders'], + params: { + type: 'object', + required: ['supportId', 'customerId'], + properties: { + supportId: { type: 'string', description: 'Support ID' }, + customerId: { type: 'string', description: 'Customer ID' } + } + }, + body: { + type: 'object', + required: ['replacements'], + properties: { + storeId: { type: 'string', description: 'Store ID where the repair is logged' }, + status: { + type: 'string', + enum: ['pending', 'completed'], + default: 'pending', + description: 'Status of the repair order' + }, + replacements: { + type: 'array', + description: 'List of hardware replacements', + items: { + type: 'object', + required: ['type', 'oldHardwareId', 'newHardwareId'], + properties: { + type: { + type: 'string', + enum: ['master', 'slave', 'sensor'], + description: 'Type of the hardware being replaced' + }, + oldHardwareId: { type: 'string', description: 'Old hardware ID' }, + newHardwareId: { type: 'string', description: 'New hardware ID' } + } + }, + minItems: 1 + } + } + }, + }, + handler: installationController.createRepairOrder + + }); + + + fastify.post("/api/assignTeamMemberIssueToCategory/:supportId", { + schema: { + description: "Assign a team member to a categorized issue", + tags: ["Support"], + summary: "Assign a team member to a categorized issue", + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string" }, + }, + }, + body: { + type: "object", + //required: [ 
"support_teamMemberId", "startDate", "endDate", "category", "masterHardwareId"], + properties: { + support_teamMemberId: { type: "string" }, + // startDate: { type: "string" }, + // endDate: { type: "string" }, + category: { type: "string" }, + masterHardwareId: { type: "string" }, + }, + }, + }, + handler: installationController.assignCategorizeIssue + }); + +fastify.route({ + method: 'POST', + url: '/api/updateHardwareList/:supportId/:customerId/:hardwareId', + schema: { + tags: ['Support'], + summary: 'Update hardware list for an escalated issue', + description: 'Updates the hardwareList (array of name-value objects) in Insensors schema only if issue is categorized as Escalation', + params: { + type: 'object', + required: ['supportId', 'customerId', 'hardwareId'], + properties: { + supportId: { type: 'string', description: 'Support record ID' }, + customerId: { type: 'string', description: 'Customer ID' }, + hardwareId: { type: 'string', description: 'Hardware ID of the master/slave' } + } + }, + body: { + type: 'object', + required: ['hardwareList'], + properties: { + hardwareList: { + type: 'array', + description: 'List of hardware components as name-value objects', + items: { + type: 'object', + required: ['name', 'value'], + properties: { + name: { type: 'string', description: 'Component name (e.g., masters, slaves)' }, + value: { type: 'number', description: 'Quantity of the component' } + } + }, + example: [ + { name: 'masters', value: 2 }, + { name: 'slaves', value: 2 }, + { name: 'sensors', value: 4 }, + { name: 'plugs', value: 3 }, + { name: 'wires', value: 200 } + ] + } + } + }, + response: { + 200: { + description: 'Successfully updated hardwareList', + type: 'object', + properties: { + status_code: { type: 'number' }, + message: { type: 'string' }, + data: { + type: 'object', + properties: { + supportId: { type: 'string' }, + customerId: { type: 'string' }, + hardwareId: { type: 'string' }, + hardwareList: { + type: 'array', + items: { + type: 
'object', + properties: { + name: { type: 'string' }, + value: { type: 'number' } + } + } + } + } + } + } + }, + 400: { + description: 'Missing or invalid input', + type: 'object', + properties: { + error: { type: 'string' } + } + }, + 403: { + description: 'Not allowed (issue is not escalated)', + type: 'object', + properties: { + error: { type: 'string' } + } + }, + 404: { + description: 'Support or sensor not found', + type: 'object', + properties: { + error: { type: 'string' } + } + }, + 500: { + description: 'Internal server error', + type: 'object', + properties: { + error: { type: 'string' } + } + } + } + }, + handler: installationController.updateHardwareList +}); + + + fastify.post("/api/my-categorized-issues/:support_teamMemberId/:customerId", { + schema: { + description: "Get categorized issues by team member and customer", + tags: ["Support"], + summary: "Get categorized issues for a particular team member and customer", + params: { + type: "object", + required: ["support_teamMemberId", "customerId"], + properties: { + support_teamMemberId: { type: "string" }, + customerId: { type: "string" } + } + } + }, + handler: installationController.getCategorizedIssue + }); + + + fastify.post("/api/updateStatusTeammember/:support_teamMemberId", { + schema: { + description: "Team Member status matain for support", + tags: ["Support"], + summary: "Team Member status matain for support", + params: { + type: "object", + properties: { + + support_teamMemberId: { + type: "string", + } + }, + }, + body: { + type: "object", + properties: { + status: { + type: "string", + }, + + }, + }, + }, + handler: installationController.StatusTeamMember + }); + + + fastify.put('/api/resolvedIssues/:supportId', { + schema: { + description: "Resolved Issues for Support Team", + tags: ["Support"], + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string", minLength: 1 }, + + } + }, + body: { + type: "object", + required: ["category", 
"hardwareId"], + properties: { + category: { type: "string" }, + hardwareId: { type: "string" }, + }, + }, + }, + handler: installationController.resolvedIssuesForSupport + }); + fastify.put('/api/resolvedAllDevicesAreConnected/:supportId', { + schema: { + description: "All Devices are connected Resolved Issues for Support Team", + tags: ["Support"], + params: { + type: "object", + required: ["supportId"], + properties: { + supportId: { type: "string", minLength: 1 }, + + } + }, + body: { + type: "object", + required: ["category", "hardwareId"], + properties: { + category: { type: "string" }, + hardwareId: { type: "string" }, + reason: { type: "string" }, + + }, + }, + }, + handler: installationController.resolveIssueIfAllConnected + }); + next(); + +} \ No newline at end of file diff --git a/src/routes/storeRoute.js b/src/routes/storeRoute.js index 705ae5ac..bc9c6d9b 100644 --- a/src/routes/storeRoute.js +++ b/src/routes/storeRoute.js @@ -510,67 +510,67 @@ fastify.get("/api/getusersofParticularInstaller", { handler: storeController.getusersofParticularInstaller, }); -fastify.post("/api/createwaterlevelSensor/:storeId", { - schema: { - description: "This is for creating waterlevel Sensor", - tags: ["Store-Data"], - summary: "This is for creating waterlevel Sensor", - params: { - required: ["storeId"], - type: "object", - properties: { - storeId: { - type: "string", - description: "storeId", - }, - }, - }, +// fastify.post("/api/createwaterlevelSensor/:storeId", { +// schema: { +// description: "This is for creating waterlevel Sensor", +// tags: ["Store-Data"], +// summary: "This is for creating waterlevel Sensor", +// params: { +// required: ["storeId"], +// type: "object", +// properties: { +// storeId: { +// type: "string", +// description: "storeId", +// }, +// }, +// }, - body: { - type: "object", +// body: { +// type: "object", - properties: { +// properties: { - hardwareId: { type: "string" }, - type: { type: "string" }, - indate: { type: "string" }, - 
hardwareId_company: { type: "string" } - }, - }, - }, - handler: storeController.createwaterlevelSensor, -}) - - - -fastify.put("/api/editwaterlevelSensor/:storeId", { - schema: { - description: "This is for editing a water level sensor", - tags: ["Store-Data"], - summary: "This is for editing a water level sensor", - params: { - required: ["storeId"], - type: "object", - properties: { - storeId: { - type: "string", - description: "storeId", - }, +// hardwareId: { type: "string" }, +// type: { type: "string" }, +// indate: { type: "string" }, +// hardwareId_company: { type: "string" } +// }, +// }, +// }, +// handler: storeController.createwaterlevelSensor, +// }) + + + +// fastify.put("/api/editwaterlevelSensor/:storeId", { +// schema: { +// description: "This is for editing a water level sensor", +// tags: ["Store-Data"], +// summary: "This is for editing a water level sensor", +// params: { +// required: ["storeId"], +// type: "object", +// properties: { +// storeId: { +// type: "string", +// description: "storeId", +// }, - }, - }, - body: { - type: "object", - properties: { - hardwareId: { type: "string" }, - type: { type: "string" }, - indate: { type: "string" }, - hardwareId_company: { type: "string" } - }, - }, - }, - handler: storeController.editWaterLevelSensor, -}); +// }, +// }, +// body: { +// type: "object", +// properties: { +// hardwareId: { type: "string" }, +// type: { type: "string" }, +// indate: { type: "string" }, +// hardwareId_company: { type: "string" } +// }, +// }, +// }, +// handler: storeController.editWaterLevelSensor, +// }); fastify.delete("/api/deletewaterlevelSensor/:storeId", { @@ -625,37 +625,37 @@ fastify.get("/api/getHardware/:storeId", { handler: storeController.getHardware, }); -fastify.post("/api/qccheckwaterlevelSensor/:hardwareId", { - schema: { - description: "This is for checking waterlevel Sensor", - tags: ["Store-Data"], - summary: "This is for checking waterlevel Sensor", - params: { - required: ["hardwareId"], - 
type: "object", - properties: { - hardwareId: { - type: "string", - description: "hardwareId", - }, - }, - }, - body: { - type: "object", +// fastify.post("/api/qccheckwaterlevelSensor/:hardwareId", { +// schema: { +// description: "This is for checking waterlevel Sensor", +// tags: ["Store-Data"], +// summary: "This is for checking waterlevel Sensor", +// params: { +// required: ["hardwareId"], +// type: "object", +// properties: { +// hardwareId: { +// type: "string", +// description: "hardwareId", +// }, +// }, +// }, +// body: { +// type: "object", - properties: { - qccheck: { type: "string" }, - qccheckdate: { type: "string" }, - qcby: { type: "string" }, - comment: { type: "string" }, - outforrepairdate: { type: "string" }, - sendto: { type: "string" }, - repairfeedback: { type: "string" }, - }, - }, - }, - handler: storeController.qccheckwaterlevelSensor, -}) +// properties: { +// qccheck: { type: "string" }, +// qccheckdate: { type: "string" }, +// qcby: { type: "string" }, +// comment: { type: "string" }, +// outforrepairdate: { type: "string" }, +// sendto: { type: "string" }, +// repairfeedback: { type: "string" }, +// }, +// }, +// }, +// handler: storeController.qccheckwaterlevelSensor, +// }) fastify.put("/api/getHardwareqc/:storeId", { @@ -752,45 +752,45 @@ fastify.post("/api/addSlave/:hardwareId", { }); -fastify.put("/api/editSlave/:hardwareId", { - schema: { - description: "This is for editing a slave of a water level sensor", - tags: ["Store-Data"], - summary: "This is for editing a slave of a water level sensor", - params: { - required: ["hardwareId"], - type: "object", - properties: { - hardwareId: { - type: "string", - description: "Main hardware ID", - }, +// fastify.put("/api/editSlave/:hardwareId", { +// schema: { +// description: "This is for editing a slave of a water level sensor", +// tags: ["Store-Data"], +// summary: "This is for editing a slave of a water level sensor", +// params: { +// required: ["hardwareId"], +// type: "object", 
+// properties: { +// hardwareId: { +// type: "string", +// description: "Main hardware ID", +// }, - }, - }, - body: { - type: "object", - properties: { - tankhardwareId: { type: "string" }, - type: { type: "string" }, - indate: { type: "string" }, - hardwareId_company: { type: "string" }, - qccheck: { type: "string", default: null }, - qccheckdate: { type: "string", default: null }, - qcby: { type: "string", default: null }, - comment: { type: "string", default: "0" }, - outforrepairdate: { type: "string", default: "0" }, - sendto: { type: "string", default: null }, - repairfeedback: { type: "string", default: "0" }, - dateofinstallation: { type: "string", default: null }, - installedby: { type: "string", default: "0" }, - customerId: { type: "string", default: "0" }, - comments: { type: "string", default: "0" }, - }, - }, - }, - handler: storeController.editSlave, -}); +// }, +// }, +// body: { +// type: "object", +// properties: { +// tankhardwareId: { type: "string" }, +// type: { type: "string" }, +// indate: { type: "string" }, +// hardwareId_company: { type: "string" }, +// qccheck: { type: "string", default: null }, +// qccheckdate: { type: "string", default: null }, +// qcby: { type: "string", default: null }, +// comment: { type: "string", default: "0" }, +// outforrepairdate: { type: "string", default: "0" }, +// sendto: { type: "string", default: null }, +// repairfeedback: { type: "string", default: "0" }, +// dateofinstallation: { type: "string", default: null }, +// installedby: { type: "string", default: "0" }, +// customerId: { type: "string", default: "0" }, +// comments: { type: "string", default: "0" }, +// }, +// }, +// }, +// handler: storeController.editSlave, +// }); fastify.delete("/api/deleteSlave/:hardwareId", { @@ -1109,6 +1109,209 @@ fastify.post("/api/createwaterlevelSensorintime/:storeId", { handler: storeController.createSensor, }) +fastify.delete("/api/deleteSensorById/:_id", { + schema: { + description: "Delete a sensor by ID", + 
tags: ["Store-Data"], + summary: "Delete a sensor based on its _id", + params: { + required: ["_id"], + type: "object", + properties: { + _id: { + type: "string", + description: "Sensor ID", + }, + }, + }, + }, + handler: storeController.deleteSensorById, +}); + +fastify.post('/api/generateHardwareMasterId/:storeId', { + schema: { + description: 'Generate hardwareId and masterId dynamically for pending sensors', + tags: ['Store-Data'], + summary: 'Assign hardwareId and masterId to pending sensors dynamically', + params: { + required: ["storeId"], + type: "object", + properties: { + storeId: { + type: "string", + description: "storeId", + }, + }, + }, + body: { + type: 'object', + required: ['from', 'to', 'type'], + properties: { + from: { type: 'string', description: 'Starting hardwareId (e.g., 00000020)' }, + to: { type: 'string', description: 'Ending hardwareId (e.g., 00000030)' }, + type: { type: 'string', description: 'Type of sensor' }, + quantity: { type: 'string' }, + }, + }, + }, + + handler: storeController.generateHardwareMasterId, +}); + + +fastify.post('/api/getSensorByHardwareId/:storeId', { + schema: { + description: 'Fetch details of a specific sensor using hardwareId', + tags: ['Store-Data'], + summary: 'Retrieve sensor details by hardwareId', + params: { + required: ['storeId'], + type: 'object', + properties: { + storeId: { type: 'string', description: 'Store ID' }, + }, + }, + body: { + type: 'object', + required: ['hardwareId'], + properties: { + hardwareId: { type: 'string', description: 'Hardware ID of the sensor' }, + }, + }, + }, + handler: storeController.getSensorByHardwareId, +}); + + +fastify.post("/api/updateSensorById/:_id", { + schema: { + description: "Edit specific sensor fields", + tags: ["Store-Data"], + summary: "Update specific fields of a sensor before macId", + params: { + required: ["_id"], + type: "object", + properties: { + _id: { + type: "string", + description: "Sensor ID", + }, + }, + }, + body: { + type: "object", + 
properties: { + model: { type: "string", description: "Model of the sensor" }, + type: { type: "string", description: "Type of sensor" }, + masterId: { type: "string"}, + hardwareId_company: { type: "string", description: "Company name of hardware ID" }, + hardwareId: { type: "string", nullable: true, description: "Hardware ID (if applicable)" }, + + }, + }, + }, + handler: storeController.updateSensorById, +}); + + +fastify.post('/api/updateSensorQC/:hardwareId', { + schema: { + description: 'Edit specific sensor QC fields', + tags: ['Store-Data'], + summary: 'Update QC fields of a sensor', + params: { + required: ['hardwareId'], + type: 'object', + properties: { + hardwareId: { type: 'string', description: 'Sensor ID' }, + }, + }, + body: { + type: 'object', + properties: { + qccheck: { type: 'string', description: 'QC check status' }, + qcby: { type: 'string', description: 'QC checked by' }, + comments: { type: 'string', description: 'QC comment' }, + quality_check_details: { + type: 'array', + description: 'Detailed quality check results', + items: { + type: 'object', + properties: { + damage_check: { type: 'string' }, + stickering_check: { type: 'string' }, + power_check: { type: 'string' }, + master_connecting_gsm: { type: 'string' }, + slave_connecting: { type: 'string' }, + motor_start: { type: 'string' }, + motor_stop: { type: 'string' }, + motor_starting: { + type: 'object', + properties: { + result: { type: 'string' }, + steps: { + type: 'array', + items: { + type: 'object', + properties: { + step: { type: 'number' }, + result: { type: 'string' } + } + } + } + } + }, + connecting_to_sensor: { type: 'string' }, + connecting_to_slave: { type: 'string' }, + data_sending: { type: 'string' }, + distance_check: { + type: 'object', + properties: { + result: { type: 'string' }, + steps: { + type: 'array', + items: { + type: 'object', + properties: { + step: { type: 'number' }, + result: { type: 'string' } + } + } + } + } + } + } + } + } + } + } + }, + handler: 
storeController.updateSensorQC, +}); + + +fastify.get("/api/getSensorsByStatus/:storeId", { + schema: { + + description: "Get list of sensors grouped by status for a given store", + tags: ["Store-Data"], + summary: "Retrieve sensors grouped by status", + params: { + type: "object", + required: ["storeId"], + properties: { + storeId: { + type: "string", + description: "Store ID", + }, + }, + + }, + }, + handler: storeController.getSensorsByStatus, +}); + + fastify.get("/api/getbatchnumbers/:storeId/:type", { schema: { tags: ["Store-Data"], @@ -1138,11 +1341,38 @@ fastify.get("/api/getbatchnumbers/:storeId/:type", { handler: storeController.getbatchnumbers, }); + +fastify.get("/api/getbatquotationsforparticularstore/:storeId", { + schema: { + tags: ["Store-Data"], + description: "This is to Get quotations for store based on store id", + summary: "This is to Get quotations for store based on store id", + params: { + type: "object", + properties: { + storeId: { + type: "string", + description: "storeId", + }, + + }, + required: ["storeId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: storeController.getbatquotationsforparticularstore, +}); + fastify.get("/api/getiots/:storeId/:type", { schema: { tags: ["Store-Data"], description: "This is to Get iots of particular store", - summary: "This is to iots of particular store", + summary: "This is to get iots of particular store", params: { type: "object", properties: { @@ -1168,18 +1398,18 @@ fastify.get("/api/getiots/:storeId/:type", { }); -fastify.post("/api/createquotationforSensor/:installationId", { +fastify.post("/api/createquotationforSensor/:surveyId", { schema: { description: "This is for create quotation for sensors", tags: ["Install"], summary: "This is for create quotation for sensors", params: { - required: ["installationId"], + required: ["surveyId"], type: "object", properties: { - installationId: { + surveyId: { type: "string", - 
description: "installationId", + description: "surveyId", }, }, }, @@ -1187,10 +1417,51 @@ fastify.post("/api/createquotationforSensor/:installationId", { type: "object", properties: { customerId: { type: "string" }, + masters: { type: "string" }, slaves: { type: "string" }, sensors: { type: "string" }, motor_switches: { type: "string" }, + master_connections: { + type: "array", + maxItems: 2500, + items: { + type: "object", + properties: { + master_name: { type: "string", default: null }, + slaves: { type: "string", default: null }, + location: { type: "string", default: null }, + googleLocation: { type: "string", default: null }, + latitude: { type: 'number', default: 0.0 }, + longitude: { type: 'number', default: 0.0}, + tanks: { + type: "array", + items: { + type: "object", + properties: { + tankName: { type: "string", default: null }, + tankLocation: { type: "string", default: null }, + }, + }, + default: [], + }, + motor_switches: { + type: "array", + items: { + type: "object", + properties: { + from_tank: { type: "string", default: null }, + from_location: { type: "string", default: null }, + to_tank: { type: "string", default: null }, + to_location: { type: "string", default: null } + }, + }, + default: [], + }, + }, + }, + }, + electricals: { type: "array", maxItems: 2500, @@ -1211,26 +1482,138 @@ fastify.post("/api/createquotationforSensor/:installationId", { }); - -fastify.post("/api/editQuotationForSensor/:quatationId", { +fastify.post("/api/createEstimationPrice", { schema: { - description: "This is for edit quotation for sensors", + description: "Calculate estimated cost for a list of items", tags: ["Install"], - summary: "This is for edit quotation for sensors", - params: { - required: ["quatationId"], + summary: "Fetch unit prices from IotPrice and calculate total price for all items", + body: { + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID" }, + items: { + type: "array", + description: "List of items", 
+ items: { + type: "object", + properties: { + name: { type: "string", description: "Item name" }, + + type: { type: "string", description: "Item type" }, + quantity: { type: "string", description: "Quantity of the item" } + }, + required: ["name", "type", "quantity"] + } + } + }, + required: ["items"] + } + }, + handler: storeController.createEstimationPrice +}); + + +fastify.post("/api/handleEstimation", { + schema: { + description: "Accept or reject the estimated price", + tags: ["Install"], + summary: "Handles user action for estimated price", + body: { + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID" }, + items: { + type: "array", + description: "List of items with price details", + items: { + type: "object", + properties: { + name: { type: "string", description: "Item name" }, + type: { type: "string", description: "Item type" }, + quantity: { type: "string", description: "Quantity of the item" }, + unitPrice: { type: "number", description: "Unit price" }, + totalCost: { type: "number", description: "Total cost" } + }, + required: ["name", "type", "quantity", "unitPrice", "totalCost"] + } + }, + estimatedTotal: { type: "number", description: "Total estimated cost" }, + action: { type: "string", enum: ["accept", "reject"], description: "User decision" } + }, + required: ["customerId", "items", "estimatedTotal", "action"] + } + }, + handler: storeController.handleEstimation +}); + +fastify.put("/api/editOrder", { + schema: { + description: "Edit an existing order based on orderId", + tags: ["Install"], + summary: "Modify items and update the estimated total", + body: { + type: "object", + properties: { + orderId: { type: "string", description: "Order ID to be updated" }, + customerId: { type: "string", description: "Customer ID associated with the order" }, + items: { + type: "array", + description: "Updated list of items", + items: { + type: "object", + properties: { + name: { type: "string", description: "Item name" 
}, + type: { type: "string", description: "Item type" }, + quantity: { type: "string", description: "Quantity of the item" }, + unitPrice: { type: "number", description: "Unit price of the item" }, + totalPrice: { type: "number", description: "Total price of the item" } + }, + required: ["name", "type", "quantity", "unitPrice", "totalPrice"] + } + }, + estimatedTotal: { type: "number", description: "Updated estimated total cost" } + }, + required: ["orderId", "customerId", "items", "estimatedTotal"] + } + }, + handler: storeController.editOrder +}); + +fastify.get("/api/orders/:customerId", { + schema: { + description: "Get pending and accepted orders for a particular customer", + tags: ["Install"], + summary: "Fetch orders based on customerId", + params: { + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID whose orders need to be fetched" } + }, + required: ["customerId"] + } + }, + handler: storeController.getOrdersByCustomer +}); + + +fastify.post("/api/editQuotationForSensor/:quatationId", { + schema: { + description: "This is for editing quotation for sensors", + tags: ["Install"], + summary: "Edit an existing sensor quotation", + params: { + required: ["quatationId"], type: "object", properties: { quatationId: { type: "string", - description: "quatationId", + description: "Quotation ID", }, }, }, body: { type: "object", properties: { - masters: { type: "string" }, slaves: { type: "string" }, sensors: { type: "string" }, @@ -1239,33 +1622,57 @@ fastify.post("/api/editQuotationForSensor/:quatationId", { type: "array", maxItems: 2500, items: { - type: "object", - properties: { - type: { type: "string", default: null }, - wire: { type: "string", default: null }, - switch: { type: "string", default: null }, - text: { type: "string", default: null }, + type: "object", + properties: { + type: { type: "string", default: null }, + wire: { type: "string", default: null }, + switch: { type: "string", default: null }, + text: { 
type: "string", default: null }, + }, + }, + }, + master_connections: { // Added master_connections to schema + type: "array", + maxItems: 2500, + items: { + type: "object", + properties: { + master_name: { type: "string", default: null }, + slaves: { type: "string", default: null }, + location: { type: "string", default: null }, + tanks: { + type: "array", + items: { + type: "object", + properties: { + tankName: { type: "string", default: null }, + tankLocation: { type: "string", default: null }, + }, + }, + default: [], }, + }, }, - }, + }, }, }, }, handler: storeController.editQuotationForSensor, }); -fastify.post("/api/getquotationofinstalleranduser/:installationId", { + +fastify.post("/api/getquotationofinstalleranduser/:surveyId", { schema: { tags: ["Install"], - description: "This is to Get Quotations for installer and particular user", - summary: "This is to Get Quotations for installer and particular user", + description: "This is to Get Quotations for survey and particular user", + summary: "This is to Get Quotations for survey and particular user", params: { - required: ["installationId"], + required: ["surveyId"], type: "object", properties: { - installationId: { + surveyId: { type: "string", - description: "installationId", + description: "surveyId", }, }, }, @@ -1368,6 +1775,100 @@ fastify.get("/api/getSingleQuotationData/:quotationId", { handler: storeController.getSinleQuotationData, }); + +fastify.put("/api/updateInstallationId/:_id", { + schema: { + tags: ["Install"], + description: "Update the installationId of an order", + summary: "Update installationId", + params: { + type: "object", + properties: { + _id: { + type: "string", + description: "_id", + }, + }, + required: ["_id"], + }, + body: { + type: "object", + properties: { + installationId: { + type: "string", + description: "Installation ID to update", + }, + }, + required: ["installationId"], + }, + response: { + 200: { + description: "Installation ID updated successfully", + type: 
"object", + properties: { + status_code: { type: "number" }, + message: { type: "string" }, + data: { type: "object" }, + }, + }, + 400: { + description: "Bad request", + type: "object", + properties: { + error: { type: "string" }, + }, + }, + 404: { + description: "Order not found", + type: "object", + properties: { + error: { type: "string" }, + }, + }, + 500: { + description: "Internal server error", + type: "object", + properties: { + error: { type: "string" }, + }, + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.updateInstallationId, +}); + + +fastify.get("/api/getPendingOrders", { + schema: { + tags: ["Install"], + description: "Fetch all orders with status 'pending'", + summary: "Get all pending orders", + response: { + 200: { + type: "object", + properties: { + status_code: { type: "number" }, + message: { type: "string" }, + data: { type: "array", items: { type: "object" } }, + }, + }, + }, + }, + handler: storeController.getPendingOrders, +}); + + + + + + + + fastify.post("/api/cart/hardwareItem", { schema: { description: "To add items to the Hardwarecart", @@ -1415,5 +1916,364 @@ fastify.post("/api/cart/installationService", { }, handler: storeController.addToCartService }); + + +fastify.post("/api/acceptquotation/:quotationId", { + schema: { + tags: ["Install"], + description: "Accepts a quotation and moves it to the Orders database", + summary: "Accepts a quotation", + params: { + type: "object", + properties: { + quotationId: { type: "string" }, + }, + }, + body: { + type: "object", + properties: { + action: { type: "string" }, + storeId: { type: "string" }, + }, + required: ["action"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), // Uncomment if authentication is needed + handler: storeController.acceptQuotation, +}); + + +fastify.get("/api/ordersofstore/:storeId", { + schema: { + tags: ["Install"], + description: "Fetches orders based on storeId", 
+ summary: "Get orders by storeId", + params: { + type: "object", + properties: { + storeId: { type: "string" }, + }, + required: ["storeId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getOrdersByStoreId, +}); + + +fastify.get("/api/ordersofinstall/:installationId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get orders by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + //work_status: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getOrdersByInstallationId, +}); + +fastify.get("/api/Pendingordersofinstall/:installationId/:teamMemberId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get Pending orders team member by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + teamMemberId: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getPendingOrdersByInstallationAndTeamMember, +}); + +fastify.get("/api/waitingordersofinstall/:installationId/:teamMemberId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get waiting orders team member by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + teamMemberId: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getWaitingOrdersByInstallationAndTeamMember, +}); + +fastify.get("/api/Completeordersofinstall/:installationId/:teamMemberId", { + schema: { + tags: ["Installation"], + 
description: "Fetches orders based on installationId", + summary: "Get Complete orders Team Member by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + teamMemberId: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getCompleteOrdersByInstallationAndTeamMember, +}); + +fastify.get("/api/ManagerPendingordersofinstall/:installationId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get Manager Pending orders by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + //teamMemberId: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getManagerPendingOrdersByInstallationId, +}); + +fastify.get("/api/waitingManagerPendingordersofinstall/:installationId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get Waiting Manager Pending orders by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + //teamMemberId: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getWaitingManagerPendingOrdersByInstallationId, +}); + +fastify.get("/api/Completeordersofinstall/:installationId", { + schema: { + tags: ["Installation"], + description: "Fetches orders based on installationId", + summary: "Get Complete orders Manager building details by installationId", + params: { + type: "object", + properties: { + installationId: { type: "string" }, + //work_status: { type: "string"}, + //customerId: { type: "string"}, + }, + // required: ["installationId"], + 
}, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getCompleteManagerPendingOrdersByInstallationId, +}); + +fastify.post( + '/api/orders/:installationId/:customerId/:teamMemberId', + { + schema: { + tags: ["Installation"], + description: "Update the work status", + summary: "Update the work status", + params: { + type: 'object', + required: ['installationId', 'customerId'], + properties: { + installationId: { type: 'string' }, + customerId: { type: 'string' }, + teamMemberId: { type : 'string'} + }, + }, + body: { + type: 'object', + required: ['work_status', 'hardwareId'], + properties: { + work_status: { + type: 'string', + enum: ['active', 'pending', 'complete'], + description: 'The new work status', + }, + hardwareId: { + type: 'string', + description: 'Hardware ID to update orders for', + }, + }, + }, + // response: { + // 200: { + // type: 'object', + // properties: { + // status_code: { type: 'integer' }, + // message: { type: 'string' }, + // data: { type: 'array' }, + // }, + // }, + // }, + }, + handler: storeController.updateWorkStatusByInstallationId, + } +); + + +fastify.get("/api/getallocatedsensors/:customerId", { + schema: { + tags: ["Install"], + description: "Fetches sensors based on storeId", + summary: "Get sensors by customerId", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + }, + required: ["customerId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + handler: storeController.getallocatedsensorstouser, +}); + + + +fastify.post("/api/repair/replace/:customerId", { + schema: { + tags: ["Install"], + summary: "Mark old sensors as repair and replace them with available ones", + description: "This API replaces sensors (master/slave/sensor) with available ones and logs the repair action", + params: { + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID" }, + }, + required: ["customerId"] + }, + body: { + type: "object", + properties: { + 
items: { + type: "array", + items: { + type: "object", + properties: { + type: { type: "string", enum: ["master", "slave", "sensor"] }, + hardwareId: { type: "string" } + }, + required: ["type", "hardwareId"] + } + } + }, + required: ["items"] + }, + response: { + 200: { + description: "Successful replacement and repair log creation", + type: "object", + properties: { + status_code: { type: "integer" }, + message: { type: "string" }, + data: { + type: "object", + properties: { + packageId: { type: "string" }, + otp: { type: "string" }, + replacements: { + type: "array", + items: { + type: "object", + properties: { + type: { type: "string" }, + oldHardwareId: { type: "string" }, + newHardwareId: { type: "string" }, + } + } + }, + createdAt: { type: "string" } + } + } + } + } + }, + security: [ + { + basicAuth: [] + } + ] + }, + handler: storeController.replaceAndRepair + }); + + + + + + + + next(); }; diff --git a/src/routes/supplierOrdersRoutes.js b/src/routes/supplierOrdersRoutes.js index 661ef902..c2cbdb48 100644 --- a/src/routes/supplierOrdersRoutes.js +++ b/src/routes/supplierOrdersRoutes.js @@ -468,23 +468,26 @@ module.exports = function (fastify, opts, next) { handler:supplierOrderController.getAllOrderaccepted, }); + + + fastify.route({ method: "GET", - url: "/api/allrejected", + url: "/api/allrejected/:customerId", schema: { tags: ["Supplier-Order"], description:"This is for Get All order cancelled", summary: "This is for Get All order cancelled", - // params: { - // required: ["customerId"], - // type: "object", - // properties: { - // customerId: { - // type: "string", - // description: "customerId", - // }, - // }, - // }, + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, security: [ { @@ -495,6 +498,65 @@ module.exports = function (fastify, opts, next) { }, handler:supplierOrderController.getAllOrderreject, + }); + + + fastify.route({ + method: "GET", + 
url: "/api/getAllOrdersoutfordelivery/:customerId", + schema: { + tags: ["Supplier-Order"], + description:"This is for Get All order out for delivery", + summary: "This is for Get All out for delivery", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + + security: [ + { + basicAuth: [], + }, + ], + + }, + handler:supplierOrderController.getAllOrdersoutfordelivery, + + }); + + fastify.route({ + method: "GET", + url: "/api/getAllOrdersdeliveryboyasigned/:customerId", + schema: { + tags: ["Supplier-Order"], + description:"This is for Get All orders for delivery boy assigned", + summary: "This is for Get All out for delivery boy assigned", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + + security: [ + { + basicAuth: [], + }, + ], + + }, + handler:supplierOrderController.getAllOrdersdeliveryboyasigned, + }); fastify.route({ @@ -583,6 +645,28 @@ module.exports = function (fastify, opts, next) { }); + fastify.route({ + method: "GET", + url: "/api/orders", + schema: { + tags: ["Supplier-Order"], + description: "Get orders filtered by status and customerId", + summary: "Get orders filtered by status and customerId", + querystring: { + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID (optional)" }, + + }, + + }, + security: [{ basicAuth: [] }] + }, + handler: supplierOrderController.getOrdersByCustomerId + }); + + + // fastify.route({ // method: "GET", // url: "/api/billing/:bookingId", diff --git a/src/routes/supplierRoute.js b/src/routes/supplierRoute.js index e4a9622f..4d18c391 100644 --- a/src/routes/supplierRoute.js +++ b/src/routes/supplierRoute.js @@ -8,7 +8,7 @@ const { profilePictureSupplier } = require("../models/supplier"); module.exports = function (fastify, opts, next) { - fastify.get("/api/suppliers/:customerId", 
{ + fastify.post("/api/suppliersforbooking/:customerId", { schema: { tags: ["Supplier-Data"], description: "This is for Get All Suppliers", @@ -22,6 +22,25 @@ module.exports = function (fastify, opts, next) { }, }, }, + body: { + type: "object", + + properties: { + type_of_water: { type: "string" }, + capacity: { type: "string" }, + quantity: { type: "string" }, + date: { type: "string" }, + radius_from:{ type: "string" }, + radius_to:{ type: "string" }, + rating_to:{ type: "string" }, + rating_to:{ type: "string" }, + price_from: { type: "string" }, + price_to: { type: "string" }, + time: { type: "string" }, + pump: { type: "string" }, + + }, + }, security: [ { @@ -33,6 +52,63 @@ module.exports = function (fastify, opts, next) { }); + fastify.post("/api/getSupplierswithoutbooking/:customerId", { + schema: { + tags: ["Supplier-Data"], + description: "Get all suppliers with favorite & connection flags for a customer", + summary: "List suppliers with isFavorite & isConnected", + params: { + type: "object", + required: ["customerId"], + properties: { + customerId: { type: "string", description: "Customer ID" }, + }, + }, + // Body is not needed; keep empty schema or remove `body` entirely + + security: [{ basicAuth: [] }], + }, + handler: validationHandler.getSupplierswithoutbooking, // or `exports.getSuppliers` if wired directly +}); + + +fastify.post("/api/requestedbookings", { + schema: { + tags: ["Supplier-Data"], + description: "API to create requested bookings and send to suppliers", + summary: "Create requested booking", + body: { + type: "object", + required: ["customerId", "type_of_water", "capacity", "quantity", "date", "time", "requested_suppliers"], + properties: { + customerId: { type: "string" }, + type_of_water: { type: "string" }, + capacity: { type: "string" }, + quantity: { type: "string" }, + date: { type: "string" }, + time: { type: "string" }, + requested_suppliers: { + type: "array", + items: { + type: "object", + required: ["supplierId", 
"quoted_amount"], + properties: { + supplierId: { type: "string" }, + quoted_amount: { type: "number" }, + time: { type: "string" } // ✅ New field + } + } + } + } + }, + security: [{ basicAuth: [] }] + }, + handler: validationHandler.createRequestedBooking +}); + + + + fastify.get("/api/connectedSuppliers/:customerId", { schema: { tags: ["Supplier-Data"], @@ -84,6 +160,92 @@ module.exports = function (fastify, opts, next) { handler: validationHandler.getPendingSuppliers, }); + fastify.post("/api/plan/suppliers/:customerId", { + schema: { + tags: ["Supplier-Data"], + summary: "Search suppliers for Plans page", + description: "Filters by type_of_water, capacity×quantity, price, rating, radius, pump. No booked-tanker exclusion.", + params: { + type: "object", + required: ["customerId"], + properties: { customerId: { type: "string" } }, + }, + body: { + type: "object", + required: ["type_of_water", "capacity", "quantity", "frequency", "start_date", "end_date"], + properties: { + // UI fields + type_of_water: { type: "string" }, + capacity: { type: "string" }, + quantity: { type: "string" }, + frequency: { + type: "string", + + }, + start_date: { type: "string" }, + end_date: { type: "string" }, + + // Extra filters from your payload + radius_from: { type: "string" }, + radius_to: { type: "string" }, + rating_from: { type: "string" }, + rating_to: { type: "string" }, + price_from: { type: "string" }, + price_to: { type: "string" }, + pump: { type: "string" }, // "true"/"false" | "1"/"0" | "yes"/"no" + }, + additionalProperties: false + }, + security: [{ basicAuth: [] }], + }, + handler: validationHandler.getSuppliersForPlanSearch, + }); + + fastify.post("/api/requestedplanbookings", { + schema: { + tags: ["Supplier-Data"], + summary: "Create plan requested booking (daily/weekly once|twice|thrice)", + body: { + type: "object", + required: [ + "customerId","type_of_water","capacity","quantity", + "start_date","end_date","time","frequency","requested_suppliers" + ], + 
properties: { + customerId: { type: "string" }, + type_of_water: { type: "string" }, + capacity: { type: "string" }, + quantity: { type: "string" }, + start_date: { type: "string" }, // "YYYY-MM-DD" + end_date: { type: "string" }, // "YYYY-MM-DD" + time: { type: "string" }, // "HH:mm" + frequency: { + type: "string", + enum: ["daily","weekly_once","weekly_twice","weekly_thrice","weekly"] + }, + weekly_count: { type: "integer", minimum: 1, maximum: 3 }, // only if frequency === "weekly" + requested_suppliers: { + type: "array", + minItems: 1, + items: { + type: "object", + required: ["supplierId","quoted_amount"], + properties: { + supplierId: { type: "string" }, + quoted_amount: { type: "number" }, + time: { type: "string" } + }, + additionalProperties: false + } + } + }, + additionalProperties: false + }, + security: [{ basicAuth: [] }] + }, + handler: validationHandler.createRequestedPlanBooking + }); + fastify.get("/api/rejectSuppliers/:customerId", { schema: { tags: ["Supplier-Data"], @@ -283,6 +445,11 @@ module.exports = function (fastify, opts, next) { longitude: { type: 'number', default: 0.0}, fcmId: { type: "string", default: null }, description: { type: "string", default: null }, + bussinessname: { type: "string", default: null }, + + registration_number: { type: "string", default: null }, + years_in_business: { type: "string", default: null }, + }, }, @@ -542,6 +709,76 @@ module.exports = function (fastify, opts, next) { }); + +fastify.route({ + method: "POST", + url: "/api/supplier/booking/respond/:_id", + schema: { + description: "Supplier accepts or rejects a requested booking", + tags: ["Supplier-Data"], + summary: "Supplier action on requested booking", + params: { + type: "object", + properties: { + _id: { type: "string", description: "Requested Booking ID" } + }, + required: ["_id"] + }, + body: { + type: "object", + properties: { + supplierId: { type: "string", description: "Supplier ID" }, + action: { + type: "string", + enum: ["accept", 
"reject"], + description: "Action to perform by supplier" + } + }, + required: ["supplierId", "action"] + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), // Uncomment if auth is needed + handler: supplierController.respondToRequestedBooking +}); + + fastify.route({ + method: "POST", + url: "/api/supplier/recurring/respond/:_id", + schema: { + description: + "Supplier accepts or rejects a recurring requested booking; on accept, creates bookings for each date in the stored 'dates' array.", + tags: ["Supplier-Data"], + summary: "Supplier action on recurring requested booking", + params: { + type: "object", + properties: { + _id: { type: "string", description: "Recurring Requested Booking ID" }, + }, + required: ["_id"], + }, + body: { + type: "object", + properties: { + supplierId: { type: "string", description: "Supplier ID" }, + action: { + type: "string", + enum: ["accept", "reject"], + description: "Action to perform by supplier", + }, + }, + required: ["supplierId", "action"], + }, + security: [{ basicAuth: [] }], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: supplierController.respondToRecurringRequestedBooking, + }); + next(); } diff --git a/src/routes/tanksRoute.js b/src/routes/tanksRoute.js index 0f6acb37..e2467632 100644 --- a/src/routes/tanksRoute.js +++ b/src/routes/tanksRoute.js @@ -353,6 +353,8 @@ module.exports = function (fastify, opts, next) { stopTime:{type:"string"}, start_instance_id:{type:"string"}, motor_id:{type:"string"}, + phone:{type:"string"}, + }, }, security: [ @@ -370,6 +372,84 @@ module.exports = function (fastify, opts, next) { }); + fastify.route({ + method: "PUT", + url: "/api/motorActiontest/:customerId", + schema: { + tags: ["Install"], + summary: "This is for start and stop test", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + + + body: { + 
type: "object", + // required: ['phone'], + properties: { + + motor_id:{type:"string"}, + action:{type:"string"}, + + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: [ + // fastify.auth([fastify.operatorAuthenticate]), + // validationHandler.validatePhoneFormat, + // ], + //preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.motoractiontest, + }); + + + + fastify.route({ + method: "PUT", + url: "/api/motoractiontestbeforeqc", + schema: { + tags: ["Install"], + summary: "This is for start and stop test before qc", + + + + body: { + type: "object", + // required: ['phone'], + properties: { + + motor_id:{type:"string"}, + action:{type:"string"}, + + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: [ + // fastify.auth([fastify.operatorAuthenticate]), + // validationHandler.validatePhoneFormat, + // ], + //preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.motoractiontestbeforeqc, + }); + + // fastify.route({ // method: "PUT", // url: "/api/consumption/:customerId", @@ -563,6 +643,134 @@ module.exports = function (fastify, opts, next) { handler: tanksController.IotDevice }); + fastify.route({ + method: "GET", + url: "/api/validateRange/:hardwareId/:tankhardwareId", + schema: { + tags: ["Tank"], + description: "Validate tank height range", + summary: "Validate tank height range", + params: { + type: "object", + properties: { + hardwareId: { type: "string" }, + tankhardwareId: { type: "string" } + }, + required: ["hardwareId", "tankhardwareId"] + } + }, + handler: tanksController.validateTankHeight + }); + + fastify.route({ + method: "POST", + url: "/api/adjustMeasurement", + schema: { + tags: ["Tank"], + description: "Adjusts the water level measurement based on manual height measurement.", + summary: "Calculate water level difference using measured height.", + body: { + type: "object", + properties: { + tankName: { type: "string" }, + measuredHeight: { type: "number" 
} + }, + required: ["tankName", "measuredHeight"] + }, + response: { + 200: { + type: "object", + properties: { + status_code: { type: "number" }, + data: { + type: "object", + properties: { + tankName: { type: "string" }, + originalHeight: { type: "number" }, + measuredHeight: { type: "number" }, + heightDifference: { type: "number" }, + heightDifferenceInCm: { type: "number" }, + calculatedWaterLevel: { type: "number" }, + actualWaterLevel: { type: "number" }, + originalPercentage: { type: "string" }, + calculatedPercentage: { type: "string" }, + percentageDifference: { type: "string" }, + message: { type: "string" } + } + } + } + } + } + }, + handler: tanksController.adjustMeasurement + }); + + + // fastify.route({ + // method: "GET", + // url: "/api/waterlevel/:tankName", + // schema: { + // tags: ["Tank"], + // description: "Get actual water level in cm", + // summary: "The actual water level of a tank and convert it to cm", + // params: { + // type: "object", + // properties: { + // tankName: { type: "string" } + // }, + // required: ["tankName"] + // } + // }, + // handler: tanksController.getActualWaterLevelInCm + // }); + + + + // fastify.route({ + // method: "POST", + // url: "/api/compareWaterLevel", + // schema: { + // tags: ["Tank"], + // description: "Compare measured height with actual water level", + // summary: "Checks if measured height is within 10 cm of actual water level", + // // params: { + // // type: "object", + // // properties: { + // // tankName: { type: "string" } + // // }, + // // required: ["tankName"] + // // }, + // body: { + // type: "object", + // properties: { + // measuredHeight: { type: "string" }, + // tankName: { type: "string" } + + // }, + // required: ["measuredHeight","tankName"] + // } + // }, + // handler: tanksController.compareMeasuredHeight + // }); + +fastify.post('/api/compareMeasuredHeight', { + schema: { + tags: ['Tank'], + summary: 'Compare manual measured height with sensor data', + description: 'Pass 
tankName, tankHeight (cm) and measuredHeight (cm) as integers. Compares manual measurement vs sensor data and shows water levels.', + body: { + type: 'object', + required: ['tankName', 'tankHeight', 'measuredHeight'], + properties: { + tankName: { type: 'string', }, + tankHeight: { type: 'integer',}, // in cm + measuredHeight: { type: 'integer', } // in cm + } + }, + + }, + handler: tanksController.compareMeasuredHeight +}); fastify.route({ method: "POST", @@ -821,13 +1029,55 @@ module.exports = function (fastify, opts, next) { handler: tanksController.deletemotordatarecordsbefore7days, }); - fastify.get("/api/getTankmotordata", { + // fastify.get("/api/getTankmotordata", { + // schema: { + // tags: ["Tank"], + // description: "This is for Get Tank Motor Data", + // summary: "This is for to Get Tank Motor Data", + // querystring: { + // customerId: {type: 'string'} + // }, + // security: [ + // { + // basicAuth: [], + // }, + // ], + // }, + // preHandler: fastify.auth([fastify.authenticate]), + // handler: tanksController.getTankmotordata, + // }); + + + + fastify.route({ + method: "PUT", + url: "/api/getTankmotordata/:customerId", schema: { tags: ["Tank"], - description: "This is for Get Tank Motor Data", - summary: "This is for to Get Tank Motor Data", - querystring: { - customerId: {type: 'string'} + summary: "This is for Get Tank Motor Data", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + + body: { + type: "object", + // required: ['phone'], + properties: { + startDate:{ type: "string" }, + stopDate:{type:"string"}, + pumps:{type:"string"}, + users:{type:"string"}, + + + + }, }, security: [ { @@ -835,11 +1085,36 @@ module.exports = function (fastify, opts, next) { }, ], }, - preHandler: fastify.auth([fastify.authenticate]), + //preHandler: fastify.auth([fastify.authenticate]), handler: tanksController.getTankmotordata, }); + 
fastify.get("/api/getPumpsAndUsers/:customerId", { + schema: { + tags: ["Tank"], + description: "This is to Get pumps and users of particular customerId", + summary: "This is to Get pumps and users of particular customerId", + params: { + type: "object", + properties: { + customerId: { + type: "string", + description: "storeId", + }, + }, + required: ["customerId"], + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.getPumpsAndUsers, + }); + @@ -1070,6 +1345,7 @@ module.exports = function (fastify, opts, next) { auto_min_percentage: { type: "string", default: null }, auto_max_percentage: { type: "string", default: null }, tankLocation: { type: "string", default: null }, + auto_mode_type: { type: "string", default: "default" }, }, }, @@ -1116,6 +1392,36 @@ module.exports = function (fastify, opts, next) { //preHandler: fastify.auth([fastify.authenticate]), handler: tanksController.getBlockData, }); + + + fastify.route({ + method: "GET", + url: "/api/getCustomerAutoPercentages/:customerId", + schema: { + tags: ["Tank"], + summary: "This is to get auto mode default percentages", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + // querystring: { + // tankName: {type: 'string'} + // }, + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.getCustomerAutoPercentages, + }); fastify.route({ method: "PUT", @@ -1188,7 +1494,193 @@ module.exports = function (fastify, opts, next) { }); - next(); -} + fastify.route({ + method: "PUT", + url: "/api/updatetankstatus/:customerId", + schema: { + tags: ["Tank"], + summary: "This is for updating tank status to active or inactive", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + 
}, + }, + + body: { + type: "object", + // required: ['phone'], + properties: { + tankName:{ type: "string" }, + tankLocation:{type:"string"}, + status:{type:"string"}, + + + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.updatetankstatus, + }); + fastify.route({ + method: "POST", + url: "/api/sendNotificationDaily", + schema: { + tags: ["Tank"], + summary: "This is for time-based notification", + body: { + type: "object", + properties: { + customerId: { type: "string" }, + notificationTime: { type: "string" }, + allowNotifications: { type: "boolean" }, + }, + required: ["customerId", "notificationTime", "allowNotifications"], // Ensures all fields are required + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.sendUserSetNotifications, + }); + + fastify.route({ + method: "POST", + url: "/api/sendNotificationLowWaterLevel", + schema: { + tags: ["Tank"], + summary: "This is for Send low water level alert notification", + body: { + type: "object", + properties: { + customerId: { type: "string" }, + lowWaterAlert: { type: "boolean" }, + }, + required: ["customerId", "lowWaterAlert"], // Ensures all fields are required + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.sendUserSetLowWaterNotificationsSwitch, + }); + + fastify.route({ + method: "POST", + url: "/api/sendNotificationCriticallyWaterLevel", + schema: { + tags: ["Tank"], + summary: "This is for Send critically low water level alert notification", + body: { + type: "object", + properties: { + customerId: { type: "string" }, + criticalLowWaterAlert: { type: "boolean" }, + }, + required: ["customerId", "criticalLowWaterAlert"], // Ensures all fields are required + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.sendUserSetCriticallyLowWaterNotificationsSwitch, + }); + fastify.route({ + method: "POST", + url: "/api/sendNotificationManualStartAndStop", + schema: { + 
tags: ["Tank"], + summary: "This is for Send Manual start and stop alert notification", + body: { + type: "object", + properties: { + customerId: { type: "string" }, + manualStartAndStopNotify: { type: "boolean" }, + }, + required: ["customerId", "manualStartAndStopNotify"], // Ensures all fields are required + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.sendUserManualStartAndStop, + }); + fastify.route({ + method: "POST", + url: "/api/sendNotificationAutomaticStartAndStop", + schema: { + tags: ["Tank"], + summary: "This is for Send Automatic start and stop alert notification", + body: { + type: "object", + properties: { + customerId: { type: "string" }, + automaticStartAndStopNotify: { type: "boolean" }, + }, + required: ["customerId", "automaticStartAndStopNotify"], // Ensures all fields are required + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.sendUserAutomaticStartAndStop, + }); + + fastify.route({ + method: "POST", + url: "/api/sendNotificationDailyPreference", + schema: { + tags: ["Tank"], + summary: "This is for time based notification preferences", + body: { + type: "object", + properties: { + customerId: { + type: "string", + + }, + notificationPreference: { + type: "string", + }, + // allowNotifications: { + // type: "boolean" + // } + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: tanksController.notificationTiming, + }); + fastify.route({ + method: "GET", + url: "/api/listofactiveandinactivetankstatus/:customerId", + schema: { + tags: ["Tank"], + summary: "Get list of active or inactive tanks", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { type: "string", description: "Customer ID" }, + }, + }, + querystring: { + type: "object", + properties: { + status: { type: "string"}, + }, + }, + security: [{ basicAuth: [] }], + }, + handler: tanksController.listofactiveandinactivetankstatus, + }); 
+ + + next(); +} diff --git a/src/routes/usersRoute.js b/src/routes/usersRoute.js index 2583ab63..39a00e12 100644 --- a/src/routes/usersRoute.js +++ b/src/routes/usersRoute.js @@ -241,26 +241,26 @@ module.exports = function (fastify, opts, next) { }); - fastify.route({ - method: "POST", - url: "/api/change-password", - schema: { - tags: ["User"], - description: "Users to change their password using mobile number, old password, and new password.", - summary: "Users to change their password using mobile number, old password, and new password.", - body: { - type: "object", - required: ["phone", "oldPassword", "newPassword"], - properties: { - phone: { type: "string"}, - oldPassword: { type: "string"}, - newPassword: { type: "string" }, - //confirmPassword: { type: "string", minLength: 6 }, - }, - }, - }, - handler: validationHandler.verifyOldNewPassword, // Adjust the path to your handler - }); + // fastify.route({ + // method: "POST", + // url: "/api/change-password", + // schema: { + // tags: ["User"], + // description: "Users to change their password using mobile number, old password, and new password.", + // summary: "Users to change their password using mobile number, old password, and new password.", + // body: { + // type: "object", + // required: ["phone", "oldPassword", "newPassword"], + // properties: { + // phone: { type: "string"}, + // oldPassword: { type: "string"}, + // newPassword: { type: "string" }, + // //confirmPassword: { type: "string", minLength: 6 }, + // }, + // }, + // }, + // handler: validationHandler.verifyOldNewPassword, // Adjust the path to your handler + // }); // fastify.route({ @@ -342,6 +342,35 @@ module.exports = function (fastify, opts, next) { }, }); + + + fastify.route({ + method: "POST", + url: "/api/changePassword", + schema: { + tags: ["User"], + description: "This is to change password of user", + summary: "This is to change password of user", + body: { + type: "object", + required: ["phone"], + properties: { + phone: { 
type: "string" }, + oldPassword: { type: "string" }, + newPassword: { type: "string" }, + }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: [validationHandler.], + handler: userController.changePassword, + + }); + fastify.route({ method: "POST", url: "/api/resetpassword", @@ -627,6 +656,8 @@ module.exports = function (fastify, opts, next) { }); + + fastify.route({ method: "POST", url: "/api/sendSms", @@ -723,6 +754,85 @@ module.exports = function (fastify, opts, next) { preHandler: fastify.auth([fastify.authenticate]), handler: userController.deleteTeamMember, // Ensure this line points to the handler }); + + fastify.route({ + method: "PUT", + url: "/api/addingfavoratesupplier/:customerId", + schema: { + tags: ["User"], + summary: "This is for adding favorate supplier", + description: "This is for adding favorate supplier", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { + type: "string", + description: "customerId", + }, + }, + }, + querystring: { + supplierId: { type: "string" }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.addingfavoratesupplier, // Ensure this line points to the handler + }); + + fastify.route({ + method: "PUT", + url: "/api/editfavoratesupplier/:customerId", + schema: { + tags: ["User"], + summary: "Edit a favorite supplier", + description: "Replace an existing supplierId with a new one", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { type: "string" }, + }, + }, + querystring: { + oldSupplierId: { type: "string" }, + newSupplierId: { type: "string" }, + }, + security: [{ basicAuth: [] }], + }, + handler: userController.editFavoriteSupplier, + }); + + fastify.route({ + method: "DELETE", + url: "/api/deletefavoratesupplier/:customerId", + schema: { + tags: ["User"], + summary: "Delete a favorite supplier", + description: "Remove a supplierId from 
favorite suppliers", + params: { + required: ["customerId"], + type: "object", + properties: { + customerId: { type: "string" }, + }, + }, + querystring: { + supplierId: { type: "string" }, + }, + security: [{ basicAuth: [] }], + }, + handler: userController.deleteFavoriteSupplier, + }); + + + + fastify.route({ method: "PUT", @@ -822,6 +932,7 @@ module.exports = function (fastify, opts, next) { phone: { type: "string", default: null }, password:{ type: "string" ,default: null}, + all_motor_access:{ type: "string" ,default: "read"}, }, }, @@ -835,6 +946,7 @@ module.exports = function (fastify, opts, next) { }, ], }, + //preHandler: fastify.auth([fastify.authenticate]), handler: userController.createstaff, }); @@ -859,10 +971,12 @@ fastify.route({ type: "object", properties: { name: { type: "string" }, - password: { type: "string" } + password: { type: "string" }, + all_motor_access:{type:"string"} }, required: ["name", "password"] } + }, handler: userController.editStaff, }); @@ -904,5 +1018,529 @@ fastify.route({ }); + + + +fastify.route({ + method: "GET", + url: "/api/favorites/:customerId", + schema: { + tags: ["User"], + description: "Get all favorite suppliers of a customer", + summary: "Get all favorite suppliers of a customer", + params: { + type: "object", + required: ["customerId"], + properties: { + customerId: { type: "string", description: "Customer ID" } + } + }, + security: [{ basicAuth: [] }] + }, + handler: userController.getFavoriteSuppliers +}); + +fastify.get("/api/cart/:customerId", { + schema: { + tags: ["User"], + description: "Fetch cart by userId", + summary: "Get cart", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + }, + required: ["customerId"], + }, + }, + handler: userController.getCartByUserId, + }); + + fastify.post("/api/cart/:customerId/add", { + schema: { + tags: ["User"], + description: "Add item to cart", + summary: "Add item", + params: { + type: "object", + properties: { + customerId: { type: 
"string" }, + }, + required: ["customerId"], + }, + body: { + type: "object", + properties: { + productId: { type: "string" }, + name: { type: "string" }, + quantity: { type: "number" }, + price: { type: "number" }, + }, + required: ["productId", "quantity", "price"], + }, + }, + handler: userController.addItemToCart, + }); + + fastify.post("/api/cart/:customerId/remove", { + schema: { + tags: ["User"], + description: "Remove item from cart", + summary: "Remove item", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + }, + required: ["customerId"], + }, + body: { + type: "object", + properties: { + productId: { type: "string" }, + }, + required: ["productId"], + }, + }, + handler: userController.removeItemFromCart, + }); + + fastify.delete("/api/cart/:customerId/clear", { + schema: { + tags: ["User"], + description: "Clear entire cart", + summary: "Clear cart", + params: { + type: "object", + properties: { + customerId: { type: "string" }, + }, + required: ["customerId"], + }, + }, + handler: userController.clearCart, + }); + + + +fastify.route({ + method: "GET", + url: "/api/getuserOrders/:customerId", + schema: { + description: "To Get orders of customer", + tags: ["User"], + summary: "This is for getting orders of a customer", + params: { + type: "object", + properties: { + customerId: { + type: "string", + description: "Customer ID", + }, + }, + required: ["customerId"] + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.getuserOrders, +}); + + + +fastify.route({ + method: "GET", + url: "/api/getuserRequestbookings/:customerId", + schema: { + description: "To Get requestbookings of customer", + tags: ["User"], + summary: "This is for getting requestbookings of a customer", + params: { + type: "object", + properties: { + customerId: { + type: "string", + description: "Customer ID", + }, + }, + required: ["customerId"] + }, + security: [ + { + 
basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.getuserRequestbookings, +}); + +fastify.route({ + method: "GET", + url: "/api/getuserRequestbookingsforsupplier/:supplierId", + schema: { + description: "Get request bookings for a particular supplier", + tags: ["Supplier"], + summary: "Fetch bookings where the supplier is requested", + params: { + type: "object", + properties: { + supplierId: { type: "string", description: "Supplier ID" }, + }, + required: ["supplierId"], + }, + security: [{ basicAuth: [] }], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.getuserRequestbookingsForSupplier, +}); + + +fastify.route({ + method: "GET", + url: "/api/getuserRequestbookingsforplansforsupplier/:supplierId", + schema: { + description: "Get request plans for a particular supplier", + tags: ["Supplier"], + summary: "Fetch plans where the supplier is requested", + params: { + type: "object", + properties: { + supplierId: { type: "string", description: "Supplier ID" }, + }, + required: ["supplierId"], + }, + security: [{ basicAuth: [] }], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.getuserRequestbookingsforplansforsupplier, +}); + + +fastify.route({ + method: "GET", + url: "/api/getuserRequestbookingsforplansforcustomer/:customerId", + schema: { + description: "Fetch plans of the customer", + tags: ["Supplier"], + summary: "Fetch plans of the customer", + params: { + type: "object", + properties: { + customerId: { type: "string", description: "customerId" }, + }, + required: ["customerId"], + }, + security: [{ basicAuth: [] }], + }, + // preHandler: fastify.auth([fastify.authenticate]), + handler: userController.getuserRequestbookingsforplansforcustomer, +}); + + + +fastify.route({ + method: "POST", + url: "/api/booking/accept/:supplierId", + schema: { + description: "Accept a requested booking by supplier", + tags: ["User"], + summary: 
"Accept booking and move to tanker bookings", + params: { + type: "object", + properties: { + supplierId: { type: "string", description: "Supplier ID" } + }, + required: ["supplierId"] + }, + body: { + type: "object", + properties: { + _id: { type: "string", description: "Requested booking ID" }, + action: { type: "string", enum: ["accept","reject"], description: "Action to perform" } + }, + required: ["_id", "action"] + }, + security: [ + { + basicAuth: [], + }, + ], + }, + //preHandler: fastify.auth([fastify.authenticate]), + handler: userController.acceptRequestedBooking +}); + + + fastify.route({ + method: "GET", + url: "/api/getordersofcustomer/:customerId", + schema: { + tags: ["User"], + description: "Get tanker orders of customer", + summary: "Get tanker orders of customer", + params: { + type: "object", + properties: { + customerId: { type: "string" } + + }, + required: ["customerId"] + } + }, + handler: userController.getordersofcustomer + }); + +fastify.route({ + method: "GET", + url: "/api/getallsuppliers", + schema: { + tags: ["User"], + description: "Get all suppliers for showing", + summary: "Get all suppliers for showing", + }, + handler: userController.getallsuppliers + }); + + fastify.route({ + method: "GET", + url: "/api/estimationsget/:customerId", + schema: { + tags: ["User"], + description: "Get all estimations for particular customer", + summary: "Get all estimations for particular customer", + params: { + type: "object", + properties: { + customerId: { type: "string" } + + }, + required: ["customerId"] + } + }, + handler: userController.estimationsget + }); + + +fastify.route({ + method: 'PUT', + url: '/api/bookings/:bookingid/payment', + schema: { + description: 'Update payment details for a tanker booking', + tags: ['User'], + summary: 'Update payment info', + params: { + type: 'object', + properties: { + bookingid: { type: 'string', description: 'Booking ID' } + }, + required: ['bookingid'] + }, + body: { + type: 'object', + properties: { 
+ payment_mode: { type: 'string', description: 'Payment mode (e.g., UPI, cash, card)' }, + payment_reference_number: { type: 'string', description: 'Reference/transaction ID from gateway' } + }, + // at least one should be provided — validated in controller + additionalProperties: false + }, + response: { + 200: { + type: 'object', + properties: { + status_code: { type: 'integer' }, + message: { type: 'string' }, + data: { type: 'object' } + } + }, + 400: { type: 'object' }, + 404: { type: 'object' } + }, + security: [ + { + basicAuth: [] + } + ] + }, + // preHandler: fastify.auth([fastify.authenticate]), // enable auth if needed + handler: userController.updatePaymentForBooking + }); + + fastify.route({ + method: 'PUT', + url: '/api/request-booking/:_id/supplier/quote', + schema: { + description: + 'Update quoted_amount for a supplier inside requested_suppliers for a requested booking', + tags: ['User'], + summary: 'Update supplier quoted amount by User', + params: { + type: 'object', + properties: { + _id: { type: 'string', description: 'Booking _id' }, + }, + required: ['_id'], + }, + body: { + type: 'object', + properties: { + supplierId: { type: 'string', description: 'Supplier ID' }, + amount: { type: ['number', 'string'], description: 'Quoted amount (number)' }, + }, + required: ['supplierId', 'amount'], + additionalProperties: false, + }, + response: { + 200: { + type: 'object', + }, + 400: { type: 'object' }, + 404: { type: 'object' }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), // enable if needed + handler: userController.updateQuotedAmountForSupplier, + }); + + fastify.route({ + method: 'PUT', + url: '/api/request-booking-status/:_id/supplier/quotestatus', + schema: { + description: + 'Update status for a supplier inside requested_suppliers for a requested booking', + tags: ['User'], + summary: 'Update tanker booking of pearticular supplier status by User', + params: { + type: 'object', + 
properties: { + _id: { type: 'string', description: 'Booking _id' }, + }, + required: ['_id'], + }, + body: { + type: 'object', + properties: { + supplierId: { type: 'string' }, + status: { type:'string'}, + }, + + }, + response: { + 200: { + type: 'object', + }, + 400: { type: 'object' }, + 404: { type: 'object' }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), // enable if needed + handler: userController.updatestatusForSupplier, + }); + + + fastify.route({ + method: 'PUT', + url: '/api/request-advance-amount/:_id/supplier/advance', + schema: { + description: + 'Update adavance with reference number for particular supplier by user', + tags: ['User'], + summary: 'Update adavance with reference number for particular supplier by user', + params: { + type: 'object', + properties: { + _id: { type: 'string', description: 'Booking _id' }, + }, + required: ['_id'], + }, + body: { + type: 'object', + properties: { + supplierId: { type: 'string' }, + advance_paid: { type: 'number' }, // ✅ fixed + advance_ref_number: { type: 'string' } + }, + + }, + response: { + 200: { + type: 'object', + }, + 400: { type: 'object' }, + 404: { type: 'object' }, + }, + security: [ + { + basicAuth: [], + }, + ], + }, + // preHandler: fastify.auth([fastify.authenticate]), // enable if needed + handler: userController.updateadvanceForSupplier, + }); + + + + +fastify.route({ + method: 'POST', + url: '/api/request-advance-amount-split/:id/supplier/split', + schema: { + description: 'Split a booking into multiple entries with individual capacity, date, time, and quoted_amount', + tags: ['User'], + summary: 'Split booking into multiple entries', + params: { + type: 'object', + properties: { + id: { type: 'string', description: 'Booking _id' }, + }, + required: ['id'], + }, + body: { + type: 'object', + properties: { + splits: { + type: 'array', + items: { + type: 'object', + properties: { + capacity: { type: 'number' }, + date: { type: 
'string' }, + time: { type: 'string' }, + quoted_amount: { type: 'number' } + }, + required: ['capacity'] + } + } + }, + required: ['splits'], + }, + response: { + 200: { type: 'object' }, + 400: { type: 'object' }, + 404: { type: 'object' }, + }, + }, + handler: userController.splitBookingForSupplier, +}); + + + + next(); }; diff --git a/uploads/45246b640831a27395f9d1dfea1dd72d b/uploads/45246b640831a27395f9d1dfea1dd72d new file mode 100644 index 00000000..54f62aa4 Binary files /dev/null and b/uploads/45246b640831a27395f9d1dfea1dd72d differ diff --git a/uploads/955273029ffa0b831471711c8320d710 b/uploads/955273029ffa0b831471711c8320d710 new file mode 100644 index 00000000..dd973941 Binary files /dev/null and b/uploads/955273029ffa0b831471711c8320d710 differ diff --git a/uploads/986f0b7b4d59233ae60e99a873da1ba8 b/uploads/986f0b7b4d59233ae60e99a873da1ba8 new file mode 100644 index 00000000..dd973941 Binary files /dev/null and b/uploads/986f0b7b4d59233ae60e99a873da1ba8 differ diff --git a/uploads/9ff41d2544f312e3e471ec053b8bbcc9 b/uploads/9ff41d2544f312e3e471ec053b8bbcc9 new file mode 100644 index 00000000..54f62aa4 Binary files /dev/null and b/uploads/9ff41d2544f312e3e471ec053b8bbcc9 differ diff --git a/uploads/ae2e66bb5806eff227d85b57df4d28f7 b/uploads/ae2e66bb5806eff227d85b57df4d28f7 new file mode 100644 index 00000000..dd973941 Binary files /dev/null and b/uploads/ae2e66bb5806eff227d85b57df4d28f7 differ diff --git a/uploads/d09c81cf26a3ba3b463e7c65565e032b b/uploads/d09c81cf26a3ba3b463e7c65565e032b new file mode 100644 index 00000000..735faf85 Binary files /dev/null and b/uploads/d09c81cf26a3ba3b463e7c65565e032b differ diff --git a/uploads/d2bfa56b0d68f890ea38a77a907bc5b8 b/uploads/d2bfa56b0d68f890ea38a77a907bc5b8 new file mode 100644 index 00000000..735faf85 Binary files /dev/null and b/uploads/d2bfa56b0d68f890ea38a77a907bc5b8 differ diff --git a/watermanagement-backend b/watermanagement-backend deleted file mode 160000 index 0f11d82f..00000000 --- 
a/watermanagement-backend +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 0f11d82f3692b53f67a15af4ad516bf0a1240d70